From b164b2e494bbca4f2207ac53acc4f428ec5d4d7d Mon Sep 17 00:00:00 2001
From: Goutham Pacha Ravi
Date: Mon, 11 Aug 2025 22:14:27 -0700
Subject: [PATCH] Retire monasca-api repository

This repository is being retired as part of the Monasca project
retirement. The project content has been replaced with a retirement
notice.

Needed-By: I3cb522ce8f51424b64e93c1efaf0dfd1781cd5ac
Change-Id: Iaafdd53ec64083e46aa0ac72d37346465826eb67
Signed-off-by: Goutham Pacha Ravi
---
 .coveragerc | 8 - .stestr.conf | 4 - .zuul.yaml | 262 -- CONTRIBUTING.rst | 19 - README.rst | 144 +- api-ref/locale/.gitkeep | 0 api-ref/source/conf.py | 196 - api-ref/source/index.rst | 22 - babel.cfg | 2 - bindep.txt | 6 - common/build_common.sh | 30 - config-generator/README.rst | 19 - config-generator/__init__.py | 0 config-generator/monasca-api.conf | 10 - config-generator/policy.conf | 4 - contrib/post_test_hook.sh | 94 - devstack/README.md | 124 - devstack/Vagrantfile | 207 - devstack/files/cassandra/monasca_schema.cql | 93 - devstack/files/debs/monasca-api | 9 - .../files/elasticsearch/elasticsearch.yml | 88 - .../grafana/dashboards.d/08-openstack.json | 988 ----- .../grafana/dashboards.d/09-monasca.json | 1603 ------- .../files/grafana/dashboards.d/11-maradb.json | 709 ---- .../files/grafana/dashboards.d/12-api.json | 624 --- .../files/grafana/dashboards.d/13-storm.json | 624 --- .../grafana/dashboards.d/14-persister.json | 624 --- .../grafana/dashboards.d/15-influxdb.json | 564 --- .../grafana/dashboards.d/16-zookeper.json | 760 ---- .../grafana/dashboards.d/17-notification.json | 624 --- .../files/grafana/dashboards.d/18-kafka.json | 624 --- .../files/grafana/dashboards.d/19-statsd.json | 624 --- .../files/grafana/dashboards.d/20-kibana.json | 624 --- .../dashboards.d/22-logtransformer.json | 624 --- .../grafana/dashboards.d/23-logtmetrics.json | 624 --- .../grafana/dashboards.d/24-logpersister.json | 624 --- devstack/files/grafana/grafana-init.py | 176 - devstack/files/grafana/grafana-server | 146 - devstack/files/grafana/grafana.ini | 13 - devstack/files/influxdb/influxdb | 19 - devstack/files/influxdb/influxdb.conf | 579 --- devstack/files/kafka/kafka-server-start.sh | 38 - devstack/files/kafka/kafka.service | 30 - devstack/files/kafka/server.properties | 146 - devstack/files/kibana/kibana.yml | 122 - devstack/files/monasca-agent/elastic.yaml | 7 - devstack/files/monasca-agent/host_alive.yaml | 8 - devstack/files/monasca-agent/http_check.yaml | 27 - .../files/monasca-agent/kafka_consumer.yaml | 14 - .../files/monasca-agent/monasca-reconfigure | 20 - devstack/files/monasca-agent/mysql.yaml | 8 - devstack/files/monasca-agent/process.yaml | 87 - devstack/files/monasca-agent/zk.yaml | 6 - .../monasca-api/apache-monasca-api.template | 18 - devstack/files/monasca-api/api-config.yml | 186 - .../files/monasca-api/monasca-api.service | 29 - devstack/files/monasca-log-agent/agent.conf | 41 - .../monasca-log-metrics/log-metrics.conf | 83 - .../monasca-log-persister/persister.conf | 71 - .../monasca-log-transformer/transformer.conf | 88 - .../files/monasca-persister/persister.yml | 176 - devstack/files/monasca-thresh/monasca-thresh | 67 - .../files/monasca-thresh/thresh-config.yml | 145 - devstack/files/schema/influxdb_setup.py | 138 - devstack/files/storm.yaml | 63 - devstack/files/vertica/mon_alarms.sql | 14 - devstack/files/vertica/mon_metrics.sql | 105 - devstack/files/vertica/roles.sql | 18 - devstack/files/vertica/users.sql | 7 - devstack/files/zookeeper/log4j.properties | 68 - devstack/files/zookeeper/zoo.cfg | 74 -
devstack/files/zookeeper/zookeeper.service | 20 - devstack/lib/client.sh | 37 - devstack/lib/constants.sh | 31 - devstack/lib/monasca-log.sh | 804 ---- devstack/lib/notification.sh | 155 - devstack/lib/persister.sh | 273 -- devstack/lib/profile.sh | 54 - devstack/lib/storm.sh | 166 - devstack/lib/ui.sh | 89 - devstack/lib/zookeeper.sh | 73 - devstack/override-defaults | 3 - devstack/plugin.sh | 1555 ------- devstack/settings | 259 -- doc/api-samples/.gitkeep | 0 doc/api-samples/empty.json | 3 - doc/requirements.txt | 7 - doc/source/.gitkeep | 0 doc/source/_static/images/architecture.png | Bin 88600 -> 0 bytes doc/source/_static/images/architecture.svg | 2 - doc/source/admin/index.rst | 89 - doc/source/cli/index.rst | 53 - doc/source/conf.py | 242 -- doc/source/configuration/sample.rst | 40 - doc/source/contributor/.gitignore | 3 - doc/source/contributor/code.rst | 16 - doc/source/contributor/contributing.rst | 188 - doc/source/contributor/db_migrations.rst | 17 - doc/source/contributor/index.rst | 10 - doc/source/glossary.rst | 3 - doc/source/index.rst | 149 - doc/source/install/index.rst | 6 - doc/source/user/index.rst | 6 - docker/Dockerfile | 79 - docker/README.rst | 119 - docker/api-config.ini.j2 | 27 - docker/api-gunicorn.conf.j2 | 15 - docker/api-logging.conf.j2 | 71 - docker/build_image.sh | 150 - docker/health_check.py | 45 - docker/monasca-api.conf.j2 | 870 ---- docker/start.sh | 46 - docs/java.md | 77 - docs/monasca-api-spec.md | 3709 ----------------- etc/api-config.ini | 27 - etc/api-logging.conf | 49 - etc/api-uwsgi.ini | 25 - java/pom.xml | 435 -- java/src/assembly/tar.xml | 29 - java/src/deb/control/control | 9 - java/src/deb/control/prerm | 9 - java/src/deb/etc/api-config.yml-sample | 148 - java/src/main/java/monasca/api/ApiConfig.java | 69 - .../java/monasca/api/MonApiApplication.java | 255 -- .../main/java/monasca/api/MonApiModule.java | 222 - .../api/app/AlarmDefinitionService.java | 417 -- .../java/monasca/api/app/AlarmService.java | 152 - .../monasca/api/app/ApplicationModule.java | 30 - .../java/monasca/api/app/MetricService.java | 108 - .../command/CreateAlarmDefinitionCommand.java | 124 - .../api/app/command/CreateMetricCommand.java | 139 - .../CreateNotificationMethodCommand.java | 111 - .../command/PatchAlarmDefinitionCommand.java | 103 - .../PatchNotificationMethodCommand.java | 105 - .../api/app/command/UpdateAlarmCommand.java | 36 - .../command/UpdateAlarmDefinitionCommand.java | 140 - .../UpdateNotificationMethodCommand.java | 110 - .../java/monasca/api/app/package-info.java | 20 - .../api/app/validation/AlarmValidation.java | 143 - .../app/validation/DimensionValidation.java | 203 - .../app/validation/MetricNameValidation.java | 63 - .../NotificationMethodValidation.java | 55 - .../api/app/validation/Validation.java | 292 -- .../app/validation/ValueMetaValidation.java | 111 - .../java/monasca/api/domain/DomainModule.java | 30 - .../exception/EntityExistsException.java | 37 - .../exception/EntityNotFoundException.java | 37 - .../exception/InvalidEntityException.java | 37 - .../exception/MultipleMetricsException.java | 72 - .../monasca/api/domain/model/alarm/Alarm.java | 302 -- .../api/domain/model/alarm/AlarmCount.java | 63 - .../api/domain/model/alarm/AlarmRepo.java | 70 - .../alarmdefinition/AlarmDefinition.java | 258 -- .../alarmdefinition/AlarmDefinitionRepo.java | 78 - .../alarmstatehistory/AlarmStateHistory.java | 195 - .../AlarmStateHistoryRepo.java | 37 - .../monasca/api/domain/model/common/Link.java | 63 - .../api/domain/model/common/Linked.java | 25 
- .../api/domain/model/common/Paged.java | 62 - .../domain/model/dimension/DimensionBase.java | 73 - .../domain/model/dimension/DimensionName.java | 59 - .../domain/model/dimension/DimensionRepo.java | 43 - .../model/dimension/DimensionValue.java | 69 - .../model/measurement/MeasurementRepo.java | 33 - .../model/measurement/Measurements.java | 139 - .../model/metric/MetricDefinitionRepo.java | 36 - .../api/domain/model/metric/MetricName.java | 80 - .../NotificationMethod.java | 126 - .../NotificationMethodRepo.java | 50 - .../NotificationMethodType.java | 66 - .../NotificationMethodTypesRepo.java | 26 - .../domain/model/statistic/StatisticRepo.java | 36 - .../domain/model/statistic/Statistics.java | 54 - .../api/domain/model/version/Version.java | 54 - .../api/domain/model/version/VersionRepo.java | 30 - .../java/monasca/api/domain/package-info.java | 18 - .../domain/service/impl/VersionRepoImpl.java | 45 - .../infrastructure/InfrastructureModule.java | 136 - .../middleware/MiddlewareConfiguration.java | 89 - .../api/infrastructure/package-info.java | 18 - .../persistence/DimensionQueries.java | 72 - .../persistence/PersistUtils.java | 105 - .../SubAlarmDefinitionQueries.java | 64 - .../api/infrastructure/persistence/Utils.java | 60 - .../hibernate/AlarmDefinitionSqlRepoImpl.java | 828 ---- .../hibernate/AlarmHibernateUtils.java | 101 - .../hibernate/AlarmSqlRepoImpl.java | 732 ---- .../persistence/hibernate/BaseSqlRepo.java | 68 - .../NotificationMethodSqlRepoImpl.java | 288 -- .../NotificationMethodTypesSqlRepoImpl.java | 72 - .../InfluxV9AlarmStateHistoryRepo.java | 199 - .../influxdb/InfluxV9DimensionRepo.java | 193 - .../influxdb/InfluxV9MeasurementRepo.java | 234 -- .../InfluxV9MetricDefinitionRepo.java | 295 -- .../influxdb/InfluxV9RepoReader.java | 168 - .../influxdb/InfluxV9StatisticRepo.java | 279 -- .../persistence/influxdb/InfluxV9Utils.java | 331 -- .../persistence/influxdb/Series.java | 80 - .../mysql/AlarmDefinitionMySqlRepoImpl.java | 448 -- .../persistence/mysql/AlarmMySqlRepoImpl.java | 653 --- .../persistence/mysql/MySQLUtils.java | 109 - .../NotificationMethodMySqlRepoImpl.java | 232 -- .../NotificationMethodTypesMySqlRepoImpl.java | 67 - .../AlarmStateHistoryVerticaRepoImpl.java | 322 -- .../vertica/DimensionVerticaRepoImpl.java | 196 - .../vertica/MeasurementVerticaRepoImpl.java | 304 -- .../MetricDefinitionVerticaRepoImpl.java | 282 -- .../persistence/vertica/MetricQueries.java | 356 -- .../vertica/StatisticVerticaRepoImpl.java | 386 -- .../vertica/VerticaDataSourceFactory.java | 28 - .../servlet/MockAuthenticationFilter.java | 93 - .../servlet/PostAuthenticationFilter.java | 215 - .../servlet/PreAuthenticationFilter.java | 134 - .../servlet/RoleAuthorizationFilter.java | 70 - .../api/resource/AlarmDefinitionResource.java | 182 - .../monasca/api/resource/AlarmResource.java | 313 -- .../api/resource/DimensionResource.java | 97 - .../main/java/monasca/api/resource/Links.java | 363 -- .../api/resource/MeasurementResource.java | 107 - .../monasca/api/resource/MetricResource.java | 195 - .../resource/NotificationMethodResource.java | 175 - .../NotificationMethodTypesResource.java | 71 - .../api/resource/StatisticResource.java | 111 - .../monasca/api/resource/VersionResource.java | 66 - .../api/resource/annotation/PATCH.java | 27 - .../ConstraintViolationExceptionMapper.java | 40 - .../EntityExistsExceptionMapper.java | 32 - .../EntityNotFoundExceptionMapper.java | 32 - .../api/resource/exception/ErrorMessage.java | 36 - .../api/resource/exception/Exceptions.java | 169 - 
.../IllegalArgumentExceptionMapper.java | 31 - .../InvalidEntityExceptionMapper.java | 31 - .../JsonMappingExceptionManager.java | 39 - .../JsonProcessingExceptionMapper.java | 69 - .../MultipleMetricsExceptionMapper.java | 46 - .../exception/ThrowableExceptionMapper.java | 44 - .../SubAlarmExpressionSerializer.java | 47 - java/src/main/resources/api-config.yml | 150 - .../api/app/AlarmDefinitionServiceTest.java | 611 --- .../CreateAlarmDefinitionCommandTest.java | 43 - .../command/CreateNotificationMethodTest.java | 237 -- .../app/validation/AlarmExpressionsTest.java | 62 - .../api/app/validation/DimensionsTest.java | 101 - .../api/app/validation/ValidationTest.java | 38 - .../validation/ValueMetaValidationTest.java | 144 - .../api/domain/model/AbstractModelTest.java | 52 - .../api/domain/model/AlarmDefinitionTest.java | 51 - .../domain/model/NotificationMethodTest.java | 49 - .../monasca/api/domain/model/VersionTest.java | 51 - .../persistence/PersistUtilsTest.java | 54 - .../SubAlarmDefinitionQueriesTest.java | 51 - .../AlarmDefinitionSqlRepositoryImplTest.java | 498 --- .../hibernate/AlarmHibernateUtilsTest.java | 169 - .../hibernate/AlarmSqlRepositoryImplTest.java | 561 --- .../persistence/hibernate/HibernateUtil.java | 115 - ...tificationMethodSqlRepositoryImplTest.java | 184 - ...ationMethodTypesSqlRepositoryImplTest.java | 88 - .../persistence/hibernate/TestHelper.java | 46 - .../influxdb/InfluxV9UtilsTest.java | 92 - ...larmDefinitionMySqlRepositoryImplTest.java | 328 -- .../mysql/AlarmMySqlRepositoryImplTest.java | 405 -- ...ficationMethodMySqlRepositoryImplTest.java | 219 - ...tionMethodTypeMySqlRepositoryImplTest.java | 78 - ...StateHistoryVerticaRepositoryImplTest.java | 71 - .../MeasurementVerticaRepositoryImplTest.java | 113 - ...icDefinitionVerticaRepositoryImplTest.java | 152 - .../vertica/MetricQueriesTest.java | 88 - .../api/integration/AlarmIntegrationTest.java | 188 - .../integration/MetricIntegrationTest.java | 100 - .../NotificationMethodIntegrationTest.java | 131 - .../integration/docker/ITInfluxDBTest.java | 649 --- .../resource/AbstractMonApiResourceTest.java | 44 - .../resource/AlarmDefinitionResourceTest.java | 436 -- .../api/resource/DimensionResourceTest.java | 73 - .../java/monasca/api/resource/LinksTest.java | 137 - .../api/resource/MetricResourceTest.java | 333 -- .../NotificationMethodResourceTest.java | 402 -- .../NotificationMethodTypeResourceTest.java | 108 - .../api/resource/StatisticResourceTest.java | 99 - .../api/resource/VersionResourceTest.java | 110 - .../api/resource/exception/ErrorMessages.java | 66 - java/src/test/resources/fixtures/alarm.json | 1 - .../test/resources/fixtures/metricSet.json | 1 - .../fixtures/metricWithoutDimensionName.json | 1 - .../resources/fixtures/metricWithoutName.json | 1 - .../fixtures/metricWithoutTimestamp.json | 1 - .../fixtures/metricWithoutValue.json | 1 - .../src/test/resources/fixtures/newAlarm.json | 1 - .../fixtures/newNotificationMethod.json | 1 - .../newNotificationMethodWithInvalidEnum.json | 1 - ...ewNotificationMethodWithLowercaseEnum.json | 1 - .../newNotificationMethodWithPeriod.json | 1 - .../fixtures/notificationMethod.json | 1 - java/src/test/resources/fixtures/version.json | 1 - .../src/test/resources/monasca-api-config.yml | 142 - .../persistence/mysql/alarm.sql | 89 - .../persistence/mysql/notification_method.sql | 11 - .../mysql/notification_method_type.sql | 4 - .../persistence/vertica/metrics.sql | 79 - monasca_api/__init__.py | 0 monasca_api/api/__init__.py | 0 
monasca_api/api/alarm_definitions_api_v2.py | 38 - monasca_api/api/alarms_api_v2.py | 52 - monasca_api/api/core/__init__.py | 0 monasca_api/api/core/log/__init__.py | 0 monasca_api/api/core/log/exceptions.py | 35 - monasca_api/api/core/log/log_publisher.py | 215 - monasca_api/api/core/log/model.py | 119 - monasca_api/api/core/log/validation.py | 246 -- monasca_api/api/core/request.py | 117 - monasca_api/api/core/request_context.py | 36 - monasca_api/api/healthcheck_api.py | 53 - monasca_api/api/logs_api.py | 61 - monasca_api/api/metrics_api_v2.py | 75 - monasca_api/api/notifications_api_v2.py | 38 - monasca_api/api/notificationstype_api_v2.py | 38 - monasca_api/api/server.py | 148 - monasca_api/api/versions_api.py | 26 - monasca_api/api/wsgi.py | 26 - monasca_api/cmd/__init__.py | 0 monasca_api/cmd/monasca_db.py | 165 - monasca_api/cmd/status.py | 57 - monasca_api/common/__init__.py | 0 monasca_api/common/messaging/__init__.py | 0 monasca_api/common/messaging/exceptions.py | 17 - .../common/messaging/fake_publisher.py | 24 - .../common/messaging/kafka_publisher.py | 64 - .../messaging/message_formats/__init__.py | 0 .../messaging/message_formats/metrics.py | 33 - monasca_api/common/messaging/publisher.py | 24 - monasca_api/common/policy/__init__.py | 0 monasca_api/common/policy/i18n.py | 46 - monasca_api/common/policy/policy_engine.py | 256 -- monasca_api/common/repositories/__init__.py | 0 .../alarm_definitions_repository.py | 68 - .../common/repositories/alarms_repository.py | 53 - .../common/repositories/cassandra/__init__.py | 0 .../cassandra/metrics_repository.py | 1091 ----- monasca_api/common/repositories/constants.py | 1 - monasca_api/common/repositories/exceptions.py | 38 - .../common/repositories/fake/__init__.py | 0 .../repositories/fake/metrics_repository.py | 23 - .../common/repositories/influxdb/__init__.py | 0 .../influxdb/metrics_repository.py | 973 ----- .../common/repositories/metrics_repository.py | 63 - .../common/repositories/model/__init__.py | 0 .../model/sub_alarm_definition.py | 155 - .../notification_method_type_repository.py | 25 - .../repositories/notifications_repository.py | 44 - .../common/repositories/sqla/__init__.py | 0 .../sqla/alarm_definitions_repository.py | 803 ---- .../repositories/sqla/alarms_repository.py | 631 --- .../common/repositories/sqla/models.py | 265 -- .../notification_method_type_repository.py | 41 - .../sqla/notifications_repository.py | 252 -- .../repositories/sqla/sql_repository.py | 105 - monasca_api/common/rest/__init__.py | 0 monasca_api/common/rest/exceptions.py | 39 - monasca_api/common/rest/utils.py | 115 - monasca_api/conf/__init__.py | 82 - monasca_api/conf/cassandra.py | 63 - monasca_api/conf/dispatcher.py | 81 - monasca_api/conf/global.py | 45 - monasca_api/conf/influxdb.py | 54 - monasca_api/conf/kafka.py | 83 - monasca_api/conf/log_publisher.py | 48 - monasca_api/conf/messaging.py | 52 - monasca_api/conf/repositories.py | 72 - monasca_api/conf/security.py | 56 - monasca_api/conf/types.py | 63 - monasca_api/config.py | 87 - monasca_api/db/README | 12 - monasca_api/db/__init__.py | 0 monasca_api/db/alembic.ini | 74 - monasca_api/db/alembic/__init__.py | 0 monasca_api/db/alembic/env.py | 81 - monasca_api/db/alembic/script.py.mako | 24 - .../alembic/versions/00597b5c8325_initial.py | 587 --- .../0cce983d957a_deterministic_alarms.py | 42 - ...98bb7_remove_builtin_notification_types.py | 70 - ...2434b_remove_event_and_migration_tables.py | 176 - .../6b2b88f3cab4_add_sub_alarm_state.py | 44 - 
...c1_add_inhibited_and_silenced_to_alarms.py | 48 - .../c2f85438d6f3_period_notifications.py | 42 - ...d8b801498850_remove_stream_action_types.py | 49 - ...6_remove_inhibited_silenced_from_alarms.py | 48 - monasca_api/db/fingerprint.py | 113 - monasca_api/expression_parser/__init__.py | 0 .../expression_parser/alarm_expr_parser.py | 380 -- monasca_api/hacking/__init__.py | 0 monasca_api/hacking/checks.py | 28 - monasca_api/healthcheck/__init__.py | 0 monasca_api/healthcheck/alarms_db_check.py | 49 - monasca_api/healthcheck/base.py | 36 - monasca_api/healthcheck/kafka_check.py | 79 - monasca_api/healthcheck/keystone_protocol.py | 65 - monasca_api/healthcheck/metrics_db_check.py | 48 - monasca_api/healthchecks.py | 58 - monasca_api/policies/__init__.py | 70 - monasca_api/policies/alarms.py | 166 - monasca_api/policies/delegate.py | 35 - monasca_api/policies/healthcheck.py | 44 - monasca_api/policies/logs.py | 42 - monasca_api/policies/metrics.py | 67 - monasca_api/policies/notifications.py | 101 - monasca_api/policies/versions.py | 38 - monasca_api/tests/__init__.py | 0 monasca_api/tests/base.py | 233 -- monasca_api/tests/cmd/__init__.py | 0 monasca_api/tests/cmd/test_status.py | 38 - monasca_api/tests/config.py | 40 - monasca_api/tests/db/__init__.py | 0 monasca_api/tests/db/test_fingerprint.py | 65 - monasca_api/tests/policy/__init__.py | 0 monasca_api/tests/policy/base.py | 102 - monasca_api/tests/policy/test_policy.py | 274 -- monasca_api/tests/sqlite_alarm.sql | 123 - monasca_api/tests/test_a_repository.py | 1142 ----- monasca_api/tests/test_ad_repository.py | 943 ----- monasca_api/tests/test_alarm_expression.py | 153 - monasca_api/tests/test_alarms.py | 2092 ---------- .../tests/test_alarms_db_health_check.py | 60 - monasca_api/tests/test_config_types.py | 55 - monasca_api/tests/test_healthchecks.py | 137 - monasca_api/tests/test_helpers.py | 148 - monasca_api/tests/test_kafka_health_check.py | 86 - monasca_api/tests/test_keystone_protocol.py | 44 - monasca_api/tests/test_log_publisher.py | 291 -- monasca_api/tests/test_logs.py | 177 - monasca_api/tests/test_message_formats.py | 55 - monasca_api/tests/test_metrics.py | 325 -- .../tests/test_metrics_db_health_check.py | 56 - monasca_api/tests/test_models_repository.py | 164 - monasca_api/tests/test_nm_repository.py | 242 -- monasca_api/tests/test_notifications.py | 604 --- monasca_api/tests/test_policy.py | 254 -- monasca_api/tests/test_query_helpers.py | 143 - monasca_api/tests/test_repositories.py | 752 ---- monasca_api/tests/test_request.py | 153 - monasca_api/tests/test_rest.py | 81 - .../tests/test_sub_alarm_definition.py | 124 - monasca_api/tests/test_validation.py | 379 -- monasca_api/tests/test_versions.py | 69 - monasca_api/v2/__init__.py | 0 monasca_api/v2/common/__init__.py | 0 monasca_api/v2/common/bulk_processor.py | 134 - monasca_api/v2/common/exceptions.py | 37 - monasca_api/v2/common/helpers.py | 63 - monasca_api/v2/common/schemas/__init__.py | 0 .../alarm_definition_request_body_schema.py | 83 - .../v2/common/schemas/alarm_update_schema.py | 43 - monasca_api/v2/common/schemas/exceptions.py | 17 - .../notifications_request_body_schema.py | 93 - monasca_api/v2/common/utils.py | 17 - monasca_api/v2/common/validation.py | 68 - monasca_api/v2/reference/__init__.py | 0 monasca_api/v2/reference/alarm_definitions.py | 756 ---- monasca_api/v2/reference/alarming.py | 182 - monasca_api/v2/reference/alarms.py | 516 --- monasca_api/v2/reference/helpers.py | 790 ---- monasca_api/v2/reference/logs.py | 89 - 
monasca_api/v2/reference/metrics.py | 353 -- monasca_api/v2/reference/notifications.py | 277 -- monasca_api/v2/reference/notificationstype.py | 44 - monasca_api/v2/reference/resource.py | 58 - monasca_api/v2/reference/version_2_0.py | 37 - monasca_api/v2/reference/versions.py | 65 - monasca_api/version.py | 18 - perf/README.md | 34 - perf/monasca_query_test.jmx | 624 --- playbooks/docker-publish.yml | 12 - pom.xml | 53 - releasenotes/locale/.gitkeep | 0 ...loadbalancing-policy-4d5d0e7e8064a870.yaml | 5 - ...status-upgrade-check-c37e6910c2eb0150.yaml | 12 - ...lient_enabled_option-7be9bc4e0fcecc70.yaml | 7 - ...rms-count-dimensions-f746ca6c725335b2.yaml | 8 - .../apache-kafka-101-d5f3454fd445c727.yaml | 7 - ...ectiontimeout-option-13bc68e2fcd56580.yaml | 4 - .../notes/config_gen-ead0282db82e6c0f.yaml | 6 - ...ormatted-policy-file-2f00c9efa9e274af.yaml | 20 - ...-legacy-kafka-client-16dd1ac1894fe7a0.yaml | 6 - .../notes/drop-py-2-7-aee58a9afab0e4b9.yaml | 6 - ...arch-cluster-upgrade-4b7bdc9c17e0169f.yaml | 5 - .../enable-disk-tsi-40f29262a0301531.yaml | 8 - .../notes/fix-cassandra-cluster-port.yaml | 4 - ...ration-issue-2006984-6676bd3a8a34c9ae.yaml | 6 - .../notes/influx_1.3.3-1be2009139641336.yaml | 5 - .../notes/influx_1.3.8-e6b0be63d7d7222f.yaml | 5 - ...rt-for-db-per-tenant-6ada0c3979de6df8.yaml | 6 - .../influxdb_1.3.9-666fc98a5357890d.yaml | 6 - .../notes/mergeapis-baa6905c7b8fd070.yaml | 11 - .../notes/os-docs-550ce9ad68a4a29e.yaml | 7 - .../notes/oslo-policy-345ff286820badc6.yaml | 6 - .../notes/oslo-policy-aebaebd218b9d2ff.yaml | 5 - ...riodic-notifications-68f6c2ed6f89ebc0.yaml | 4 - ...-database-url-option-efd6c09b771063c0.yaml | 5 - ...cation_plugins_in_db-140ece49106b4a5a.yaml | 6 - .../notes/storm_1.1.3-4aa16a9c648cd89b.yaml | 5 - ...ring-kafka-post-size-4baa10353e859b8a.yaml | 4 - ...ion-names-and-values-e5a2ba64700dcd0b.yaml | 7 - .../upgrade-influxdb-3fa94ef4b15c8217.yaml | 10 - .../notes/upgrade-storm-7b4f262d3783d589.yaml | 5 - .../upgrade_kafka_2.0.1-b53f180d751e47f5.yaml | 6 - ...ard-config-file-path-a4c1a29d9d3fcc07.yaml | 7 - releasenotes/source/2023.1.rst | 6 - releasenotes/source/2023.2.rst | 6 - releasenotes/source/conf.py | 212 - releasenotes/source/index.rst | 23 - releasenotes/source/pike.rst | 6 - releasenotes/source/queens.rst | 6 - releasenotes/source/rocky.rst | 6 - releasenotes/source/stein.rst | 6 - releasenotes/source/train.rst | 6 - releasenotes/source/unreleased.rst | 5 - releasenotes/source/ussuri.rst | 6 - releasenotes/source/victoria.rst | 6 - releasenotes/source/wallaby.rst | 6 - releasenotes/source/xena.rst | 6 - releasenotes/source/yoga.rst | 6 - releasenotes/source/zed.rst | 6 - requirements.txt | 31 - run_maven.sh | 59 - setup.cfg | 62 - setup.py | 20 - test-requirements.txt | 26 - tools/bashate.sh | 6 - tox.ini | 179 - 528 files changed, 7 insertions(+), 74761 deletions(-) delete mode 100644 .coveragerc delete mode 100644 .stestr.conf delete mode 100644 .zuul.yaml delete mode 100644 CONTRIBUTING.rst delete mode 100644 api-ref/locale/.gitkeep delete mode 100644 api-ref/source/conf.py delete mode 100644 api-ref/source/index.rst delete mode 100644 babel.cfg delete mode 100644 bindep.txt delete mode 100755 common/build_common.sh delete mode 100644 config-generator/README.rst delete mode 100644 config-generator/__init__.py delete mode 100644 config-generator/monasca-api.conf delete mode 100644 config-generator/policy.conf delete mode 100755 contrib/post_test_hook.sh delete mode 100644 devstack/README.md delete mode 100644 devstack/Vagrantfile delete 
mode 100644 devstack/files/cassandra/monasca_schema.cql delete mode 100644 devstack/files/debs/monasca-api delete mode 100644 devstack/files/elasticsearch/elasticsearch.yml delete mode 100644 devstack/files/grafana/dashboards.d/08-openstack.json delete mode 100644 devstack/files/grafana/dashboards.d/09-monasca.json delete mode 100644 devstack/files/grafana/dashboards.d/11-maradb.json delete mode 100644 devstack/files/grafana/dashboards.d/12-api.json delete mode 100644 devstack/files/grafana/dashboards.d/13-storm.json delete mode 100644 devstack/files/grafana/dashboards.d/14-persister.json delete mode 100644 devstack/files/grafana/dashboards.d/15-influxdb.json delete mode 100644 devstack/files/grafana/dashboards.d/16-zookeper.json delete mode 100644 devstack/files/grafana/dashboards.d/17-notification.json delete mode 100644 devstack/files/grafana/dashboards.d/18-kafka.json delete mode 100644 devstack/files/grafana/dashboards.d/19-statsd.json delete mode 100644 devstack/files/grafana/dashboards.d/20-kibana.json delete mode 100644 devstack/files/grafana/dashboards.d/22-logtransformer.json delete mode 100644 devstack/files/grafana/dashboards.d/23-logtmetrics.json delete mode 100644 devstack/files/grafana/dashboards.d/24-logpersister.json delete mode 100644 devstack/files/grafana/grafana-init.py delete mode 100755 devstack/files/grafana/grafana-server delete mode 100644 devstack/files/grafana/grafana.ini delete mode 100644 devstack/files/influxdb/influxdb delete mode 100644 devstack/files/influxdb/influxdb.conf delete mode 100644 devstack/files/kafka/kafka-server-start.sh delete mode 100644 devstack/files/kafka/kafka.service delete mode 100644 devstack/files/kafka/server.properties delete mode 100644 devstack/files/kibana/kibana.yml delete mode 100644 devstack/files/monasca-agent/elastic.yaml delete mode 100644 devstack/files/monasca-agent/host_alive.yaml delete mode 100644 devstack/files/monasca-agent/http_check.yaml delete mode 100644 devstack/files/monasca-agent/kafka_consumer.yaml delete mode 100644 devstack/files/monasca-agent/monasca-reconfigure delete mode 100644 devstack/files/monasca-agent/mysql.yaml delete mode 100644 devstack/files/monasca-agent/process.yaml delete mode 100644 devstack/files/monasca-agent/zk.yaml delete mode 100644 devstack/files/monasca-api/apache-monasca-api.template delete mode 100644 devstack/files/monasca-api/api-config.yml delete mode 100644 devstack/files/monasca-api/monasca-api.service delete mode 100644 devstack/files/monasca-log-agent/agent.conf delete mode 100644 devstack/files/monasca-log-metrics/log-metrics.conf delete mode 100644 devstack/files/monasca-log-persister/persister.conf delete mode 100644 devstack/files/monasca-log-transformer/transformer.conf delete mode 100644 devstack/files/monasca-persister/persister.yml delete mode 100644 devstack/files/monasca-thresh/monasca-thresh delete mode 100644 devstack/files/monasca-thresh/thresh-config.yml delete mode 100644 devstack/files/schema/influxdb_setup.py delete mode 100644 devstack/files/storm.yaml delete mode 100644 devstack/files/vertica/mon_alarms.sql delete mode 100644 devstack/files/vertica/mon_metrics.sql delete mode 100644 devstack/files/vertica/roles.sql delete mode 100644 devstack/files/vertica/users.sql delete mode 100644 devstack/files/zookeeper/log4j.properties delete mode 100644 devstack/files/zookeeper/zoo.cfg delete mode 100644 devstack/files/zookeeper/zookeeper.service delete mode 100644 devstack/lib/client.sh delete mode 100644 devstack/lib/constants.sh delete mode 100644 
devstack/lib/monasca-log.sh delete mode 100644 devstack/lib/notification.sh delete mode 100644 devstack/lib/persister.sh delete mode 100644 devstack/lib/profile.sh delete mode 100644 devstack/lib/storm.sh delete mode 100644 devstack/lib/ui.sh delete mode 100644 devstack/lib/zookeeper.sh delete mode 100644 devstack/override-defaults delete mode 100755 devstack/plugin.sh delete mode 100644 devstack/settings delete mode 100644 doc/api-samples/.gitkeep delete mode 100644 doc/api-samples/empty.json delete mode 100644 doc/requirements.txt delete mode 100644 doc/source/.gitkeep delete mode 100644 doc/source/_static/images/architecture.png delete mode 100644 doc/source/_static/images/architecture.svg delete mode 100644 doc/source/admin/index.rst delete mode 100644 doc/source/cli/index.rst delete mode 100644 doc/source/conf.py delete mode 100644 doc/source/configuration/sample.rst delete mode 100644 doc/source/contributor/.gitignore delete mode 100644 doc/source/contributor/code.rst delete mode 100644 doc/source/contributor/contributing.rst delete mode 100644 doc/source/contributor/db_migrations.rst delete mode 100644 doc/source/contributor/index.rst delete mode 100644 doc/source/glossary.rst delete mode 100644 doc/source/index.rst delete mode 100644 doc/source/install/index.rst delete mode 100644 doc/source/user/index.rst delete mode 100644 docker/Dockerfile delete mode 100644 docker/README.rst delete mode 100644 docker/api-config.ini.j2 delete mode 100644 docker/api-gunicorn.conf.j2 delete mode 100644 docker/api-logging.conf.j2 delete mode 100755 docker/build_image.sh delete mode 100755 docker/health_check.py delete mode 100644 docker/monasca-api.conf.j2 delete mode 100644 docker/start.sh delete mode 100644 docs/java.md delete mode 100644 docs/monasca-api-spec.md delete mode 100644 etc/api-config.ini delete mode 100644 etc/api-logging.conf delete mode 100644 etc/api-uwsgi.ini delete mode 100644 java/pom.xml delete mode 100644 java/src/assembly/tar.xml delete mode 100644 java/src/deb/control/control delete mode 100644 java/src/deb/control/prerm delete mode 100755 java/src/deb/etc/api-config.yml-sample delete mode 100644 java/src/main/java/monasca/api/ApiConfig.java delete mode 100644 java/src/main/java/monasca/api/MonApiApplication.java delete mode 100644 java/src/main/java/monasca/api/MonApiModule.java delete mode 100644 java/src/main/java/monasca/api/app/AlarmDefinitionService.java delete mode 100644 java/src/main/java/monasca/api/app/AlarmService.java delete mode 100644 java/src/main/java/monasca/api/app/ApplicationModule.java delete mode 100644 java/src/main/java/monasca/api/app/MetricService.java delete mode 100644 java/src/main/java/monasca/api/app/command/CreateAlarmDefinitionCommand.java delete mode 100644 java/src/main/java/monasca/api/app/command/CreateMetricCommand.java delete mode 100644 java/src/main/java/monasca/api/app/command/CreateNotificationMethodCommand.java delete mode 100644 java/src/main/java/monasca/api/app/command/PatchAlarmDefinitionCommand.java delete mode 100644 java/src/main/java/monasca/api/app/command/PatchNotificationMethodCommand.java delete mode 100644 java/src/main/java/monasca/api/app/command/UpdateAlarmCommand.java delete mode 100644 java/src/main/java/monasca/api/app/command/UpdateAlarmDefinitionCommand.java delete mode 100644 java/src/main/java/monasca/api/app/command/UpdateNotificationMethodCommand.java delete mode 100644 java/src/main/java/monasca/api/app/package-info.java delete mode 100644 java/src/main/java/monasca/api/app/validation/AlarmValidation.java 
delete mode 100644 java/src/main/java/monasca/api/app/validation/DimensionValidation.java delete mode 100644 java/src/main/java/monasca/api/app/validation/MetricNameValidation.java delete mode 100644 java/src/main/java/monasca/api/app/validation/NotificationMethodValidation.java delete mode 100644 java/src/main/java/monasca/api/app/validation/Validation.java delete mode 100644 java/src/main/java/monasca/api/app/validation/ValueMetaValidation.java delete mode 100644 java/src/main/java/monasca/api/domain/DomainModule.java delete mode 100644 java/src/main/java/monasca/api/domain/exception/EntityExistsException.java delete mode 100644 java/src/main/java/monasca/api/domain/exception/EntityNotFoundException.java delete mode 100644 java/src/main/java/monasca/api/domain/exception/InvalidEntityException.java delete mode 100644 java/src/main/java/monasca/api/domain/exception/MultipleMetricsException.java delete mode 100644 java/src/main/java/monasca/api/domain/model/alarm/Alarm.java delete mode 100644 java/src/main/java/monasca/api/domain/model/alarm/AlarmCount.java delete mode 100644 java/src/main/java/monasca/api/domain/model/alarm/AlarmRepo.java delete mode 100644 java/src/main/java/monasca/api/domain/model/alarmdefinition/AlarmDefinition.java delete mode 100644 java/src/main/java/monasca/api/domain/model/alarmdefinition/AlarmDefinitionRepo.java delete mode 100644 java/src/main/java/monasca/api/domain/model/alarmstatehistory/AlarmStateHistory.java delete mode 100644 java/src/main/java/monasca/api/domain/model/alarmstatehistory/AlarmStateHistoryRepo.java delete mode 100644 java/src/main/java/monasca/api/domain/model/common/Link.java delete mode 100644 java/src/main/java/monasca/api/domain/model/common/Linked.java delete mode 100644 java/src/main/java/monasca/api/domain/model/common/Paged.java delete mode 100644 java/src/main/java/monasca/api/domain/model/dimension/DimensionBase.java delete mode 100644 java/src/main/java/monasca/api/domain/model/dimension/DimensionName.java delete mode 100644 java/src/main/java/monasca/api/domain/model/dimension/DimensionRepo.java delete mode 100644 java/src/main/java/monasca/api/domain/model/dimension/DimensionValue.java delete mode 100644 java/src/main/java/monasca/api/domain/model/measurement/MeasurementRepo.java delete mode 100644 java/src/main/java/monasca/api/domain/model/measurement/Measurements.java delete mode 100644 java/src/main/java/monasca/api/domain/model/metric/MetricDefinitionRepo.java delete mode 100644 java/src/main/java/monasca/api/domain/model/metric/MetricName.java delete mode 100644 java/src/main/java/monasca/api/domain/model/notificationmethod/NotificationMethod.java delete mode 100644 java/src/main/java/monasca/api/domain/model/notificationmethod/NotificationMethodRepo.java delete mode 100644 java/src/main/java/monasca/api/domain/model/notificationmethod/NotificationMethodType.java delete mode 100644 java/src/main/java/monasca/api/domain/model/notificationmethod/NotificationMethodTypesRepo.java delete mode 100644 java/src/main/java/monasca/api/domain/model/statistic/StatisticRepo.java delete mode 100644 java/src/main/java/monasca/api/domain/model/statistic/Statistics.java delete mode 100644 java/src/main/java/monasca/api/domain/model/version/Version.java delete mode 100644 java/src/main/java/monasca/api/domain/model/version/VersionRepo.java delete mode 100644 java/src/main/java/monasca/api/domain/package-info.java delete mode 100644 java/src/main/java/monasca/api/domain/service/impl/VersionRepoImpl.java delete mode 100644 
java/src/main/java/monasca/api/infrastructure/InfrastructureModule.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/middleware/MiddlewareConfiguration.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/package-info.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/persistence/DimensionQueries.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/persistence/PersistUtils.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/persistence/SubAlarmDefinitionQueries.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/persistence/Utils.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/persistence/hibernate/AlarmDefinitionSqlRepoImpl.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/persistence/hibernate/AlarmHibernateUtils.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/persistence/hibernate/AlarmSqlRepoImpl.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/persistence/hibernate/BaseSqlRepo.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/persistence/hibernate/NotificationMethodSqlRepoImpl.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/persistence/hibernate/NotificationMethodTypesSqlRepoImpl.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9AlarmStateHistoryRepo.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9DimensionRepo.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9MeasurementRepo.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9MetricDefinitionRepo.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9RepoReader.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9StatisticRepo.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9Utils.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/persistence/influxdb/Series.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/persistence/mysql/AlarmDefinitionMySqlRepoImpl.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/persistence/mysql/AlarmMySqlRepoImpl.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/persistence/mysql/MySQLUtils.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/persistence/mysql/NotificationMethodMySqlRepoImpl.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/persistence/mysql/NotificationMethodTypesMySqlRepoImpl.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/persistence/vertica/AlarmStateHistoryVerticaRepoImpl.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/persistence/vertica/DimensionVerticaRepoImpl.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/persistence/vertica/MeasurementVerticaRepoImpl.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/persistence/vertica/MetricDefinitionVerticaRepoImpl.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/persistence/vertica/MetricQueries.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/persistence/vertica/StatisticVerticaRepoImpl.java delete mode 100644 
java/src/main/java/monasca/api/infrastructure/persistence/vertica/VerticaDataSourceFactory.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/servlet/MockAuthenticationFilter.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/servlet/PostAuthenticationFilter.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/servlet/PreAuthenticationFilter.java delete mode 100644 java/src/main/java/monasca/api/infrastructure/servlet/RoleAuthorizationFilter.java delete mode 100644 java/src/main/java/monasca/api/resource/AlarmDefinitionResource.java delete mode 100644 java/src/main/java/monasca/api/resource/AlarmResource.java delete mode 100644 java/src/main/java/monasca/api/resource/DimensionResource.java delete mode 100644 java/src/main/java/monasca/api/resource/Links.java delete mode 100644 java/src/main/java/monasca/api/resource/MeasurementResource.java delete mode 100644 java/src/main/java/monasca/api/resource/MetricResource.java delete mode 100644 java/src/main/java/monasca/api/resource/NotificationMethodResource.java delete mode 100644 java/src/main/java/monasca/api/resource/NotificationMethodTypesResource.java delete mode 100644 java/src/main/java/monasca/api/resource/StatisticResource.java delete mode 100644 java/src/main/java/monasca/api/resource/VersionResource.java delete mode 100644 java/src/main/java/monasca/api/resource/annotation/PATCH.java delete mode 100644 java/src/main/java/monasca/api/resource/exception/ConstraintViolationExceptionMapper.java delete mode 100644 java/src/main/java/monasca/api/resource/exception/EntityExistsExceptionMapper.java delete mode 100644 java/src/main/java/monasca/api/resource/exception/EntityNotFoundExceptionMapper.java delete mode 100644 java/src/main/java/monasca/api/resource/exception/ErrorMessage.java delete mode 100644 java/src/main/java/monasca/api/resource/exception/Exceptions.java delete mode 100644 java/src/main/java/monasca/api/resource/exception/IllegalArgumentExceptionMapper.java delete mode 100644 java/src/main/java/monasca/api/resource/exception/InvalidEntityExceptionMapper.java delete mode 100644 java/src/main/java/monasca/api/resource/exception/JsonMappingExceptionManager.java delete mode 100644 java/src/main/java/monasca/api/resource/exception/JsonProcessingExceptionMapper.java delete mode 100644 java/src/main/java/monasca/api/resource/exception/MultipleMetricsExceptionMapper.java delete mode 100644 java/src/main/java/monasca/api/resource/exception/ThrowableExceptionMapper.java delete mode 100644 java/src/main/java/monasca/api/resource/serialization/SubAlarmExpressionSerializer.java delete mode 100644 java/src/main/resources/api-config.yml delete mode 100644 java/src/test/java/monasca/api/app/AlarmDefinitionServiceTest.java delete mode 100644 java/src/test/java/monasca/api/app/command/CreateAlarmDefinitionCommandTest.java delete mode 100644 java/src/test/java/monasca/api/app/command/CreateNotificationMethodTest.java delete mode 100644 java/src/test/java/monasca/api/app/validation/AlarmExpressionsTest.java delete mode 100644 java/src/test/java/monasca/api/app/validation/DimensionsTest.java delete mode 100644 java/src/test/java/monasca/api/app/validation/ValidationTest.java delete mode 100644 java/src/test/java/monasca/api/app/validation/ValueMetaValidationTest.java delete mode 100644 java/src/test/java/monasca/api/domain/model/AbstractModelTest.java delete mode 100644 java/src/test/java/monasca/api/domain/model/AlarmDefinitionTest.java delete mode 100644 
java/src/test/java/monasca/api/domain/model/NotificationMethodTest.java delete mode 100644 java/src/test/java/monasca/api/domain/model/VersionTest.java delete mode 100644 java/src/test/java/monasca/api/infrastructure/persistence/PersistUtilsTest.java delete mode 100644 java/src/test/java/monasca/api/infrastructure/persistence/SubAlarmDefinitionQueriesTest.java delete mode 100644 java/src/test/java/monasca/api/infrastructure/persistence/hibernate/AlarmDefinitionSqlRepositoryImplTest.java delete mode 100644 java/src/test/java/monasca/api/infrastructure/persistence/hibernate/AlarmHibernateUtilsTest.java delete mode 100644 java/src/test/java/monasca/api/infrastructure/persistence/hibernate/AlarmSqlRepositoryImplTest.java delete mode 100644 java/src/test/java/monasca/api/infrastructure/persistence/hibernate/HibernateUtil.java delete mode 100644 java/src/test/java/monasca/api/infrastructure/persistence/hibernate/NotificationMethodSqlRepositoryImplTest.java delete mode 100644 java/src/test/java/monasca/api/infrastructure/persistence/hibernate/NotificationMethodTypesSqlRepositoryImplTest.java delete mode 100644 java/src/test/java/monasca/api/infrastructure/persistence/hibernate/TestHelper.java delete mode 100644 java/src/test/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9UtilsTest.java delete mode 100644 java/src/test/java/monasca/api/infrastructure/persistence/mysql/AlarmDefinitionMySqlRepositoryImplTest.java delete mode 100644 java/src/test/java/monasca/api/infrastructure/persistence/mysql/AlarmMySqlRepositoryImplTest.java delete mode 100644 java/src/test/java/monasca/api/infrastructure/persistence/mysql/NotificationMethodMySqlRepositoryImplTest.java delete mode 100644 java/src/test/java/monasca/api/infrastructure/persistence/mysql/NotificationMethodTypeMySqlRepositoryImplTest.java delete mode 100644 java/src/test/java/monasca/api/infrastructure/persistence/vertica/AlarmStateHistoryVerticaRepositoryImplTest.java delete mode 100644 java/src/test/java/monasca/api/infrastructure/persistence/vertica/MeasurementVerticaRepositoryImplTest.java delete mode 100644 java/src/test/java/monasca/api/infrastructure/persistence/vertica/MetricDefinitionVerticaRepositoryImplTest.java delete mode 100644 java/src/test/java/monasca/api/infrastructure/persistence/vertica/MetricQueriesTest.java delete mode 100644 java/src/test/java/monasca/api/integration/AlarmIntegrationTest.java delete mode 100644 java/src/test/java/monasca/api/integration/MetricIntegrationTest.java delete mode 100644 java/src/test/java/monasca/api/integration/NotificationMethodIntegrationTest.java delete mode 100644 java/src/test/java/monasca/api/integration/docker/ITInfluxDBTest.java delete mode 100644 java/src/test/java/monasca/api/resource/AbstractMonApiResourceTest.java delete mode 100644 java/src/test/java/monasca/api/resource/AlarmDefinitionResourceTest.java delete mode 100644 java/src/test/java/monasca/api/resource/DimensionResourceTest.java delete mode 100644 java/src/test/java/monasca/api/resource/LinksTest.java delete mode 100644 java/src/test/java/monasca/api/resource/MetricResourceTest.java delete mode 100644 java/src/test/java/monasca/api/resource/NotificationMethodResourceTest.java delete mode 100644 java/src/test/java/monasca/api/resource/NotificationMethodTypeResourceTest.java delete mode 100644 java/src/test/java/monasca/api/resource/StatisticResourceTest.java delete mode 100644 java/src/test/java/monasca/api/resource/VersionResourceTest.java delete mode 100644 
java/src/test/java/monasca/api/resource/exception/ErrorMessages.java delete mode 100644 java/src/test/resources/fixtures/alarm.json delete mode 100644 java/src/test/resources/fixtures/metricSet.json delete mode 100644 java/src/test/resources/fixtures/metricWithoutDimensionName.json delete mode 100644 java/src/test/resources/fixtures/metricWithoutName.json delete mode 100644 java/src/test/resources/fixtures/metricWithoutTimestamp.json delete mode 100644 java/src/test/resources/fixtures/metricWithoutValue.json delete mode 100644 java/src/test/resources/fixtures/newAlarm.json delete mode 100644 java/src/test/resources/fixtures/newNotificationMethod.json delete mode 100644 java/src/test/resources/fixtures/newNotificationMethodWithInvalidEnum.json delete mode 100644 java/src/test/resources/fixtures/newNotificationMethodWithLowercaseEnum.json delete mode 100644 java/src/test/resources/fixtures/newNotificationMethodWithPeriod.json delete mode 100644 java/src/test/resources/fixtures/notificationMethod.json delete mode 100644 java/src/test/resources/fixtures/version.json delete mode 100644 java/src/test/resources/monasca-api-config.yml delete mode 100644 java/src/test/resources/monasca/api/infrastructure/persistence/mysql/alarm.sql delete mode 100644 java/src/test/resources/monasca/api/infrastructure/persistence/mysql/notification_method.sql delete mode 100644 java/src/test/resources/monasca/api/infrastructure/persistence/mysql/notification_method_type.sql delete mode 100644 java/src/test/resources/monasca/api/infrastructure/persistence/vertica/metrics.sql delete mode 100644 monasca_api/__init__.py delete mode 100644 monasca_api/api/__init__.py delete mode 100644 monasca_api/api/alarm_definitions_api_v2.py delete mode 100644 monasca_api/api/alarms_api_v2.py delete mode 100644 monasca_api/api/core/__init__.py delete mode 100644 monasca_api/api/core/log/__init__.py delete mode 100644 monasca_api/api/core/log/exceptions.py delete mode 100644 monasca_api/api/core/log/log_publisher.py delete mode 100644 monasca_api/api/core/log/model.py delete mode 100644 monasca_api/api/core/log/validation.py delete mode 100644 monasca_api/api/core/request.py delete mode 100644 monasca_api/api/core/request_context.py delete mode 100644 monasca_api/api/healthcheck_api.py delete mode 100644 monasca_api/api/logs_api.py delete mode 100644 monasca_api/api/metrics_api_v2.py delete mode 100644 monasca_api/api/notifications_api_v2.py delete mode 100644 monasca_api/api/notificationstype_api_v2.py delete mode 100644 monasca_api/api/server.py delete mode 100644 monasca_api/api/versions_api.py delete mode 100644 monasca_api/api/wsgi.py delete mode 100644 monasca_api/cmd/__init__.py delete mode 100644 monasca_api/cmd/monasca_db.py delete mode 100644 monasca_api/cmd/status.py delete mode 100644 monasca_api/common/__init__.py delete mode 100644 monasca_api/common/messaging/__init__.py delete mode 100644 monasca_api/common/messaging/exceptions.py delete mode 100644 monasca_api/common/messaging/fake_publisher.py delete mode 100644 monasca_api/common/messaging/kafka_publisher.py delete mode 100644 monasca_api/common/messaging/message_formats/__init__.py delete mode 100644 monasca_api/common/messaging/message_formats/metrics.py delete mode 100644 monasca_api/common/messaging/publisher.py delete mode 100644 monasca_api/common/policy/__init__.py delete mode 100644 monasca_api/common/policy/i18n.py delete mode 100644 monasca_api/common/policy/policy_engine.py delete mode 100644 monasca_api/common/repositories/__init__.py delete mode 100644 
monasca_api/common/repositories/alarm_definitions_repository.py delete mode 100644 monasca_api/common/repositories/alarms_repository.py delete mode 100644 monasca_api/common/repositories/cassandra/__init__.py delete mode 100644 monasca_api/common/repositories/cassandra/metrics_repository.py delete mode 100644 monasca_api/common/repositories/constants.py delete mode 100644 monasca_api/common/repositories/exceptions.py delete mode 100644 monasca_api/common/repositories/fake/__init__.py delete mode 100644 monasca_api/common/repositories/fake/metrics_repository.py delete mode 100644 monasca_api/common/repositories/influxdb/__init__.py delete mode 100644 monasca_api/common/repositories/influxdb/metrics_repository.py delete mode 100644 monasca_api/common/repositories/metrics_repository.py delete mode 100644 monasca_api/common/repositories/model/__init__.py delete mode 100644 monasca_api/common/repositories/model/sub_alarm_definition.py delete mode 100644 monasca_api/common/repositories/notification_method_type_repository.py delete mode 100644 monasca_api/common/repositories/notifications_repository.py delete mode 100644 monasca_api/common/repositories/sqla/__init__.py delete mode 100644 monasca_api/common/repositories/sqla/alarm_definitions_repository.py delete mode 100644 monasca_api/common/repositories/sqla/alarms_repository.py delete mode 100644 monasca_api/common/repositories/sqla/models.py delete mode 100644 monasca_api/common/repositories/sqla/notification_method_type_repository.py delete mode 100644 monasca_api/common/repositories/sqla/notifications_repository.py delete mode 100644 monasca_api/common/repositories/sqla/sql_repository.py delete mode 100644 monasca_api/common/rest/__init__.py delete mode 100644 monasca_api/common/rest/exceptions.py delete mode 100644 monasca_api/common/rest/utils.py delete mode 100644 monasca_api/conf/__init__.py delete mode 100644 monasca_api/conf/cassandra.py delete mode 100644 monasca_api/conf/dispatcher.py delete mode 100644 monasca_api/conf/global.py delete mode 100644 monasca_api/conf/influxdb.py delete mode 100644 monasca_api/conf/kafka.py delete mode 100644 monasca_api/conf/log_publisher.py delete mode 100644 monasca_api/conf/messaging.py delete mode 100644 monasca_api/conf/repositories.py delete mode 100644 monasca_api/conf/security.py delete mode 100644 monasca_api/conf/types.py delete mode 100644 monasca_api/config.py delete mode 100644 monasca_api/db/README delete mode 100644 monasca_api/db/__init__.py delete mode 100644 monasca_api/db/alembic.ini delete mode 100644 monasca_api/db/alembic/__init__.py delete mode 100644 monasca_api/db/alembic/env.py delete mode 100644 monasca_api/db/alembic/script.py.mako delete mode 100644 monasca_api/db/alembic/versions/00597b5c8325_initial.py delete mode 100644 monasca_api/db/alembic/versions/0cce983d957a_deterministic_alarms.py delete mode 100644 monasca_api/db/alembic/versions/26083b298bb7_remove_builtin_notification_types.py delete mode 100644 monasca_api/db/alembic/versions/30181b42434b_remove_event_and_migration_tables.py delete mode 100644 monasca_api/db/alembic/versions/6b2b88f3cab4_add_sub_alarm_state.py delete mode 100644 monasca_api/db/alembic/versions/8781a256f0c1_add_inhibited_and_silenced_to_alarms.py delete mode 100644 monasca_api/db/alembic/versions/c2f85438d6f3_period_notifications.py delete mode 100644 monasca_api/db/alembic/versions/d8b801498850_remove_stream_action_types.py delete mode 100644 monasca_api/db/alembic/versions/f69cb3152a76_remove_inhibited_silenced_from_alarms.py delete mode 
100644 monasca_api/db/fingerprint.py delete mode 100644 monasca_api/expression_parser/__init__.py delete mode 100644 monasca_api/expression_parser/alarm_expr_parser.py delete mode 100644 monasca_api/hacking/__init__.py delete mode 100644 monasca_api/hacking/checks.py delete mode 100644 monasca_api/healthcheck/__init__.py delete mode 100644 monasca_api/healthcheck/alarms_db_check.py delete mode 100644 monasca_api/healthcheck/base.py delete mode 100644 monasca_api/healthcheck/kafka_check.py delete mode 100644 monasca_api/healthcheck/keystone_protocol.py delete mode 100644 monasca_api/healthcheck/metrics_db_check.py delete mode 100644 monasca_api/healthchecks.py delete mode 100644 monasca_api/policies/__init__.py delete mode 100644 monasca_api/policies/alarms.py delete mode 100644 monasca_api/policies/delegate.py delete mode 100644 monasca_api/policies/healthcheck.py delete mode 100644 monasca_api/policies/logs.py delete mode 100644 monasca_api/policies/metrics.py delete mode 100644 monasca_api/policies/notifications.py delete mode 100644 monasca_api/policies/versions.py delete mode 100644 monasca_api/tests/__init__.py delete mode 100644 monasca_api/tests/base.py delete mode 100644 monasca_api/tests/cmd/__init__.py delete mode 100644 monasca_api/tests/cmd/test_status.py delete mode 100644 monasca_api/tests/config.py delete mode 100644 monasca_api/tests/db/__init__.py delete mode 100644 monasca_api/tests/db/test_fingerprint.py delete mode 100644 monasca_api/tests/policy/__init__.py delete mode 100644 monasca_api/tests/policy/base.py delete mode 100644 monasca_api/tests/policy/test_policy.py delete mode 100644 monasca_api/tests/sqlite_alarm.sql delete mode 100644 monasca_api/tests/test_a_repository.py delete mode 100644 monasca_api/tests/test_ad_repository.py delete mode 100644 monasca_api/tests/test_alarm_expression.py delete mode 100644 monasca_api/tests/test_alarms.py delete mode 100644 monasca_api/tests/test_alarms_db_health_check.py delete mode 100644 monasca_api/tests/test_config_types.py delete mode 100644 monasca_api/tests/test_healthchecks.py delete mode 100644 monasca_api/tests/test_helpers.py delete mode 100644 monasca_api/tests/test_kafka_health_check.py delete mode 100644 monasca_api/tests/test_keystone_protocol.py delete mode 100644 monasca_api/tests/test_log_publisher.py delete mode 100644 monasca_api/tests/test_logs.py delete mode 100644 monasca_api/tests/test_message_formats.py delete mode 100644 monasca_api/tests/test_metrics.py delete mode 100644 monasca_api/tests/test_metrics_db_health_check.py delete mode 100644 monasca_api/tests/test_models_repository.py delete mode 100644 monasca_api/tests/test_nm_repository.py delete mode 100644 monasca_api/tests/test_notifications.py delete mode 100644 monasca_api/tests/test_policy.py delete mode 100644 monasca_api/tests/test_query_helpers.py delete mode 100644 monasca_api/tests/test_repositories.py delete mode 100644 monasca_api/tests/test_request.py delete mode 100644 monasca_api/tests/test_rest.py delete mode 100644 monasca_api/tests/test_sub_alarm_definition.py delete mode 100644 monasca_api/tests/test_validation.py delete mode 100644 monasca_api/tests/test_versions.py delete mode 100644 monasca_api/v2/__init__.py delete mode 100644 monasca_api/v2/common/__init__.py delete mode 100644 monasca_api/v2/common/bulk_processor.py delete mode 100644 monasca_api/v2/common/exceptions.py delete mode 100644 monasca_api/v2/common/helpers.py delete mode 100644 monasca_api/v2/common/schemas/__init__.py delete mode 100644 
monasca_api/v2/common/schemas/alarm_definition_request_body_schema.py delete mode 100644 monasca_api/v2/common/schemas/alarm_update_schema.py delete mode 100644 monasca_api/v2/common/schemas/exceptions.py delete mode 100644 monasca_api/v2/common/schemas/notifications_request_body_schema.py delete mode 100644 monasca_api/v2/common/utils.py delete mode 100644 monasca_api/v2/common/validation.py delete mode 100644 monasca_api/v2/reference/__init__.py delete mode 100644 monasca_api/v2/reference/alarm_definitions.py delete mode 100644 monasca_api/v2/reference/alarming.py delete mode 100644 monasca_api/v2/reference/alarms.py delete mode 100644 monasca_api/v2/reference/helpers.py delete mode 100644 monasca_api/v2/reference/logs.py delete mode 100644 monasca_api/v2/reference/metrics.py delete mode 100644 monasca_api/v2/reference/notifications.py delete mode 100644 monasca_api/v2/reference/notificationstype.py delete mode 100644 monasca_api/v2/reference/resource.py delete mode 100644 monasca_api/v2/reference/version_2_0.py delete mode 100644 monasca_api/v2/reference/versions.py delete mode 100644 monasca_api/version.py delete mode 100644 perf/README.md delete mode 100644 perf/monasca_query_test.jmx delete mode 100644 playbooks/docker-publish.yml delete mode 100644 pom.xml delete mode 100644 releasenotes/locale/.gitkeep delete mode 100644 releasenotes/notes/add-cassandra-loadbalancing-policy-4d5d0e7e8064a870.yaml delete mode 100644 releasenotes/notes/add-monasca-status-upgrade-check-c37e6910c2eb0150.yaml delete mode 100644 releasenotes/notes/add_legacy_kafka_client_enabled_option-7be9bc4e0fcecc70.yaml delete mode 100644 releasenotes/notes/alarms-count-dimensions-f746ca6c725335b2.yaml delete mode 100644 releasenotes/notes/apache-kafka-101-d5f3454fd445c727.yaml delete mode 100644 releasenotes/notes/cassandra-connectiontimeout-option-13bc68e2fcd56580.yaml delete mode 100644 releasenotes/notes/config_gen-ead0282db82e6c0f.yaml delete mode 100644 releasenotes/notes/deprecate-json-formatted-policy-file-2f00c9efa9e274af.yaml delete mode 100644 releasenotes/notes/disable-legacy-kafka-client-16dd1ac1894fe7a0.yaml delete mode 100644 releasenotes/notes/drop-py-2-7-aee58a9afab0e4b9.yaml delete mode 100644 releasenotes/notes/elasticsearch-cluster-upgrade-4b7bdc9c17e0169f.yaml delete mode 100644 releasenotes/notes/enable-disk-tsi-40f29262a0301531.yaml delete mode 100644 releasenotes/notes/fix-cassandra-cluster-port.yaml delete mode 100644 releasenotes/notes/fix-db-migration-issue-2006984-6676bd3a8a34c9ae.yaml delete mode 100644 releasenotes/notes/influx_1.3.3-1be2009139641336.yaml delete mode 100644 releasenotes/notes/influx_1.3.8-e6b0be63d7d7222f.yaml delete mode 100644 releasenotes/notes/influxdb-support-for-db-per-tenant-6ada0c3979de6df8.yaml delete mode 100644 releasenotes/notes/influxdb_1.3.9-666fc98a5357890d.yaml delete mode 100644 releasenotes/notes/mergeapis-baa6905c7b8fd070.yaml delete mode 100644 releasenotes/notes/os-docs-550ce9ad68a4a29e.yaml delete mode 100644 releasenotes/notes/oslo-policy-345ff286820badc6.yaml delete mode 100644 releasenotes/notes/oslo-policy-aebaebd218b9d2ff.yaml delete mode 100644 releasenotes/notes/relax-constraints-for-setting-periodic-notifications-68f6c2ed6f89ebc0.yaml delete mode 100644 releasenotes/notes/remove-database-url-option-efd6c09b771063c0.yaml delete mode 100644 releasenotes/notes/stop_pre_populating_built_in_monasca_notification_plugins_in_db-140ece49106b4a5a.yaml delete mode 100644 releasenotes/notes/storm_1.1.3-4aa16a9c648cd89b.yaml delete mode 100644 
releasenotes/notes/support-configuring-kafka-post-size-4baa10353e859b8a.yaml delete mode 100644 releasenotes/notes/support-timerange-for-dimension-names-and-values-e5a2ba64700dcd0b.yaml delete mode 100644 releasenotes/notes/upgrade-influxdb-3fa94ef4b15c8217.yaml delete mode 100644 releasenotes/notes/upgrade-storm-7b4f262d3783d589.yaml delete mode 100644 releasenotes/notes/upgrade_kafka_2.0.1-b53f180d751e47f5.yaml delete mode 100644 releasenotes/notes/use-standard-config-file-path-a4c1a29d9d3fcc07.yaml delete mode 100644 releasenotes/source/2023.1.rst delete mode 100644 releasenotes/source/2023.2.rst delete mode 100644 releasenotes/source/conf.py delete mode 100644 releasenotes/source/index.rst delete mode 100644 releasenotes/source/pike.rst delete mode 100644 releasenotes/source/queens.rst delete mode 100644 releasenotes/source/rocky.rst delete mode 100644 releasenotes/source/stein.rst delete mode 100644 releasenotes/source/train.rst delete mode 100644 releasenotes/source/unreleased.rst delete mode 100644 releasenotes/source/ussuri.rst delete mode 100644 releasenotes/source/victoria.rst delete mode 100644 releasenotes/source/wallaby.rst delete mode 100644 releasenotes/source/xena.rst delete mode 100644 releasenotes/source/yoga.rst delete mode 100644 releasenotes/source/zed.rst delete mode 100644 requirements.txt delete mode 100755 run_maven.sh delete mode 100644 setup.cfg delete mode 100644 setup.py delete mode 100644 test-requirements.txt delete mode 100755 tools/bashate.sh delete mode 100644 tox.ini diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index 595e00616..000000000 --- a/.coveragerc +++ /dev/null @@ -1,8 +0,0 @@ -[run] -branch = True -source = monasca_api -omit = monasca_api/tests/* - -[report] -ignore_errors = True - diff --git a/.stestr.conf b/.stestr.conf deleted file mode 100644 index 7df73acb2..000000000 --- a/.stestr.conf +++ /dev/null @@ -1,4 +0,0 @@ -[DEFAULT] -test_path=$OS_TEST_PATH -top_dir=./ -group_regex=monasca_api\.tests(?:\.|_)([^_]+) diff --git a/.zuul.yaml b/.zuul.yaml deleted file mode 100644 index 701ab2c96..000000000 --- a/.zuul.yaml +++ /dev/null @@ -1,262 +0,0 @@ -- job: - name: monasca-tempest-base - parent: devstack-tempest - description: | - Base job for running tempest tests with monasca-api devstack plugin. - timeout: 7800 - required-projects: - - openstack/monasca-agent - - openstack/monasca-api - - openstack/monasca-common - - openstack/monasca-grafana-datasource - - openstack/monasca-notification - - openstack/monasca-persister - - openstack/monasca-statsd - - openstack/monasca-thresh - - openstack/monasca-ui - - openstack/python-monascaclient - - openstack/tempest - - openstack/monasca-tempest-plugin - vars: - devstack_services: - # Disable unneeded services. 
- etcd3: false - g-api: false - g-reg: false - n-api: false - n-api-meta: false - n-cauth: false - n-cond: false - n-cpu: false - n-novnc: false - n-obj: false - n-sch: false - placement-api: false - s-account: false - s-container: false - s-object: false - s-proxy: false - c-api: false - c-bak: false - c-sch: false - c-vol: false - cinder: false - horizon: false - monasca-log: false - tox_envlist: all - devstack_localrc: - USE_PYTHON3: true - tempest_test_regex: monasca_tempest_tests.tests.api - devstack_plugins: - monasca-api: https://opendev.org/openstack/monasca-api - zuul_copy_output: - /var/log/kafka: logs - /var/log/storm: logs - /var/log/zookeeper: logs - /var/log/monasca/notification: logs - /etc/kafka/server.properties: logs - /etc/kafka/producer.properties: logs - /etc/kafka/consumer.properties: logs - /etc/monasca/monasca-notification.conf: logs - irrelevant-files: - - ^.*\.rst$ - - ^.*\.md$ - - ^doc/.*$ - - ^releasenotes/.*$ - -- job: - name: monasca-tempest-log-base - parent: devstack-tempest - description: | - Base job for running tempest tests with monasca-log-api devstack plugin. - timeout: 7800 - required-projects: - - openstack/monasca-api - - openstack/monasca-common - - openstack/python-monascaclient - - openstack/tempest - - openstack/monasca-tempest-plugin - - openstack/monasca-kibana-plugin - vars: - devstack_services: - monasca-log: true - monasca-log-api: true - # Disable unneeded services. - monasca-agent: false - monasca-notification: false - monasca-persister: false - monasca-thresh: false - monasca-storm: false - etcd3: false - g-api: false - g-reg: false - n-api: false - n-api-meta: false - n-cauth: false - n-cond: false - n-cpu: false - n-novnc: false - n-obj: false - n-sch: false - placement-api: false - s-account: false - s-container: false - s-object: false - s-proxy: false - c-api: false - c-bak: false - c-sch: false - c-vol: false - cinder: false - horizon: false - tox_envlist: all - devstack_localrc: - USE_PYTHON3: true - tempest_test_regex: monasca_tempest_tests.tests.log_api - devstack_plugins: - monasca-api: https://opendev.org/openstack/monasca-api - zuul_copy_output: - /var/log/kafka: logs - /var/log/monasca/notification: logs - /etc/kafka/server.properties: logs - /etc/kafka/producer.properties: logs - /etc/kafka/consumer.properties: logs - /etc/monasca/monasca-notification.conf: logs - irrelevant-files: - - ^.*\.rst$ - - ^.*\.md$ - - ^doc/.*$ - - ^releasenotes/.*$ - -- job: - name: monasca-tempest-python3-influxdb - parent: monasca-tempest-base - vars: - devstack_localrc: - MONASCA_API_IMPLEMENTATION_LANG: python - MONASCA_PERSISTER_IMPLEMENTATION_LANG: python - MONASCA_METRICS_DB: influxdb - TEMPEST_PLUGINS: /opt/stack/monasca-tempest-plugin - -- job: - name: monasca-tempest-java-influxdb - parent: monasca-tempest-base - voting: false - vars: - devstack_localrc: - MONASCA_API_IMPLEMENTATION_LANG: python - MONASCA_PERSISTER_IMPLEMENTATION_LANG: java - MONASCA_METRICS_DB: influxdb - TEMPEST_PLUGINS: /opt/stack/monasca-tempest-plugin - -- job: - name: monasca-tempest-python3-cassandra - parent: monasca-tempest-base - voting: false - vars: - devstack_localrc: - MONASCA_API_IMPLEMENTATION_LANG: python - MONASCA_PERSISTER_IMPLEMENTATION_LANG: python - MONASCA_METRICS_DB: cassandra - TEMPEST_PLUGINS: /opt/stack/monasca-tempest-plugin - tempest_test_regex: (?!.*\[.*\btimerange\b.*\])(^monasca_tempest_tests.tests.api) - -- job: - name: monasca-tempest-python3-java-cassandra - parent: monasca-tempest-base - voting: false - vars: - devstack_localrc: - 
MONASCA_API_IMPLEMENTATION_LANG: python - MONASCA_PERSISTER_IMPLEMENTATION_LANG: java - MONASCA_METRICS_DB: cassandra - TEMPEST_PLUGINS: /opt/stack/monasca-tempest-plugin - tempest_test_regex: (?!.*\[.*\btimerange\b.*\])(^monasca_tempest_tests.tests.api) - -- job: - name: monasca-tempest-log-python3-influxdb - parent: monasca-tempest-log-base - voting: false - vars: - devstack_localrc: - USE_OLD_LOG_API: false - MONASCA_API_IMPLEMENTATION_LANG: python - MONASCA_PERSISTER_IMPLEMENTATION_LANG: python - MONASCA_METRICS_DB: influxdb - TEMPEST_PLUGINS: /opt/stack/monasca-tempest-plugin - -- job: - name: monasca-tempest-log-oldapi-python3-influxdb - parent: monasca-tempest-log-base - vars: - devstack_localrc: - USE_OLD_LOG_API: true - MONASCA_API_IMPLEMENTATION_LANG: python - MONASCA_PERSISTER_IMPLEMENTATION_LANG: python - MONASCA_METRICS_DB: influxdb - TEMPEST_PLUGINS: /opt/stack/monasca-tempest-plugin - -- project: - queue: monasca - templates: - - check-requirements - - openstack-cover-jobs - - openstack-python3-jobs - - publish-openstack-docs-pti - - release-notes-jobs-python3 - check: - jobs: - - monasca-tempest-log-python3-influxdb - - monasca-tempest-python3-influxdb - - build-monasca-docker-image - gate: - jobs: - - monasca-tempest-python3-influxdb - - monasca-tempest-log-python3-influxdb - post: - jobs: - - publish-monasca-api-docker-image - periodic: - jobs: - - publish-monasca-api-docker-image - release: - jobs: - - publish-monasca-api-docker-image - -- job: - name: publish-monasca-api-docker-image - parent: build-monasca-docker-image - post-run: playbooks/docker-publish.yml - required-projects: - - openstack/monasca-common - vars: - publisher: true - secrets: - - doker_hub_login_api - -- secret: - name: doker_hub_login_api - data: - user: !encrypted/pkcs1-oaep - - JQCFFaHjbMstCHOxW9JxepuFbsdOKhs/swCkfbzgqiNDyL91e2AF6tazGf8EtMxX6brP4 - vCGWFEkMUGxBxVbpwRMUf4mSCOzs2+3/4S1NVr5+ppQEgYVDON4S5BbC2jnwawuB4DKLa - tDYkmgTMIPqxK29giQN/W/wbo0uIAObpRJuCyHY/eNxfC4fzajpGisyu2D3b606Cw15uA - KRw5GexwxN/D0pPvAgT/lwlVrm8Lz9NUziQANL+55gjo9Xs0OIu+OXwiUhZE45TmfpBLF - VO9oOWT+Fhpv2LTA8M1m5vrDo5H03qbn/MNZaiqcPYnPJtnd4pQ0o7DewUk6OBZcPnnVB - RGJm3aO0cs19s0LEwbXxe9URzWeilp/IQdRY42FySmz/OcBqicie0FEr+W0vkq+6lvkmZ - Z9UTw7WDaF0kauhS/yy8wgcbdARmKemBQWWJLZg+2HuVLFqMLlo6bMMagtIN1x/bPj+SX - 4VuPfqCJoynvroQTqY+QuDBJy9Q6IoRGlWUGyIW3kOskRZRsYpaAKZExgNkmkumJ7jJr9 - vI9DOTYxQMBlRptEAExy5OOkIypmpYqoPEF9PnutjIGEHN4gy+ZPHMwD7ht3aG+riDhHl - P1BUBH4cRVOpkHcsqP46hw1uty3QEZMjlk2j8831bfr+opagsaGTqK1CVMM6zg= - - password: !encrypted/pkcs1-oaep - - r5q4gVugtWsad/4/BeTmu6PFdlHkxe+q8R3TOnsitUrnnpQH4y0jq4edIgzQtfQUyk+bV - 9Z4mr7UkI/Yd2Ia1IgzgyjSNLxABtm1JevzLD0XeQTtr/uurTXTMoQJqcveAWWfumrlVx - sIxlr9uujByrmS1OLhFc3S0TBcpwk5hg786RNC6MJVnrQJwKy8MVZaGvh95OoGXuEGkIg - z06afI+QGcJExV1n+zp1aCX1SKIOZlEG0Y70HJYUSfBjtXTCmpybsHk7exqL2Krwb/Oyo - fsWuQbjbN1kdBh7tVg/w0KR2jSvNnh9tLDX97b3qB0iN14cqD45kApF/LclDUy4xryj6+ - ij6YbvBQLWDiMMy47FhVCZQ5sfM6MYvaDA3DpJe7DZXOQFxZp9KGB50zAlVm+PssCNY/g - kBqBJA0qd0Nwixkku8trokwShUzVFAaWfPr9zj0/lAk5jFDz42NK5pfM0o0M84Duof+7u - wxp5H4wnwPXauDbC+LhI12s3J0teDnaXNwCTTPd+NWi1w50N0JH8nDVkz+g3cZA7mAW83 - cvrC3If9SyQZi/9GNphNzVPfG6llOeAkNno5f4CXoGxuoMgw17Px7HnCO50XPYLUfORzo - eLG2QHqbHIiMgIpy30YvVKRsvjLvKId6I2s51oj918U96mt/jef6Ct0Q5jybTs= diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst deleted file mode 100644 index 702d0ea6b..000000000 --- a/CONTRIBUTING.rst +++ /dev/null @@ -1,19 +0,0 @@ -The source repository for this project can be found at: - - https://opendev.org/openstack/monasca-api.git - -Pull requests submitted 
through GitHub are not monitored. - -To start contributing to OpenStack, follow the steps in the contribution guide -to set up and use Gerrit: - - https://docs.openstack.org/contributors/code-and-documentation/quick-start.html - -Bugs should be filed on Storyboard: - -https://storyboard.openstack.org/#!/project/863 - -For more specific information about contributing to this repository, see the -Monasca contributor guide: - - https://docs.openstack.org/monasca-api/latest/contributor/contributing.html diff --git a/README.rst b/README.rst index 338fc0946..650e3f61e 100644 --- a/README.rst +++ b/README.rst @@ -1,139 +1,9 @@ -Team and repository tags -======================== +This project is no longer maintained. -.. image:: https://governance.openstack.org/tc/badges/monasca-api.svg - :target: https://governance.openstack.org/tc/reference/tags/index.html +The contents of this repository are still available in the Git +source code management system. To see the contents of this +repository before it reached its end of life, please check out the +previous commit with "git checkout HEAD^1". -.. Change things from this point on - -Overview -======== - -``monasca-api`` is a RESTful API server that is designed with a `layered -architecture`_. - -Documentation ------------- - -The full API Specification can be found in `docs/monasca-api-spec.md`_ - -Python Monasca API Implementation -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To install the Python API implementation, git clone the source and run -the following command: - -:: - - $ sudo python setup.py install - -If it installs successfully, you will need to make changes to the -following files to reflect your system settings, especially where the -kafka server is located: - -:: - - /etc/monasca/api-config.ini - /etc/monasca/monasca-api.conf - /etc/monasca/api-logging.conf - -Once the configuration files are modified to match your environment, you -can start up the server using the instructions below. - -To start the server, run the following command: - -:: - - Running the server in foreground mode - $ gunicorn -k eventlet --worker-connections=2000 --backlog=1000 --paste /etc/monasca/api-config.ini - - Running the server as a daemon - $ gunicorn -k eventlet --worker-connections=2000 --backlog=1000 --paste /etc/monasca/api-config.ini -D - -To check if the code follows the python coding style, run the following -command from the root directory of this project - -:: - - $ tox -e pep8 - -To run all the unit test cases, run the following command from the root -directory of this project - -:: - - $ tox -e py36 - -Start the Server - for Apache -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To start the server using Apache: create a modwsgi file, create a -modwsgi configuration file, and enable the wsgi module in Apache. - -The modwsgi configuration file may look something like this, and the -site will need to be enabled: - -.. code:: apache - - Listen 8070 - - <VirtualHost *:8070> - - WSGIDaemonProcess monasca-api processes=4 threads=1 socket-timeout=120 user=mon-api group=monasca python-path=/usr/local/lib/python2.7/site-packages - WSGIProcessGroup monasca-api - WSGIApplicationGroup monasca-api - WSGIScriptAlias / /usr/local/lib/python2.7/site-packages/monasca_api/api/wsgi/monasca_api.py - - WSGIPassAuthorization On - - LogLevel info - ErrorLog /var/log/monasca-api/wsgi.log - CustomLog /var/log/monasca-api/wsgi-access.log combined - - <Directory /usr/local/lib/python2.7/site-packages/monasca_api> - Require all granted - </Directory> - - SetEnv no-gzip 1 - - </VirtualHost> - -The wsgi file may look something like this: - -..
code:: py - - - from monasca_api.api import server - - application = server.get_wsgi_app(config_base_path='/etc/monasca') - -Java Implementation -~~~~~~~~~~~~~~~~~~~ - -Details on usage can be found `here`_ - -WARNING: The Java implementation of Monasca API is DEPRECATED and will -be removed in a future release. - -License -======= - -Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - -Licensed under the Apache License, Version 2.0 (the "License"); you may -not use this file except in compliance with the License. You may obtain -a copy of the License at - -:: - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - -.. _layered architecture: https://en.wikipedia.org/wiki/Multilayered_architecture -.. _docs/monasca-api-spec.md: docs/monasca-api-spec.md -.. _here: /docs/java.md +For any further questions, please email openstack-discuss@lists.openstack.org +or join #openstack-dev on OFTC. \ No newline at end of file diff --git a/api-ref/locale/.gitkeep b/api-ref/locale/.gitkeep deleted file mode 100644 index e69de29bb..000000000 diff --git a/api-ref/source/conf.py b/api-ref/source/conf.py deleted file mode 100644 index 89db10cf1..000000000 --- a/api-ref/source/conf.py +++ /dev/null @@ -1,196 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Monasca API documentation build configuration file -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = '1.6' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'os_api_ref', - 'openstackdocstheme' -] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix of source filenames. -source_suffix = '.rst' - -# The encoding of source files. -source_encoding = 'utf-8' - -# The master toctree document. -master_doc = 'index' - -# General details about the project -openstackdocs_repo_name = u'openstack/monasca-api' -openstackdocs_use_storyboard = True -copyright = u'2014-present, OpenStack Foundation' -author = u'OpenStack Foundation' - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call.
-# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = [] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'native' - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = 'openstackdocs' - -# To use the API Reference sidebar dropdown menu, -# uncomment the html_theme_options parameter. The theme -# variable, sidebar_dropdown, should be set to `api_ref`. -# Otherwise, the list of links for the User and Ops docs -# appear in the sidebar dropdown menu. -html_theme_options = {"sidebar_dropdown": "api_ref", - "sidebar_mode": "toc"} - -# A shorter title for the navigation bar. Default is the same as html_title. -html_short_title = 'API Ref' - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -# html_static_path = [] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a <link> tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served.
-# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Output file base name for HTML help builder. -htmlhelp_basename = 'MonitoringApiRefDoc' - -# -- Options for LaTeX output --------------------------------------------- - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - (master_doc, 'MonitoringApiRef.tex', u'Monitoring Service API Reference', - [author], 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - -# -- Options for Internationalization output ------------------------------ -locale_dirs = ['locale/'] diff --git a/api-ref/source/index.rst b/api-ref/source/index.rst deleted file mode 100644 index bb7ae082b..000000000 --- a/api-ref/source/index.rst +++ /dev/null @@ -1,22 +0,0 @@ -:tocdepth: 2 - -.. - Copyright 2017 Fujitsu LIMITED - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -======================= -Monitoring Service APIs -======================= - -.. rest_expand_all:: diff --git a/babel.cfg b/babel.cfg deleted file mode 100644 index 15cd6cb76..000000000 --- a/babel.cfg +++ /dev/null @@ -1,2 +0,0 @@ -[python: **.py] - diff --git a/bindep.txt b/bindep.txt deleted file mode 100644 index 2d95ccc5d..000000000 --- a/bindep.txt +++ /dev/null @@ -1,6 +0,0 @@ -# This is a cross-platform list tracking distribution packages needed for install and tests; -# see http://docs.openstack.org/infra/bindep/ for additional information. - -maven -openjdk-8-jdk -libssl-dev [platform:dpkg] diff --git a/common/build_common.sh b/common/build_common.sh deleted file mode 100755 index 03d3a88e6..000000000 --- a/common/build_common.sh +++ /dev/null @@ -1,30 +0,0 @@ -#!/bin/sh -set -x -ME=`whoami` -echo "Running as user: $ME" -MVN=$1 -VERSION=$2 -BRANCH=$3 - -check_user() { - ME=$1 - if [ "${ME}" != "zuul" ]; then - echo "\nERROR: Download monasca-common and do a mvn install to install the monasca-common jars\n" 1>&2 - exit 1 - fi -} - -BUILD_COMMON=false -POM_FILE=~/.m2/repository/monasca-common/monasca-common/${VERSION}/monasca-common-${VERSION}.pom -if [ !
-r "${POM_FILE}" ]; then - check_user "${ME}" - BUILD_COMMON=true -fi - -# This should only be done on the StackForge system -if [ "${BUILD_COMMON}" = "true" ]; then - git clone -b ${BRANCH} https://git.openstack.org/openstack/monasca-common --depth 1 - cd monasca-common - ${MVN} clean - ${MVN} install -fi diff --git a/config-generator/README.rst b/config-generator/README.rst deleted file mode 100644 index 916496812..000000000 --- a/config-generator/README.rst +++ /dev/null @@ -1,19 +0,0 @@ -================ -config-generator -================ - -To generate a sample configuration file, execute:: - - tox -e genconfig - -To generate the sample policies, execute:: - - tox -e genpolicy - -After generation you will have a sample available in -``etc/api-policy.yaml.sample``. It contains default values for all policies. -After you change it to suit your needs, you will need to point the monasca-api -configuration at the new policy file. -Head to the ``monasca-api.conf`` file and set ``policy_file`` -in the ``[oslo_policy]`` section to your desired file name -(like ``api-policy.yaml``). diff --git a/config-generator/__init__.py b/config-generator/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/config-generator/monasca-api.conf b/config-generator/monasca-api.conf deleted file mode 100644 index cf185de94..000000000 --- a/config-generator/monasca-api.conf +++ /dev/null @@ -1,10 +0,0 @@ -[DEFAULT] -output_file = etc/monasca-api.conf.sample -wrap_width = 79 -format = ini -summarize = True -namespace = monasca_api -namespace = oslo.log -namespace = oslo.db -namespace = oslo.policy -namespace = keystonemiddleware.auth_token diff --git a/config-generator/policy.conf b/config-generator/policy.conf deleted file mode 100644 index 699cb3d0f..000000000 --- a/config-generator/policy.conf +++ /dev/null @@ -1,4 +0,0 @@ -[DEFAULT] -output_file = etc/api-policy.yaml.sample -format = yaml -namespace = monasca_api diff --git a/contrib/post_test_hook.sh b/contrib/post_test_hook.sh deleted file mode 100755 index 106b8f0dd..000000000 --- a/contrib/post_test_hook.sh +++ /dev/null @@ -1,94 +0,0 @@ -# -# (C) Copyright 2015 Hewlett Packard Enterprise Development Company LP -# (C) Copyright 2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License.
-# - -sleep 6 - -function load_devstack_utilities { - source $BASE/new/devstack/stackrc - source $BASE/new/devstack/functions - source $BASE/new/devstack/openrc admin admin -} - -function setup_monasca_api { - - local constraints="-c /opt/stack/new/requirements/upper-constraints.txt" - - pushd $TEMPEST_DIR - sudo -EH pip install $constraints -r requirements.txt -r test-requirements.txt - popd; - - pushd $MONASCA_API_DIR - sudo -EH pip install $constraints -r requirements.txt -r test-requirements.txt - sudo -EH python setup.py install - popd; -} - -function set_tempest_conf { - - local conf_file=$TEMPEST_DIR/etc/tempest.conf - pushd $TEMPEST_DIR - oslo-config-generator \ - --config-file tempest/cmd/config-generator.tempest.conf \ - --output-file $conf_file - popd - - cp -f $DEST/tempest/etc/logging.conf.sample $DEST/tempest/etc/logging.conf - - # set identity section - iniset $conf_file identity admin_domain_scope True - iniset $conf_file identity user_unique_last_password_count 2 - iniset $conf_file identity user_lockout_duration 5 - iniset $conf_file identity user_lockout_failure_attempts 2 - iniset $conf_file identity uri $OS_AUTH_URL/v2.0 - iniset $conf_file identity uri_v3 $OS_AUTH_URL/v3 - iniset $conf_file identity auth_version v$OS_IDENTITY_API_VERSION - # set auth section - iniset $conf_file auth use_dynamic_credentials True - iniset $conf_file auth admin_username $OS_USERNAME - iniset $conf_file auth admin_password $OS_PASSWORD - iniset $conf_file auth admin_domain_name $OS_PROJECT_DOMAIN_ID - iniset $conf_file auth admin_project_name $OS_PROJECT_NAME - -} - -function function_exists { - declare -f -F $1 > /dev/null -} - -if ! function_exists echo_summary; then - function echo_summary { - echo $@ - } -fi - -XTRACE=$(set +o | grep xtrace) -set -o xtrace - -echo_summary "monasca's post_test_hook.sh was called..." -(set -o posix; set) - -# save ref to monasca-api dir -export MONASCA_API_DIR="$BASE/new/monasca-api" -export TEMPEST_DIR="$BASE/new/tempest" - -sudo chown -R $USER:stack $MONASCA_API_DIR -sudo chown -R $USER:stack $TEMPEST_DIR - -load_devstack_utilities -setup_monasca_api -set_tempest_conf \ No newline at end of file diff --git a/devstack/README.md b/devstack/README.md deleted file mode 100644 index e956a6d72..000000000 --- a/devstack/README.md +++ /dev/null @@ -1,124 +0,0 @@ -# Monasca DevStack Plugin - -The Monasca DevStack plugin currently only works on Ubuntu 18.04 (Bionic). -More Linux distributions will be supported in the future. - -Running the Monasca DevStack plugin requires a machine with 10GB of RAM. - -Directions for installing and running DevStack can be found here: - - https://docs.openstack.org/devstack/latest/ - -To run Monasca in DevStack, do the following three steps. - -1. Clone the DevStack repo. - -``` -git clone https://opendev.org/openstack/devstack -``` - -2. Add the following to the DevStack local.conf file in the root of the devstack directory. You may - need to create the local.conf if it does not already exist. - -``` -# BEGIN DEVSTACK LOCAL.CONF CONTENTS - -[[local|localrc]] -DATABASE_PASSWORD=secretdatabase -RABBIT_PASSWORD=secretrabbit -ADMIN_PASSWORD=secretadmin -SERVICE_PASSWORD=secretservice - -LOGFILE=$DEST/logs/stack.sh.log -LOGDIR=$DEST/logs -LOG_COLOR=False - -# The following variable allows switching between Java and Python for -# the implementation of the Monasca Persister. If this variable is not set, -# then the default is to install the Python implementation of -# the Monasca Persister.
- -# Uncomment one of the following two lines to choose Java or Python for -# the Monasca Persister. -# MONASCA_PERSISTER_IMPLEMENTATION_LANG=${MONASCA_PERSISTER_IMPLEMENTATION_LANG:-java} -MONASCA_PERSISTER_IMPLEMENTATION_LANG=${MONASCA_PERSISTER_IMPLEMENTATION_LANG:-python} - -# Uncomment one of the following two lines to choose either InfluxDB or -# Apache Cassandra. -# By default "influxdb" is selected as the metrics DB. -MONASCA_METRICS_DB=${MONASCA_METRICS_DB:-influxdb} -# MONASCA_METRICS_DB=${MONASCA_METRICS_DB:-cassandra} - -# This line will enable all of Monasca. -enable_plugin monasca-api https://opendev.org/openstack/monasca-api - -# END DEVSTACK LOCAL.CONF CONTENTS -``` - -3. Run './stack.sh' from the root of the devstack directory. - -If you want to run Monasca with the bare minimum of OpenStack components -you can add the following two lines to the local.conf file. - -``` -disable_all_services -enable_service rabbit mysql key -``` - -If you also want the Tempest tests to be installed, then add `tempest` and - `monasca-tempest-plugin`. - -``` -enable_service rabbit mysql key tempest -enable_plugin monasca-tempest-plugin https://opendev.org/openstack/monasca-tempest-plugin -``` - -To enable Horizon and the Monasca UI, add `horizon` - -``` -enable_service rabbit mysql key horizon tempest -``` - -# Using Vagrant - -Vagrant can be used to deploy a VM with DevStack and Monasca running in it using the Vagrantfile. After installing Vagrant, just run the command `vagrant up` as usual in the `../monasca-api/devstack` directory. - -To use local repositories in the devstack install, commit your changes to the master branch of the local repo, then modify the `_REPO` variable in the settings file that corresponds to the local repo to use ```file://my/local/repo/location```. -To use a local instance of the monasca-api repo, change the ```enable_plugin monasca-api https://opendev.org/openstack/monasca-api``` to ```enable_plugin monasca-api file://my/repo/is/here```. Both of these settings will only take effect on a rebuild of the devstack VM. - -## Enforcing Apache mirror - -If, for any reason, ```APACHE_MIRROR``` that is picked is not working, you can -enforce it in the following way: - -```sh -APACHE_MIRROR=http://www-us.apache.org/dist/ -``` - -## Using WSGI - -Monasca-api can be deployed with Apache using uwsgi or gunicorn. -By default monasca-api runs under uwsgi. -If you wish to use gunicorn, make sure that ```devstack/local.conf``` -contains: - -```sh -MONASCA_API_USE_MOD_WSGI=False -``` - -# License - -(c) Copyright 2015-2016 Hewlett Packard Enterprise Development Company LP -Copyright Fujitsu LIMITED 2017 - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -implied. -See the License for the specific language governing permissions and -limitations under the License. diff --git a/devstack/Vagrantfile b/devstack/Vagrantfile deleted file mode 100644 index a48eb4f92..000000000 --- a/devstack/Vagrantfile +++ /dev/null @@ -1,207 +0,0 @@ -# -*- mode: ruby -*- -# vi: set ft=ruby : - -# All Vagrant configuration is done below.
The "2" in Vagrant.configure -# configures the configuration version (we support older styles for -# backwards compatibility). Please don't change it unless you know what -# you're doing. -Vagrant.configure(2) do |config| - - if Vagrant.has_plugin?("vagrant-cachier") - config.cache.scope = :box - end - - # Handle local proxy settings - if Vagrant.has_plugin?("vagrant-proxyconf") - if ENV["http_proxy"] - config.proxy.http = ENV["http_proxy"] - end - if ENV["https_proxy"] - config.proxy.https = ENV["https_proxy"] - end - if ENV["no_proxy"] - config.proxy.no_proxy = ENV["no_proxy"] + ',192.168.10.6,10.0.2.15,127.0.0.1' - end - - end - - config.vm.hostname = "devstack" - config.vm.box = "bento/ubuntu-22.04" - config.vm.box_check_update = false - config.vm.network "private_network",ip:"192.168.10.6" - config.vm.synced_folder "~/", "/vagrant_home" - - config.vm.provider "virtualbox" do |vb| - vb.gui = false - vb.memory = "12800" - vb.cpus = 4 - # vb.customize ["modifyvm", :id, "--cpuexecutioncap", "50"] - end - - config.vm.provision "shell", privileged: false, inline: <<-SHELL - sudo apt-get update - sudo apt-get -y upgrade - sudo apt-get -y install git lnav - - if [ $http_proxy ]; then - git config --global url.https://git.openstack.org/.insteadOf https://git.openstack.org/ - sudo git config --global url.https://git.openstack.org/.insteadOf https://git.openstack.org/ - - protocol=`echo $http_proxy | awk -F: '{print $1}'` - host=`echo $http_proxy | awk -F/ '{print $3}' | awk -F: '{print $1}'` - port=`echo $http_proxy | awk -F/ '{print $3}' | awk -F: '{print $2}'` - - echo "<settings> - <proxies> - <proxy> - <id>$host</id> - <active>true</active> - <protocol>$protocol</protocol> - <host>$host</host> - <port>$port</port> - </proxy> - </proxies> - </settings> - " > ./maven_proxy_settings.xml - - mkdir -p ~/.m2 - cp ./maven_proxy_settings.xml ~/.m2/settings.xml - - sudo mkdir -p /root/.m2 - sudo cp ./maven_proxy_settings.xml /root/.m2/settings.xml - fi - - git clone https://opendev.org/openstack/devstack.git -b master --depth 1 - - # If using vagrant-cachier, restore cached downloads of 3rd-party dependencies - if [ -d "/tmp/vagrant-cache" ]; then - if [ -d "/tmp/vagrant-cache/downloads" ]; then - echo "Restoring downloads" - cp /tmp/vagrant-cache/downloads/* devstack/files - fi - if [ -f "/tmp/vagrant-cache/pip-cache.tar.gz" ]; then - echo "Restoring ~/.cache" - tar xzf /tmp/vagrant-cache/pip-cache.tar.gz -C ~ - fi - if [ -f "/tmp/vagrant-cache/nvm-cache.tar.gz" ]; then - echo "Restoring ~/.nvm/.cache" - mkdir -p ~/.nvm - tar xzf /tmp/vagrant-cache/nvm-cache.tar.gz -C ~/.nvm - fi - if [ -f "/tmp/vagrant-cache/npm-pkgs.tar.gz" ]; then - echo "Restoring ~/.npm" - tar xzf /tmp/vagrant-cache/npm-pkgs.tar.gz -C ~ - fi - if [ -f "/tmp/vagrant-cache/root-pip-cache.tar.gz" ]; then - echo "Restoring ~root/.cache" - sudo tar xzf /tmp/vagrant-cache/root-pip-cache.tar.gz -C ~root - fi - if [ -f "/tmp/vagrant-cache/root-m2-cache.tar.gz" ]; then - echo "Restoring ~root/.m2" - sudo tar xzf /tmp/vagrant-cache/root-m2-cache.tar.gz -C ~root - fi - fi - - cd devstack - echo '[[local|localrc]] - -GIT_DEPTH=1 - -SERVICE_HOST=192.168.10.6 -HOST_IP=192.168.10.6 -HOST_IP_IFACE=eth1 - -DATABASE_PASSWORD=secretdatabase -RABBIT_PASSWORD=secretrabbit -ADMIN_PASSWORD=secretadmin -SERVICE_PASSWORD=secretservice - -LOGFILE=$DEST/logs/stack.sh.log -LOGDIR=$DEST/logs -LOG_COLOR=False - -DEST=/opt/stack - -disable_all_services -enable_service rabbit key tempest horizon - -# Enable more OpenStack services if necessary: -# https://opendev.org/openstack/devstack/src/branch/master/stackrc#L56-L81 -# Nova - services to support libvirt based openstack clouds -# 
enable_service n-api n-cpu n-cond n-sch n-novnc n-cauth n-api-meta -# Placement and Glance services needed for Nova -# enable_service placement-api placement-client -# enable_service g-api g-reg -# Cinder, Neutron -# enable_service cinder c-api c-vol c-sch c-bak -# enable_service neutron q-svc q-agt q-dhcp q-meta q-l3 - -# Uncomment one of the following lines to use either MySQL or PostgreSQL -# as the RDB (relational database) backend for monasca. -enable_service mysql -#enable_service postgresql - -# Enable entire storm for the sake of local development mode -enable_service monasca-storm-nimbus -enable_service monasca-storm-supervisor -enable_service monasca-storm-ui -enable_service monasca-storm-logviewer -enable_service monasca-log - - -# Uncomment this line to disable log part -# disable_service monasca-log - -# Enable/Disable ORM support for mysql/postgresql -# HINT: If postgresql service is enabled, ORM is enforced -MONASCA_DATABASE_USE_ORM=${MONASCA_DATABASE_USE_ORM:-false} - -# The following two variables allow switching between Java and Python for the implementations -# of the Monasca API and the Monasca Persister. If these variables are not set, then the -# default is to install the Python implementations of both the Monasca API and the Monasca Persister. - -# Uncomment one of the following two lines to choose Java or Python for the Monasca API. -# MONASCA_API_IMPLEMENTATION_LANG=${MONASCA_API_IMPLEMENTATION_LANG:-java} -MONASCA_API_IMPLEMENTATION_LANG=${MONASCA_API_IMPLEMENTATION_LANG:-python} - -# Uncomment one of the following two lines to choose Java or Python for the Monasca Persister. -# MONASCA_PERSISTER_IMPLEMENTATION_LANG=${MONASCA_PERSISTER_IMPLEMENTATION_LANG:-java} -MONASCA_PERSISTER_IMPLEMENTATION_LANG=${MONASCA_PERSISTER_IMPLEMENTATION_LANG:-python} - -# Uncomment one of the following three lines to choose either InfluxDB, Vertica or Cassandra. 
-# MONASCA_METRICS_DB=${MONASCA_METRICS_DB:-vertica} -# MONASCA_METRICS_DB=${MONASCA_METRICS_DB:-cassandra} -MONASCA_METRICS_DB=${MONASCA_METRICS_DB:-influxdb} - -MONASCA_API_USE_MOD_WSGI=${MONASCA_API_USE_MOD_WSGI:-True} - -# Uncomment to stack devstack with old log-api -# USE_OLD_LOG_API=true -# enable_service monasca-log-api - -# Start devstack with services running under Python 3 -USE_PYTHON3=True - - -# Uncomment one of the following lines and modify accordingly to enable the Monasca DevStack Plugin -enable_plugin monasca-api https://opendev.org/openstack/monasca-api -# enable_plugin monasca-api file:///vagrant_home/Documents/repos/openstack/monasca-api.vertica - -# Uncomment to install tempest tests -enable_plugin monasca-tempest-plugin https://opendev.org/openstack/monasca-tempest-plugin - -' > local.conf - sudo apt purge -y python3-simplejson - ./stack.sh - # Cache downloaded files for future runs - if [ -d "/tmp/vagrant-cache" ]; then - mkdir -p /tmp/vagrant-cache/downloads - cp files/*gz files/*.deb /tmp/vagrant-cache/downloads - tar czf /tmp/vagrant-cache/pip-cache.tar.gz -C ~ .cache - tar czf /tmp/vagrant-cache/nvm-cache.tar.gz -C ~/.nvm .cache - tar czf /tmp/vagrant-cache/npm-pkgs.tar.gz -C ~ .npm - sudo tar czf /tmp/vagrant-cache/root-pip-cache.tar.gz -C ~root .cache - sudo tar czf /tmp/vagrant-cache/root-m2-cache.tar.gz -C ~root .m2 - fi - SHELL - -end diff --git a/devstack/files/cassandra/monasca_schema.cql b/devstack/files/cassandra/monasca_schema.cql deleted file mode 100644 index 969b94ee8..000000000 --- a/devstack/files/cassandra/monasca_schema.cql +++ /dev/null @@ -1,93 +0,0 @@ -// (C) Copyright 2017 SUSE LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -//   http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -// implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -// version 1.0 - -drop schema if exists monasca; - -// replication factor is set to 1 for devstack installation - -create schema monasca with replication = { 'class' : 'SimpleStrategy', 'replication_factor' : 1 }; - -create table monasca.measurements ( - metric_id blob, - region text static, - tenant_id text static, - metric_name text static, - dimensions frozen<list<text>> static, - time_stamp timestamp, - value double, - value_meta text, - primary key (metric_id, time_stamp) -) -WITH CLUSTERING ORDER BY (time_stamp ASC); - -create table monasca.metrics ( - region text, - tenant_id text, - metric_name text, - dimensions frozen<list<text>>, - dimension_names frozen<list<text>>, - metric_id blob, - created_at timestamp, - updated_at timestamp, - primary key ((region, tenant_id, metric_name), dimensions, dimension_names) -); - -CREATE CUSTOM INDEX metrics_created_at_index ON monasca.metrics (created_at) -USING 'org.apache.cassandra.index.sasi.SASIIndex'; - -CREATE CUSTOM INDEX metrics_updated_at_index ON monasca.metrics (updated_at) -USING 'org.apache.cassandra.index.sasi.SASIIndex'; - -create table monasca.dimensions ( - region text, - tenant_id text, - name text, - value text, - primary key ((region, tenant_id, name), value) -); - -create table monasca.dimensions_metrics ( - region text, - tenant_id text, - dimension_name text, - dimension_value text, - metric_name text, - primary key ((region, tenant_id, dimension_name, dimension_value), metric_name) -); - -create table monasca.metrics_dimensions ( - region text, - tenant_id text, - dimension_name text, - dimension_value text, - metric_name text, - primary key ((region, tenant_id, metric_name), dimension_name, dimension_value) -); - -create table monasca.alarm_state_history ( - tenant_id text, - alarm_id text, - time_stamp timestamp, - metric text, - old_state text, - new_state text, - reason text, - reason_data text, - sub_alarms text, - primary key ((tenant_id, alarm_id), time_stamp) -); - diff --git a/devstack/files/debs/monasca-api b/devstack/files/debs/monasca-api deleted file mode 100644 index f9b04280a..000000000 --- a/devstack/files/debs/monasca-api +++ /dev/null @@ -1,9 +0,0 @@ -openjdk-8-jdk # dist:xenial,bionic,focal,jammy,noble -openjdk-8-jre-headless # dist:bionic,focal,jammy,noble -maven # dist:xenial,bionic,focal,jammy,noble -jq # dist:xenial,bionic,focal,jammy,noble -python-dev # dist:xenial,bionic,focal -python3-dev # dist:jammy,noble -build-essential # dist:xenial,bionic,focal,jammy,noble -mailutils # dist:xenial,bionic,focal,jammy,noble -python-is-python3 # dist:focal,jammy,noble \ No newline at end of file diff --git a/devstack/files/elasticsearch/elasticsearch.yml b/devstack/files/elasticsearch/elasticsearch.yml deleted file mode 100644 index cb72492ec..000000000 --- a/devstack/files/elasticsearch/elasticsearch.yml +++ /dev/null @@ -1,88 +0,0 @@ -# ======================== Elasticsearch Configuration ========================= -# -# NOTE: Elasticsearch comes with reasonable defaults for most settings. -# Before you set out to tweak and tune the configuration, make sure you -# understand what you are trying to accomplish and the consequences. -# -# The primary way of configuring a node is via this file. This template lists -# the most important settings you may want to configure for a production cluster.
-# -# Please consult the documentation for further information on configuration options: -# https://www.elastic.co/guide/en/elasticsearch/reference/index.html -# -# ---------------------------------- Cluster ----------------------------------- -# -# Use a descriptive name for your cluster: -# -cluster.name: monasca_elastic -# -# ------------------------------------ Node ------------------------------------ -# -# Use a descriptive name for the node: -# -#node.name: node-1 -# -# Add custom attributes to the node: -# -#node.attr.rack: r1 -# -# ----------------------------------- Paths ------------------------------------ -# -# Path to directory where to store the data (separate multiple locations by comma): -# -path.data: %ES_DATA_DIR% -# -# Path to log files: -# -path.logs: %ES_LOG_DIR% -# -# ----------------------------------- Memory ----------------------------------- -# -# Lock the memory on startup: -# -#bootstrap.memory_lock: true -# -# Make sure that the heap size is set to about half the memory available -# on the system and that the owner of the process is allowed to use this -# limit. -# -# Elasticsearch performs poorly when the system is swapping the memory. -# -# ---------------------------------- Network ----------------------------------- -# -# Set the bind address to a specific IP (IPv4 or IPv6): -# -network.host: %ES_SERVICE_BIND_HOST% -# -# Set a custom port for HTTP: -# -http.port: %ES_SERVICE_BIND_PORT% -# -# For more information, consult the network module documentation. -# -# --------------------------------- Discovery ---------------------------------- -# -# Pass an initial list of hosts to perform discovery when this node is started: -# The default list of hosts is ["127.0.0.1", "[::1]"] -# -#discovery.seed_hosts: ["host1", "host2"] -# -# Bootstrap the cluster using an initial set of master-eligible nodes: -# -cluster.initial_master_nodes: ["%ES_SERVICE_BIND_HOST%"] -# -# For more information, consult the discovery and cluster formation module documentation. -# -# ---------------------------------- Gateway ----------------------------------- -# -# Block initial recovery after a full cluster restart until N nodes are started: -# -#gateway.recover_after_nodes: 3 -# -# For more information, consult the gateway module documentation. 
-# -# ---------------------------------- Various ----------------------------------- -# -# Require explicit names when deleting indices: -# -#action.destructive_requires_name: true \ No newline at end of file diff --git a/devstack/files/grafana/dashboards.d/08-openstack.json b/devstack/files/grafana/dashboards.d/08-openstack.json deleted file mode 100644 index 6edc5d2b6..000000000 --- a/devstack/files/grafana/dashboards.d/08-openstack.json +++ /dev/null @@ -1,988 +0,0 @@ -{ - "id": null, - "title": "Openstack Dashboard", - "originalTitle": "Openstack Dashboard", - "tags": [], - "style": "light", - "timezone": "browser", - "time": { - "from": "now-1h", - "to": "now" - }, - "timepicker": { - "refresh_intervals": [ - "5s", - "10s", - "30s", - "1m", - "5m", - "15m", - "30m", - "1h", - "2h", - "1d" - ], - "time_options": [ - "5m", - "15m", - "1h", - "6h", - "12h", - "24h", - "2d", - "7d", - "30d" - ] - }, - "editable": true, - "hideControls": false, - "sharedCrosshair": false, - "rows": [ - { - "title": "OpenStack Health", - "height": "100px", - "editable": true, - "collapse": false, - "collapsable": true, - "panels": [ - { - "title": "compute (Nova)", - "error": false, - "span": 2, - "editable": true, - "type": "singlestat", - "id": 1, - "links": [], - "maxDataPoints": 100, - "interval": null, - "targets": [ - { - "aggregator": "none", - "column": "value", - "metric": "http_status", - "condition_filter": true, - "dimensions": [{"key": "service", "value": "compute"}] - } - ], - "cacheTimeout": null, - "format": "none", - "prefix": "", - "postfix": "", - "nullText": null, - "valueMaps": [ - { - "value": "0", - "op": "=", - "text": "UP" - }, - { - "value": "1", - "op": "=", - "text": "DOWN" - } - ], - "nullPointMode": "connected", - "valueName": "current", - "prefixFontSize": "50%", - "valueFontSize": "80%", - "postfixFontSize": "50%", - "thresholds": "-1.0,0.2,0.8", - "colorBackground": true, - "colorValue": false, - "colors": [ - "rgba(71, 212, 59, 0.4)", - "rgba(245, 150, 40, 0.73)", - "rgba(225, 40, 40, 0.59)" - ], - "sparkline": { - "show": false, - "full": false, - "lineColor": "rgb(31, 120, 193)", - "fillColor": "rgba(31, 118, 189, 0.18)" - }, - "height": "100" - }, - { - "title": "networking (Neutron)", - "error": false, - "span": 2, - "editable": true, - "type": "singlestat", - "id": 4, - "links": [], - "maxDataPoints": 100, - "interval": null, - "targets": [ - { - "aggregator": "none", - "column": "value", - "metric": "http_status", - "condition_filter": true, - "dimensions": [{"key": "service", "value": "networking"}] - } - ], - "cacheTimeout": null, - "format": "none", - "prefix": "", - "postfix": "", - "nullText": null, - "valueMaps": [ - { - "value": "0", - "op": "=", - "text": "UP" - }, - { - "value": "1", - "op": "=", - "text": "DOWN" - } - ], - "nullPointMode": "connected", - "valueName": "current", - "prefixFontSize": "50%", - "valueFontSize": "80%", - "postfixFontSize": "50%", - "thresholds": "-1.0,0.2,0.8", - "colorBackground": true, - "colorValue": false, - "colors": [ - "rgba(71, 212, 59, 0.4)", - "rgba(245, 150, 40, 0.73)", - "rgba(225, 40, 40, 0.59)" - ], - "sparkline": { - "show": false, - "full": false, - "lineColor": "rgb(31, 120, 193)", - "fillColor": "rgba(31, 118, 189, 0.18)" - }, - "height": "100" - }, - { - "title": "image-service (Glance)", - "error": false, - "span": 2, - "editable": true, - "type": "singlestat", - "id": 5, - "links": [], - "maxDataPoints": 100, - "interval": null, - "targets": [ - { - "aggregator": "none", - "column": "value", - "metric": 
"http_status", - "condition_filter": true, - "dimensions": [{"key": "service", "value": "image-service"}] - } - ], - "cacheTimeout": null, - "format": "none", - "prefix": "", - "postfix": "", - "nullText": null, - "valueMaps": [ - { - "value": "0", - "op": "=", - "text": "UP" - }, - { - "value": "1", - "op": "=", - "text": "DOWN" - } - ], - "nullPointMode": "connected", - "valueName": "current", - "prefixFontSize": "50%", - "valueFontSize": "80%", - "postfixFontSize": "50%", - "thresholds": "-1.0,0.2,0.8", - "colorBackground": true, - "colorValue": false, - "colors": [ - "rgba(71, 212, 59, 0.4)", - "rgba(245, 150, 40, 0.73)", - "rgba(225, 40, 40, 0.59)" - ], - "sparkline": { - "show": false, - "full": false, - "lineColor": "rgb(31, 120, 193)", - "fillColor": "rgba(31, 118, 189, 0.18)" - }, - "height": "100" - }, - { - "title": "block-storage (Cinder)", - "error": false, - "span": 2, - "editable": true, - "type": "singlestat", - "id": 7, - "links": [], - "maxDataPoints": 100, - "interval": null, - "targets": [ - { - "aggregator": "none", - "column": "value", - "metric": "http_status", - "condition_filter": true, - "dimensions": [{"key": "service", "value": "block-storage"}] - } - ], - "cacheTimeout": null, - "format": "none", - "prefix": "", - "postfix": "", - "nullText": null, - "valueMaps": [ - { - "value": "0", - "op": "=", - "text": "UP" - }, - { - "value": "1", - "op": "=", - "text": "DOWN" - } - ], - "nullPointMode": "connected", - "valueName": "current", - "prefixFontSize": "50%", - "valueFontSize": "80%", - "postfixFontSize": "50%", - "thresholds": "-1.0,0.2,0.8", - "colorBackground": true, - "colorValue": false, - "colors": [ - "rgba(71, 212, 59, 0.4)", - "rgba(245, 150, 40, 0.73)", - "rgba(225, 40, 40, 0.59)" - ], - "sparkline": { - "show": false, - "full": false, - "lineColor": "rgb(31, 120, 193)", - "fillColor": "rgba(31, 118, 189, 0.18)" - }, - "height": "100" - }, - { - "title": "object-storage (Swift)", - "error": false, - "span": 2, - "editable": true, - "type": "singlestat", - "id": 8, - "links": [], - "maxDataPoints": 100, - "interval": null, - "targets": [ - { - "aggregator": "none", - "column": "value", - "metric": "http_status", - "dimensions": [{"key": "service", "value": "object-storage"}] - } - ], - "cacheTimeout": null, - "format": "none", - "prefix": "", - "postfix": "", - "nullText": null, - "valueMaps": [ - { - "value": "0", - "op": "=", - "text": "UP" - }, - { - "value": "1", - "op": "=", - "text": "DOWN" - } - ], - "nullPointMode": "connected", - "valueName": "current", - "prefixFontSize": "50%", - "valueFontSize": "80%", - "postfixFontSize": "50%", - "thresholds": "-1.0,0.2,0.8", - "colorBackground": true, - "colorValue": false, - "colors": [ - "rgba(71, 212, 59, 0.4)", - "rgba(245, 150, 40, 0.73)", - "rgba(225, 40, 40, 0.59)" - ], - "sparkline": { - "show": false, - "full": false, - "lineColor": "rgb(31, 120, 193)", - "fillColor": "rgba(31, 118, 189, 0.18)" - }, - "height": "100" - }, - { - "title": "identity-service (Keystone)", - "error": false, - "span": 2, - "editable": true, - "type": "singlestat", - "id": 12, - "links": [], - "maxDataPoints": 100, - "interval": null, - "targets": [ - { - "aggregator": "none", - "column": "value", - "metric": "http_status", - "dimensions": [{"key": "service", "value": "identity-service"}] - } - ], - "cacheTimeout": null, - "format": "none", - "prefix": "", - "postfix": "", - "nullText": null, - "valueMaps": [ - { - "value": "0", - "op": "=", - "text": "UP" - }, - { - "value": "1", - "op": "=", - "text": "DOWN" - } - ], - 
"nullPointMode": "connected", - "valueName": "current", - "prefixFontSize": "50%", - "valueFontSize": "80%", - "postfixFontSize": "50%", - "thresholds": "-1.0,0.2,0.8", - "colorBackground": true, - "colorValue": false, - "colors": [ - "rgba(71, 212, 59, 0.4)", - "rgba(245, 150, 40, 0.73)", - "rgba(225, 40, 40, 0.59)" - ], - "sparkline": { - "show": false, - "full": false, - "lineColor": "rgb(31, 120, 193)", - "fillColor": "rgba(31, 118, 189, 0.18)" - }, - "height": "100" - } - ], - "notice": false, - "showTitle": true - }, - { - "title": "System Resources", - "height": "250px", - "editable": true, - "collapse": false, - "collapsable": true, - "panels": [ - { - "span": 6, - "editable": true, - "type": "graph", - "loadingEditor": false, - "renderer": "flot", - "x-axis": true, - "y-axis": true, - "scale": 1, - "y_formats": [ - "percent", - "none" - ], - "grid": { - "max": null, - "min": null, - "threshold1": null, - "threshold2": null, - "threshold1Color": "rgba(216, 200, 27, 0.27)", - "threshold2Color": "rgba(234, 112, 112, 0.22)", - "leftMax": 100, - "rightMax": null, - "leftMin": 0, - "rightMin": null - }, - "annotate": { - "enable": false - }, - "resolution": 100, - "lines": true, - "fill": 0, - "linewidth": 1, - "points": false, - "pointradius": 5, - "bars": false, - "stack": false, - "legend": { - "show": true, - "values": false, - "min": false, - "max": false, - "current": false, - "total": false, - "avg": false, - "alignAsTable": true, - "rightSide": false - }, - "percentage": false, - "zerofill": true, - "nullPointMode": "null", - "steppedLine": false, - "tooltip": { - "value_type": "cumulative", - "query_as_alias": true, - "shared": true - }, - "targets": [ - { - "aggregator": "none", - "column": "value", - "metric": "cpu.percent", - "condition_filter": false, - "alias": "cpu.percent (@hostname)", - "dimensions": [ - { - "key": "hostname", - "value": "$all" - } - ] - }, - { - "target": "", - "aggregator": "none", - "column": "value", - "metric": "cpu.wait_perc", - "alias": "cpu.wait_perc (@hostname)", - "dimensions": [ - { - "key": "hostname", - "value": "$all" - } - ] - }, - { - "target": "", - "aggregator": "none", - "column": "value", - "metric": "cpu.system_perc", - "alias": "cpu.system_perc (@hostname)", - "dimensions": [ - { - "key": "hostname", - "value": "$all" - } - ] - }, - { - "target": "", - "aggregator": "none", - "column": "value", - "metric": "cpu.user_perc", - "alias": "cpu.user_perc (@hostname)", - "dimensions": [ - { - "key": "hostname", - "value": "$all" - } - ] - } - ], - "aliasColors": {}, - "title": "CPU usage", - "id": 2, - "seriesOverrides": [], - "links": [], - "leftYAxisLabel": "" - }, - { - "title": "Memory usage", - "error": false, - "span": 6, - "editable": true, - "type": "graph", - "id": 13, - "renderer": "flot", - "x-axis": true, - "y-axis": true, - "y_formats": [ - "none", - "none" - ], - "grid": { - "leftMax": null, - "rightMax": null, - "leftMin": 0, - "rightMin": null, - "threshold1": null, - "threshold2": null, - "threshold1Color": "rgba(216, 200, 27, 0.27)", - "threshold2Color": "rgba(234, 112, 112, 0.22)", - "thresholdLine": false - }, - "lines": true, - "fill": 0, - "linewidth": 1, - "points": false, - "pointradius": 5, - "bars": false, - "stack": false, - "percentage": false, - "legend": { - "show": true, - "values": false, - "min": false, - "max": false, - "current": false, - "total": false, - "avg": false, - "alignAsTable": true, - "rightSide": false - }, - "nullPointMode": "null", - "steppedLine": false, - "tooltip": { - "value_type": 
"cumulative", - "shared": true - }, - "targets": [ - { - "aggregator": "none", - "column": "value", - "metric": "mem.total_mb", - "merge": false, - "alias": "mem.total_mb (@hostname)", - "dimensions": [ - { - "key": "hostname", - "value": "$all" - } - ] - }, - { - "target": "", - "aggregator": "none", - "column": "value", - "metric": "mem.swap_used_mb", - "alias": "mem.swap_used_mb (@hostname)", - "dimensions": [ - { - "key": "hostname", - "value": "$all" - } - ] - }, - { - "target": "", - "aggregator": "none", - "column": "value", - "metric": "mem.used_mb", - "alias": "mem.used_mb (@hostname)", - "dimensions": [ - { - "key": "hostname", - "value": "$all" - } - ] - }, - { - "target": "", - "aggregator": "none", - "column": "value", - "metric": "mem.swap_total_mb", - "alias": "mem.swap_total_mb (@hostname)", - "dimensions": [ - { - "key": "hostname", - "value": "$all" - } - ] - }, - { - "target": "", - "aggregator": "none", - "column": "value", - "metric": "mem.used_cache", - "alias": "mem.used_cache (@hostname)", - "dimensions": [ - { - "key": "hostname", - "value": "$all" - } - ] - } - ], - "aliasColors": {}, - "seriesOverrides": [], - "links": [], - "leftYAxisLabel": "MB" - }, - { - "title": "Disk usage", - "error": false, - "span": 6, - "editable": true, - "type": "graph", - "id": 14, - "renderer": "flot", - "x-axis": true, - "y-axis": true, - "y_formats": [ - "percent", - "short" - ], - "grid": { - "leftMax": 100, - "rightMax": null, - "leftMin": 0, - "rightMin": null, - "threshold1": null, - "threshold1Color": "rgba(216, 200, 27, 0.27)", - "threshold2Color": "rgba(234, 112, 112, 0.22)" - }, - "lines": true, - "fill": 0, - "linewidth": 1, - "points": false, - "pointradius": 5, - "bars": false, - "stack": false, - "percentage": false, - "legend": { - "show": true, - "values": false, - "min": false, - "max": false, - "current": false, - "total": false, - "avg": false, - "alignAsTable": true, - "rightSide": false, - "sortDesc": null, - "sort": null - }, - "nullPointMode": "null", - "steppedLine": false, - "tooltip": { - "value_type": "cumulative", - "shared": true - }, - "targets": [ - { - "aggregator": "none", - "column": "value", - "metric": "disk.space_used_perc", - "merge": false, - "dimensions": [ - { - "key": "hostname", - "value": "$all" - }, - { - "key": "device", - "value": "rootfs" - } - ], - "alias": "disk.space_used_perc (@hostname @device)" - }, - { - "target": "", - "aggregator": "none", - "column": "value", - "metric": "disk.space_used_perc", - "dimensions": [ - { - "key": "hostname", - "value": "$all" - }, - { - "key": "mount_point", - "value": "/boot" - } - ], - "alias": "disk.space_used_perc (@hostname @mount_point)" - }, - { - "target": "", - "aggregator": "none", - "column": "value", - "metric": "disk.space_used_perc", - "dimensions": [ - { - "key": "hostname", - "value": "$all" - }, - { - "key": "mount_point", - "value": "/srv/node/swiftloopback" - } - ], - "alias": "disk.space_used_perc (@hostname @mount_point)" - } - ], - "aliasColors": {}, - "seriesOverrides": [], - "links": [] - }, - { - "title": "System load", - "error": false, - "span": 6, - "editable": true, - "type": "graph", - "id": 15, - "renderer": "flot", - "x-axis": true, - "y-axis": true, - "y_formats": [ - "none", - "short" - ], - "grid": { - "leftMax": null, - "rightMax": null, - "leftMin": 0, - "rightMin": null, - "threshold1": null, - "threshold2": null, - "threshold1Color": "rgba(216, 200, 27, 0.27)", - "threshold2Color": "rgba(234, 112, 112, 0.22)" - }, - "lines": true, - "fill": 0, - "linewidth": 
1, - "points": false, - "pointradius": 5, - "bars": false, - "stack": false, - "percentage": false, - "legend": { - "show": true, - "values": false, - "min": false, - "max": false, - "current": false, - "total": false, - "avg": false, - "alignAsTable": true - }, - "nullPointMode": "null", - "steppedLine": false, - "tooltip": { - "value_type": "cumulative", - "shared": true - }, - "targets": [ - { - "aggregator": "none", - "column": "value", - "metric": "load.avg_1_min", - "alias": "load.avg_1_min (@hostname)", - "dimensions": [ - { - "key": "hostname", - "value": "$all" - } - ] - }, - { - "target": "", - "aggregator": "none", - "column": "value", - "metric": "load.avg_5_min", - "alias": "load.avg_5_min (@hostname)", - "dimensions": [ - { - "key": "hostname", - "value": "$all" - } - ] - }, - { - "target": "", - "aggregator": "none", - "column": "value", - "metric": "load.avg_15_min", - "alias": "load.avg_15_min (@hostname)", - "dimensions": [ - { - "key": "hostname", - "value": "$all" - } - ] - } - ], - "aliasColors": {}, - "seriesOverrides": [], - "links": [] - } - ], - "notice": false, - "showTitle": true - }, - { - "title": "Network monitoring", - "height": "250px", - "editable": true, - "collapse": false, - "panels": [ - { - "title": "Network usage", - "error": false, - "span": 6, - "editable": true, - "type": "graph", - "id": 16, - - "renderer": "flot", - "x-axis": true, - "y-axis": true, - "y_formats": [ - "bps", - "short" - ], - "grid": { - "leftMax": null, - "rightMax": null, - "leftMin": 0, - "rightMin": null, - "threshold1": null, - "threshold2": null, - "threshold1Color": "rgba(216, 200, 27, 0.27)", - "threshold2Color": "rgba(234, 112, 112, 0.22)" - }, - "lines": true, - "fill": 0, - "linewidth": 1, - "points": false, - "pointradius": 5, - "bars": false, - "stack": false, - "percentage": false, - "legend": { - "show": true, - "values": false, - "min": false, - "max": false, - "current": false, - "total": false, - "avg": false, - "alignAsTable": true - }, - "nullPointMode": "connected", - "steppedLine": false, - "tooltip": { - "value_type": "cumulative", - "shared": true - }, - "targets": [ - { - "aggregator": "none", - "column": "value", - "metric": "net.in_bytes_sec", - "merge": true, - "alias": "net.in_bytes_sec (@hostname @device)", - "dimensions": [ - { - "key": "hostname", - "value": "$all" - }, - { - "key": "device", - "value": "$all" - } - ] - }, - { - "target": "", - "aggregator": "none", - "column": "value", - "merge": true, - "metric": "net.out_bytes_sec", - "alias": "net.out_bytes_sec (@hostname @device)", - "dimensions": [ - { - "key": "hostname", - "value": "$all" - }, - { - "key": "device", - "value": "$all" - } - ] - } - ], - "aliasColors": {}, - "seriesOverrides": [], - "links": [] - } - ], - "showTitle": true - } - ], - "nav": [ - { - "type": "timepicker", - "collapse": false, - "notice": false, - "enable": true, - "status": "Stable", - "time_options": [ - "15m", - "1h", - "6h", - "12h", - "24h", - "2d", - "7d", - "30d" - ], - "refresh_intervals": [ - "5s", - "10s", - "30s", - "1m", - "5m", - "15m", - "30m", - "1h", - "2h", - "1d" - ], - "now": true - } - ], - "time": { - "from": "now-1h", - "to": "now" - }, - "templating": { - "list": [], - "enable": false - }, - "annotations": { - "enable": false, - "list": [] - }, - "refresh": "30s", - "version": 6, - "hideAllLegends": false -} diff --git a/devstack/files/grafana/dashboards.d/09-monasca.json b/devstack/files/grafana/dashboards.d/09-monasca.json deleted file mode 100644 index 6d8427dab..000000000 --- 
a/devstack/files/grafana/dashboards.d/09-monasca.json +++ /dev/null @@ -1,1603 +0,0 @@ -{ - "id": null, - "title": "Monasca Monitoring", - "originalTitle": "Monasca Monitoring", - "tags": [], - "style": "dark", - "timezone": "utc", - "time": { - "from": "now-1h", - "to": "now" - }, - "timepicker": { - "refresh_intervals": [ - "5s", - "10s", - "30s", - "1m", - "5m", - "15m", - "30m", - "1h", - "2h", - "1d" - ], - "time_options": [ - "5m", - "15m", - "1h", - "6h", - "12h", - "24h", - "2d", - "7d", - "30d" - ] - }, - "editable": true, - "hideControls": false, - "sharedCrosshair": false, - "rows": [ - { - "title": "Monasca Health - metrics", - "height": "100px", - "editable": true, - "collapse": false, - "panels": [ - { - "title": "Metrics API", - "error": false, - "span": 2, - "editable": true, - "type": "singlestat", - "id": 19, - "links": [], - "maxDataPoints": 100, - "interval": null, - "targets": [ - { - "aggregator": "none", - "column": "value", - "metric": "process.pid_count", - "condition_filter": true, - "dimensions": [{"key": "service", "value": "uwsgi"}] - } - ], - "cacheTimeout": null, - "format": "none", - "prefix": "", - "postfix": "", - "nullText": null, - "valueMaps": [ - { - "value": "0", - "op": "=", - "text": "DOWN" - }, - { - "value": "1", - "op": "=", - "text": "UP" - }, - { - "value": "2", - "op": "=", - "text": "UP" - }, - { - "value": "3", - "op": "=", - "text": "UP" - }, - { - "value": "4", - "op": "=", - "text": "UP" - }, - { - "value": "5", - "op": "=", - "text": "UP" - }, - { - "value": "6", - "op": "=", - "text": "UP" - }, - { - "value": "7", - "op": "=", - "text": "UP" - }, - { - "value": "8", - "op": "=", - "text": "UP" - }, - { - "value": "9", - "op": "=", - "text": "UP" - }, - { - "value": "10", - "op": "=", - "text": "UP" - }, - { - "value": "11", - "op": "=", - "text": "UP" - }, - { - "value": "12", - "op": "=", - "text": "UP" - } - ], - "nullPointMode": "connected", - "valueName": "current", - "prefixFontSize": "50%", - "valueFontSize": "80%", - "postfixFontSize": "50%", - "thresholds": "0.0,0.2,1.0", - "colorBackground": true, - "colorValue": false, - "colors": [ - "rgba(225, 40, 40, 0.59)", - "rgba(245, 150, 40, 0.73)", - "rgba(71, 212, 59, 0.4)" - ], - "sparkline": { - "show": false, - "full": false, - "lineColor": "rgb(31, 120, 193)", - "fillColor": "rgba(31, 118, 189, 0.18)" - } - }, - { - "title": "Storm", - "error": false, - "span": 2, - "editable": true, - "type": "singlestat", - "id": 44, - "links": [], - "maxDataPoints": 100, - "interval": null, - "targets": [ - { - "aggregator": "none", - "column": "value", - "metric": "process.pid_count", - "condition_filter": true, - "dimensions": [{"key": "service", "value": "storm"}] - } - ], - "cacheTimeout": null, - "format": "none", - "prefix": "", - "postfix": "", - "nullText": null, - "valueMaps": [ - { - "value": "1", - "op": "=", - "text": "UP" - }, - { - "value": "0", - "op": "=", - "text": "DOWN" - }, - { - "value": "2", - "op": "=", - "text": "UP" - }, - { - "value": "3", - "op": "=", - "text": "UP" - }, - { - "value": "4", - "op": "=", - "text": "UP" - }, - { - "value": "5", - "op": "=", - "text": "UP" - }, - { - "value": "6", - "op": "=", - "text": "UP" - }, - { - "value": "7", - "op": "=", - "text": "UP" - }, - { - "value": "8", - "op": "=", - "text": "UP" - } - ], - "nullPointMode": "connected", - "valueName": "current", - "prefixFontSize": "50%", - "valueFontSize": "80%", - "postfixFontSize": "50%", - "thresholds": "-1.0,0.2,0.8", - "colorBackground": true, - "colorValue": false, - "colors": [ - 
"rgba(225, 40, 40, 0.59)", - "rgba(245, 150, 40, 0.73)", - "rgba(71, 212, 59, 0.4)" - ], - "sparkline": { - "show": false, - "full": false, - "lineColor": "rgb(31, 120, 193)", - "fillColor": "rgba(31, 118, 189, 0.18)" - } - }, - { - "title": "Persister", - "error": false, - "span": 2, - "editable": true, - "type": "singlestat", - "id": 21, - "links": [], - "maxDataPoints": 100, - "interval": null, - "targets": [ - { - "aggregator": "none", - "column": "value", - "metric": "process.pid_count", - "condition_filter": true, - "dimensions": [{"key": "service", "value": "persister"}] - } - ], - "cacheTimeout": null, - "format": "none", - "prefix": "", - "postfix": "", - "nullText": null, - "valueMaps": [ - { - "value": "1", - "op": "=", - "text": "UP" - }, - { - "value": "0", - "op": "=", - "text": "DOWN" - }, - { - "value": "2", - "op": "=", - "text": "UP" - }, - { - "value": "3", - "op": "=", - "text": "UP" - }, - { - "value": "4", - "op": "=", - "text": "UP" - }, - { - "value": "5", - "op": "=", - "text": "UP" - }, - { - "value": "6", - "op": "=", - "text": "UP" - }, - { - "value": "7", - "op": "=", - "text": "UP" - }, - { - "value": "8", - "op": "=", - "text": "UP" - } - ], - "nullPointMode": "connected", - "valueName": "current", - "prefixFontSize": "50%", - "valueFontSize": "80%", - "postfixFontSize": "50%", - "thresholds": "-1.0,0.2,0.8", - "colorBackground": true, - "colorValue": false, - "colors": [ - "rgba(225, 40, 40, 0.59)", - "rgba(245, 150, 40, 0.73)", - "rgba(71, 212, 59, 0.4)" - ], - "sparkline": { - "show": false, - "full": false, - "lineColor": "rgb(31, 120, 193)", - "fillColor": "rgba(31, 118, 189, 0.18)" - } - }, - { - "title": "Metrics DB", - "error": false, - "span": 2, - "editable": true, - "type": "singlestat", - "id": 51, - "links": [], - "maxDataPoints": 100, - "interval": null, - "targets": [ - { - "aggregator": "none", - "column": "value", - "metric": "http_status", - "condition_filter": true, - "dimensions": [{"key": "service", "value": "influxdb"}] - } - ], - "cacheTimeout": null, - "format": "none", - "prefix": "", - "postfix": "", - "nullText": null, - "valueMaps": [ - { - "value": "0", - "op": "=", - "text": "UP" - }, - { - "value": "1", - "op": "=", - "text": "DOWN" - } - ], - "nullPointMode": "connected", - "valueName": "current", - "prefixFontSize": "50%", - "valueFontSize": "80%", - "postfixFontSize": "50%", - "thresholds": "-1.0,0.2,0.8", - "colorBackground": true, - "colorValue": false, - "colors": [ - "rgba(71, 212, 59, 0.4)", - "rgba(245, 150, 40, 0.73)", - "rgba(225, 40, 40, 0.59)" - ], - "sparkline": { - "show": false, - "full": false, - "lineColor": "rgb(31, 120, 193)", - "fillColor": "rgba(31, 118, 189, 0.18)" - } - }, - { - "title": "Notification Engine", - "error": false, - "span": 2, - "editable": true, - "type": "singlestat", - "id": 42, - "links": [], - "maxDataPoints": 100, - "interval": null, - "targets": [ - { - "aggregator": "none", - "column": "value", - "metric": "process.pid_count", - "condition_filter": true, - "dimensions": [{"key": "service", "value": "monasca-notification"}] - } - ], - "cacheTimeout": null, - "format": "none", - "prefix": "", - "postfix": "", - "nullText": null, - "valueMaps": [ - { - "value": "1", - "op": "=", - "text": "UP" - }, - { - "value": "0", - "op": "=", - "text": "DOWN" - }, - { - "value": "2", - "op": "=", - "text": "UP" - }, - { - "value": "3", - "op": "=", - "text": "UP" - }, - { - "value": "4", - "op": "=", - "text": "UP" - }, - { - "value": "5", - "op": "=", - "text": "UP" - }, - { - "value": "6", - 
"op": "=", - "text": "UP" - }, - { - "value": "7", - "op": "=", - "text": "UP" - }, - { - "value": "8", - "op": "=", - "text": "UP" - } - ], - "nullPointMode": "connected", - "valueName": "current", - "prefixFontSize": "50%", - "valueFontSize": "80%", - "postfixFontSize": "50%", - "thresholds": "-1.0,0.2,0.8", - "colorBackground": true, - "colorValue": false, - "colors": [ - "rgba(225, 40, 40, 0.59)", - "rgba(245, 150, 40, 0.73)", - "rgba(71, 212, 59, 0.4)" - ], - "sparkline": { - "show": false, - "full": false, - "lineColor": "rgb(31, 120, 193)", - "fillColor": "rgba(31, 118, 189, 0.18)" - } - } - ], - "showTitle": true - }, - { - "title": "Monasca Health - Common", - "height": "100px", - "editable": true, - "collapse": false, - "panels": [ - { - "title": "Kafka", - "error": false, - "span": 2, - "editable": true, - "type": "singlestat", - "id": 38, - "links": [], - "maxDataPoints": 100, - "interval": null, - "targets": [ - { - "aggregator": "none", - "column": "value", - "metric": "process.pid_count", - "condition_filter": true, - "dimensions": [{"key": "service", "value": "kafka"}] - } - ], - "cacheTimeout": null, - "format": "none", - "prefix": "", - "postfix": "", - "nullText": null, - "valueMaps": [ - { - "value": "1", - "op": "=", - "text": "UP" - }, - { - "value": "0", - "op": "=", - "text": "DOWN" - }, - { - "value": "2", - "op": "=", - "text": "UP" - }, - { - "value": "3", - "op": "=", - "text": "UP" - }, - { - "value": "4", - "op": "=", - "text": "UP" - }, - { - "value": "5", - "op": "=", - "text": "UP" - } - ], - "nullPointMode": "connected", - "valueName": "current", - "prefixFontSize": "50%", - "valueFontSize": "80%", - "postfixFontSize": "50%", - "thresholds": "-1.0,0.2,0.8", - "colorBackground": true, - "colorValue": false, - "colors": [ - "rgba(225, 40, 40, 0.59)", - "rgba(245, 150, 40, 0.73)", - "rgba(71, 212, 59, 0.4)" - ], - "sparkline": { - "show": false, - "full": false, - "lineColor": "rgb(31, 120, 193)", - "fillColor": "rgba(31, 118, 189, 0.18)" - } - }, - { - "title": "ZooKeeper", - "error": false, - "span": 2, - "editable": true, - "type": "singlestat", - "id": 48, - "links": [], - "maxDataPoints": 100, - "interval": null, - "targets": [ - { - "aggregator": "none", - "column": "value", - "metric": "process.pid_count", - "condition_filter": true, - "dimensions": [{"key": "service", "value": "zookeeper"}] - } - ], - "cacheTimeout": null, - "format": "none", - "prefix": "", - "postfix": "", - "nullText": null, - "valueMaps": [ - { - "value": "1", - "op": "=", - "text": "UP" - }, - { - "value": "0", - "op": "=", - "text": "DOWN" - }, - { - "value": "2", - "op": "=", - "text": "UP" - }, - { - "value": "3", - "op": "=", - "text": "UP" - }, - { - "value": "4", - "op": "=", - "text": "UP" - }, - { - "value": "5", - "op": "=", - "text": "UP" - }, - { - "value": "6", - "op": "=", - "text": "UP" - }, - { - "value": "7", - "op": "=", - "text": "UP" - }, - { - "value": "8", - "op": "=", - "text": "UP" - } - ], - "nullPointMode": "connected", - "valueName": "current", - "prefixFontSize": "50%", - "valueFontSize": "80%", - "postfixFontSize": "50%", - "thresholds": "-1.0,0.2,0.8", - "colorBackground": true, - "colorValue": false, - "colors": [ - "rgba(225, 40, 40, 0.59)", - "rgba(245, 150, 40, 0.73)", - "rgba(71, 212, 59, 0.4)" - ], - "sparkline": { - "show": false, - "full": false, - "lineColor": "rgb(31, 120, 193)", - "fillColor": "rgba(31, 118, 189, 0.18)" - } - }, - { - "title": "MariaDB", - "error": false, - "span": 2, - "editable": true, - "type": "singlestat", - "id": 66, 
- "links": [], - "maxDataPoints": 100, - "interval": null, - "targets": [ - { - "aggregator": "none", - "column": "value", - "metric": "process.pid_count", - "condition_filter": true, - "dimensions": [{"key": "service", "value": "mysqld"}] - } - ], - "cacheTimeout": null, - "format": "none", - "prefix": "", - "postfix": "", - "nullText": null, - "valueMaps": [ - { - "value": "1", - "op": "=", - "text": "UP" - }, - { - "value": "0", - "op": "=", - "text": "DOWN" - }, - { - "value": "2", - "op": "=", - "text": "UP" - }, - { - "value": "3", - "op": "=", - "text": "UP" - }, - { - "value": "4", - "op": "=", - "text": "UP" - }, - { - "value": "5", - "op": "=", - "text": "UP" - }, - { - "value": "6", - "op": "=", - "text": "UP" - }, - { - "value": "7", - "op": "=", - "text": "UP" - }, - { - "value": "8", - "op": "=", - "text": "UP" - } - ], - "nullPointMode": "connected", - "valueName": "current", - "prefixFontSize": "50%", - "valueFontSize": "80%", - "postfixFontSize": "50%", - "thresholds": "-1.0,0.2,0.8", - "colorBackground": true, - "colorValue": false, - "colors": [ - "rgba(225, 40, 40, 0.59)", - "rgba(245, 150, 40, 0.73)", - "rgba(71, 212, 59, 0.4)" - ], - "sparkline": { - "show": false, - "full": false, - "lineColor": "rgb(31, 120, 193)", - "fillColor": "rgba(31, 118, 189, 0.18)" - } - }, - { - "title": "Statsd", - "error": false, - "span": 2, - "editable": true, - "type": "singlestat", - "id": 166, - "links": [], - "maxDataPoints": 100, - "interval": null, - "targets": [ - { - "aggregator": "none", - "column": "value", - "metric": "process.pid_count", - "condition_filter": true, - "dimensions": [{"key": "service", "value": "monasca-statsd"}] - } - ], - "cacheTimeout": null, - "format": "none", - "prefix": "", - "postfix": "", - "nullText": null, - "valueMaps": [ - { - "value": "1", - "op": "=", - "text": "UP" - }, - { - "value": "0", - "op": "=", - "text": "DOWN" - }, - { - "value": "2", - "op": "=", - "text": "UP" - }, - { - "value": "3", - "op": "=", - "text": "UP" - }, - { - "value": "4", - "op": "=", - "text": "UP" - }, - { - "value": "5", - "op": "=", - "text": "UP" - }, - { - "value": "6", - "op": "=", - "text": "UP" - }, - { - "value": "7", - "op": "=", - "text": "UP" - }, - { - "value": "8", - "op": "=", - "text": "UP" - } - ], - "nullPointMode": "connected", - "valueName": "current", - "prefixFontSize": "50%", - "valueFontSize": "80%", - "postfixFontSize": "50%", - "thresholds": "-1.0,0.2,0.8", - "colorBackground": true, - "colorValue": false, - "colors": [ - "rgba(225, 40, 40, 0.59)", - "rgba(245, 150, 40, 0.73)", - "rgba(71, 212, 59, 0.4)" - ], - "sparkline": { - "show": false, - "full": false, - "lineColor": "rgb(31, 120, 193)", - "fillColor": "rgba(31, 118, 189, 0.18)" - } - } - ], - "showTitle": true - }, - { - "title": "System resources", - "height": "250px", - "editable": true, - "collapse": false, - "panels": [ - { - "title": "CPU usage", - "error": false, - "span": 6, - "editable": true, - "type": "graph", - "id": 23, - "datasource": null, - "renderer": "flot", - "x-axis": true, - "y-axis": true, - "y_formats": [ - "percent", - "short" - ], - "grid": { - "leftMax": 100, - "rightMax": null, - "leftMin": 0, - "rightMin": null, - "threshold1": null, - "threshold2": null, - "threshold1Color": "rgba(216, 200, 27, 0.27)", - "threshold2Color": "rgba(234, 112, 112, 0.22)" - }, - "lines": true, - "fill": 0, - "linewidth": 1, - "points": false, - "pointradius": 5, - "bars": false, - "stack": false, - "percentage": false, - "legend": { - "show": true, - "values": false, - "min": 
false, - "max": false, - "current": false, - "total": false, - "avg": false, - "alignAsTable": true - }, - "nullPointMode": "connected", - "steppedLine": false, - "tooltip": { - "value_type": "cumulative", - "shared": false - }, - "targets": [ - { - "aggregator": "none", - "column": "value", - "metric": "cpu.percent", - "condition_filter": true, - "condition_key": "service", - "condition_value": "monitoring" - }, - { - "target": "", - "aggregator": "none", - "column": "value", - "metric": "cpu.user_perc", - "condition_filter": true, - "condition_key": "service", - "condition_value": "monitoring" - }, - { - "target": "", - "aggregator": "none", - "column": "value", - "metric": "cpu.system_perc", - "condition_filter": true - }, - { - "target": "", - "aggregator": "none", - "column": "value", - "metric": "cpu.wait_perc", - "condition_filter": true - } - ], - "aliasColors": {}, - "seriesOverrides": [], - "links": [] - }, - { - "title": "Memory usage", - "error": false, - "span": 6, - "editable": true, - "type": "graph", - "id": 24, - "datasource": null, - "renderer": "flot", - "x-axis": true, - "y-axis": true, - "y_formats": [ - "none", - "short" - ], - "grid": { - "leftMax": null, - "rightMax": null, - "leftMin": 0, - "rightMin": null, - "threshold1": null, - "threshold2": null, - "threshold1Color": "rgba(216, 200, 27, 0.27)", - "threshold2Color": "rgba(234, 112, 112, 0.22)" - }, - "lines": true, - "fill": 0, - "linewidth": 1, - "points": false, - "pointradius": 5, - "bars": false, - "stack": false, - "percentage": false, - "legend": { - "show": true, - "values": false, - "min": false, - "max": false, - "current": false, - "total": false, - "avg": false, - "alignAsTable": true - }, - "nullPointMode": "connected", - "steppedLine": false, - "tooltip": { - "value_type": "individual", - "shared": true - }, - "targets": [ - { - "aggregator": "none", - "column": "value", - "metric": "mem.total_mb", - "condition_filter": true - }, - { - "target": "", - "aggregator": "none", - "column": "value", - "metric": "mem.used_mb", - "period": "", - "condition_filter": true - }, - { - "target": "", - "aggregator": "none", - "column": "value", - "metric": "mem.swap_total_mb", - "condition_filter": true, - "condition_key": "service", - "condition_value": "monitoring" - }, - { - "target": "", - "aggregator": "none", - "column": "value", - "metric": "mem.swap_used_mb", - "condition_filter": true, - "condition_key": "service", - "condition_value": "monitoring" - }, - { - "target": "", - "aggregator": "none", - "column": "value", - "metric": "mem.used_cache", - "condition_filter": true, - "condition_key": "service", - "condition_value": "monitoring" - } - ], - "aliasColors": {}, - "seriesOverrides": [], - "links": [], - "leftYAxisLabel": "MB" - }, - { - "title": "Disk usage", - "error": false, - "span": 6, - "editable": true, - "type": "graph", - "id": 25, - "datasource": null, - "renderer": "flot", - "x-axis": true, - "y-axis": true, - "y_formats": [ - "percent", - "short" - ], - "grid": { - "leftMax": 100, - "rightMax": null, - "leftMin": 0, - "rightMin": null, - "threshold1": null, - "threshold2": null, - "threshold1Color": "rgba(216, 200, 27, 0.27)", - "threshold2Color": "rgba(234, 112, 112, 0.22)" - }, - "lines": true, - "fill": 0, - "linewidth": 1, - "points": false, - "pointradius": 5, - "bars": false, - "stack": false, - "percentage": false, - "legend": { - "show": true, - "values": false, - "min": false, - "max": false, - "current": false, - "total": false, - "avg": false, - "alignAsTable": true - }, - 
"nullPointMode": "connected", - "steppedLine": false, - "tooltip": { - "value_type": "cumulative", - "shared": true - }, - "targets": [ - { - "aggregator": "none", - "column": "value", - "metric": "disk.space_used_perc", - "condition_filter": true, - "condition_key": "service", - "condition_value": "monitoring", - "dimensions": [ - { - "key": "mount_point", - "value": "/boot" - } - ] - } - ], - "aliasColors": {}, - "seriesOverrides": [], - "links": [] - }, - { - "title": "System load", - "error": false, - "span": 6, - "editable": true, - "type": "graph", - "id": 26, - "datasource": null, - "renderer": "flot", - "x-axis": true, - "y-axis": true, - "y_formats": [ - "none", - "short" - ], - "grid": { - "leftMax": null, - "rightMax": null, - "leftMin": 0, - "rightMin": null, - "threshold1": null, - "threshold2": null, - "threshold1Color": "rgba(216, 200, 27, 0.27)", - "threshold2Color": "rgba(234, 112, 112, 0.22)" - }, - "lines": true, - "fill": 0, - "linewidth": 1, - "points": false, - "pointradius": 5, - "bars": false, - "stack": false, - "percentage": false, - "legend": { - "show": true, - "values": false, - "min": false, - "max": false, - "current": false, - "total": false, - "avg": false, - "alignAsTable": true - }, - "nullPointMode": "connected", - "steppedLine": false, - "tooltip": { - "value_type": "cumulative", - "shared": true - }, - "targets": [ - { - "aggregator": "none", - "column": "value", - "metric": "load.avg_1_min", - "period": "", - "condition_filter": true, - "condition_key": "service", - "condition_value": "monitoring" - }, - { - "aggregator": "none", - "column": "value", - "condition_filter": true, - "condition_key": "service", - "condition_value": "monitoring", - "metric": "load.avg_5_min", - "period": "300", - "refId": "B", - "dimensions": [], - "error": "" - }, - { - "aggregator": "none", - "column": "value", - "condition_filter": true, - "condition_key": "service", - "condition_value": "monitoring", - "metric": "load.avg_15_min", - "period": "300", - "refId": "C", - "dimensions": [], - "error": "" - } - ], - "aliasColors": {}, - "seriesOverrides": [], - "links": [] - } - ], - "showTitle": true - }, - { - "title": "Network Monitoring", - "height": "250px", - "editable": true, - "collapse": false, - "panels": [ - { - "title": "Network usage", - "error": false, - "span": 6, - "editable": true, - "type": "graph", - "id": 61, - "datasource": null, - "renderer": "flot", - "x-axis": true, - "y-axis": true, - "y_formats": [ - "bps", - "short" - ], - "grid": { - "leftMax": null, - "rightMax": null, - "leftMin": 0, - "rightMin": null, - "threshold1": null, - "threshold2": null, - "threshold1Color": "rgba(216, 200, 27, 0.27)", - "threshold2Color": "rgba(234, 112, 112, 0.22)" - }, - "lines": true, - "fill": 0, - "linewidth": 1, - "points": false, - "pointradius": 5, - "bars": false, - "stack": false, - "percentage": false, - "legend": { - "show": true, - "values": false, - "min": false, - "max": false, - "current": false, - "total": false, - "avg": false, - "alignAsTable": true - }, - "nullPointMode": "connected", - "steppedLine": false, - "tooltip": { - "value_type": "cumulative", - "shared": true - }, - "targets": [ - { - "aggregator": "none", - "column": "value", - "metric": "net.in_bytes_sec", - "merge": true, - "dimensions": "", - "period": "", - "condition_filter": true, - "condition_key": "service", - "condition_value": "monitoring" - }, - { - "target": "", - "aggregator": "none", - "column": "value", - "metric": "net.out_bytes_sec", - "merge": true, - "dimensions": "", - 
"period": "", - "condition_filter": true, - "condition_key": "service", - "condition_value": "monitoring" - } - ], - "aliasColors": {}, - "seriesOverrides": [], - "links": [] - } - ], - "showTitle": true - }, - { - "title": "Kafka", - "height": "250px", - "editable": true, - "collapse": false, - "panels": [ - { - "title": "CPU usage", - "error": false, - "span": 4, - "editable": true, - "type": "graph", - "id": 60, - "datasource": null, - "renderer": "flot", - "x-axis": true, - "y-axis": true, - "y_formats": [ - "percent", - "short" - ], - "grid": { - "leftMax": null, - "rightMax": null, - "leftMin": 0, - "rightMin": null, - "threshold1": null, - "threshold2": null, - "threshold1Color": "rgba(216, 200, 27, 0.27)", - "threshold2Color": "rgba(234, 112, 112, 0.22)" - }, - "lines": true, - "fill": 0, - "linewidth": 1, - "points": false, - "pointradius": 5, - "bars": false, - "stack": false, - "percentage": false, - "legend": { - "show": true, - "values": false, - "min": false, - "max": false, - "current": false, - "total": false, - "avg": false, - "alignAsTable": true - }, - "nullPointMode": "connected", - "steppedLine": false, - "tooltip": { - "value_type": "cumulative", - "shared": true - }, - "targets": [ - { - "aggregator": "none", - "column": "value", - "metric": "process.cpu_perc", - "condition_filter": true, - "dimensions": [{"key": "service", "value": "kafka"}] - } - ], - "aliasColors": {}, - "seriesOverrides": [], - "links": [] - }, - { - "title": "Allocated memory", - "error": false, - "span": 4, - "editable": true, - "type": "graph", - "id": 59, - "datasource": null, - "renderer": "flot", - "x-axis": true, - "y-axis": true, - "y_formats": [ - "none", - "short" - ], - "grid": { - "leftMax": null, - "rightMax": null, - "leftMin": 0, - "rightMin": null, - "threshold1": null, - "threshold2": null, - "threshold1Color": "rgba(216, 200, 27, 0.27)", - "threshold2Color": "rgba(234, 112, 112, 0.22)" - }, - "lines": true, - "fill": 0, - "linewidth": 1, - "points": false, - "pointradius": 5, - "bars": false, - "stack": false, - "percentage": false, - "legend": { - "show": true, - "values": false, - "min": false, - "max": false, - "current": false, - "total": false, - "avg": false, - "alignAsTable": true - }, - "nullPointMode": "connected", - "steppedLine": false, - "tooltip": { - "value_type": "cumulative", - "shared": true - }, - "targets": [ - { - "aggregator": "none", - "column": "value", - "metric": "process.mem.rss_mbytes", - "condition_filter": true, - "dimensions": [{"key": "service", "value": "kafka"}] - } - ], - "aliasColors": {}, - "seriesOverrides": [], - "leftYAxisLabel": "MB", - "links": [] - }, - { - "title": "PID count", - "error": false, - "span": 4, - "editable": true, - "type": "graph", - "id": 89, - "datasource": null, - "renderer": "flot", - "x-axis": true, - "y-axis": true, - "y_formats": [ - "none", - "short" - ], - "grid": { - "leftMax": null, - "rightMax": null, - "leftMin": 0, - "rightMin": null, - "threshold1": null, - "threshold2": null, - "threshold1Color": "rgba(216, 200, 27, 0.27)", - "threshold2Color": "rgba(234, 112, 112, 0.22)" - }, - "lines": true, - "fill": 0, - "linewidth": 1, - "points": false, - "pointradius": 5, - "bars": false, - "stack": false, - "percentage": false, - "legend": { - "show": true, - "values": false, - "min": false, - "max": false, - "current": false, - "total": false, - "avg": false, - "alignAsTable": true - }, - "nullPointMode": "connected", - "steppedLine": true, - "tooltip": { - "value_type": "cumulative", - "shared": true - }, - 
"targets": [ - { - "aggregator": "none", - "column": "value", - "metric": "process.pid_count", - "condition_filter": true, - "dimensions": [{"key": "service", "value": "kafka"}] - } - ], - "aliasColors": {}, - "seriesOverrides": [], - "links": [] - }, - { - "title": "Consumer lag", - "error": false, - "span": 12, - "editable": true, - "type": "graph", - "id": 58, - "datasource": null, - "renderer": "flot", - "x-axis": true, - "y-axis": true, - "y_formats": [ - "none", - "short" - ], - "grid": { - "leftMax": null, - "rightMax": null, - "leftMin": 0, - "rightMin": null, - "threshold1": null, - "threshold2": null, - "threshold1Color": "rgba(216, 200, 27, 0.27)", - "threshold2Color": "rgba(234, 112, 112, 0.22)" - }, - "lines": true, - "fill": 0, - "linewidth": 1, - "points": false, - "pointradius": 5, - "bars": false, - "stack": false, - "percentage": false, - "legend": { - "show": true, - "values": false, - "min": false, - "max": false, - "current": false, - "total": false, - "avg": false, - "alignAsTable": true - }, - "nullPointMode": "connected", - "steppedLine": false, - "tooltip": { - "value_type": "cumulative", - "shared": true - }, - "targets": [ - { - "aggregator": "none", - "column": "value", - "metric": "kafka.consumer_lag", - "alias": "kafka.consumer_lag: @consumer_group", - "refId": "A", - "period": "300", - "dimensions": [ - { - "key": "consumer_group", - "value": "$all" - } - ], - "error": "" - } - ], - "aliasColors": {}, - "seriesOverrides": [], - "links": [] - } - ], - "showTitle": true - }, - { - "title": "Components", - "panels": [ - { - "id": 167, - "title": "Common", - "span": 4, - "type": "dashlist", - "query": "", - "limit": 10, - "tags": [ - "common" - ], - "recent": false, - "search": true, - "starred": false, - "headings": false, - "links": [] - }, - { - "id": 168, - "title": "Metrics", - "span": 4, - "type": "dashlist", - "query": "", - "limit": 10, - "tags": [ - "metrics" - ], - "recent": false, - "search": true, - "starred": false, - "headings": false, - "links": [] - } - ] - } - ], - "time": { - "from": "now-1h", - "to": "now" - }, - "templating": { - "list": [], - "enable": false - }, - "annotations": { - "enable": false, - "list": [] - }, - "refresh": "30s", - "version": 6, - "hideAllLegends": false -} diff --git a/devstack/files/grafana/dashboards.d/11-maradb.json b/devstack/files/grafana/dashboards.d/11-maradb.json deleted file mode 100644 index f52b13065..000000000 --- a/devstack/files/grafana/dashboards.d/11-maradb.json +++ /dev/null @@ -1,709 +0,0 @@ -{ - "id": null, - "title": "MariaDB", - "tags": [ - "common" - ], - "style": "dark", - "timezone": "browser", - "editable": true, - "sharedCrosshair": false, - "hideControls": false, - "time": { - "from": "now-1h", - "to": "now" - }, - "timepicker": { - "refresh_intervals": [ - "5s", - "10s", - "30s", - "1m", - "5m", - "15m", - "30m", - "1h", - "2h", - "1d" - ], - "time_options": [ - "5m", - "15m", - "1h", - "6h", - "12h", - "24h", - "2d", - "7d", - "30d" - ] - }, - "templating": { - "list": [] - }, - "annotations": { - "list": [] - }, - "schemaVersion": 13, - "version": 15, - "links": [], - "gnetId": null, - "rows": [ - { - "title": "Dashboard Row", - "panels": [ - { - "cacheTimeout": null, - "colorBackground": true, - "colorValue": false, - "colors": [ - "rgba(245, 54, 54, 0.9)", - "rgba(237, 129, 40, 0.89)", - "rgba(50, 172, 45, 0.97)" - ], - "format": "none", - "gauge": { - "maxValue": 100, - "minValue": 0, - "show": false, - "thresholdLabels": false, - "thresholdMarkers": true - }, - "id": 1, - "interval": 
null, - "links": [], - "maxDataPoints": 100, - "nullPointMode": "connected", - "nullText": null, - "postfix": "", - "postfixFontSize": "50%", - "prefix": "", - "prefixFontSize": "50%", - "span": 4, - "sparkline": { - "fillColor": "rgba(31, 118, 189, 0.18)", - "full": false, - "lineColor": "rgb(31, 120, 193)", - "show": false - }, - "targets": [ - { - "aggregator": "none", - "dimensions": [ - { - "key": "service", - "value": "mysqld" - } - ], - "error": "", - "metric": "process.pid_count", - "period": "300", - "refId": "A" - } - ], - "thresholds": "0.2,0.8", - "title": "MariaDB", - "type": "singlestat", - "valueFontSize": "80%", - "valueMaps": [ - { - "value": "0", - "op": "=", - "text": "DOWN" - }, - { - "value": "1", - "op": "=", - "text": "UP" - }, - { - "value": "2", - "op": "=", - "text": "UP" - }, - { - "value": "3", - "op": "=", - "text": "UP" - }, - { - "value": "4", - "op": "=", - "text": "UP" - }, - { - "value": "5", - "op": "=", - "text": "UP" - }, - { - "value": "6", - "op": "=", - "text": "UP" - }, - { - "value": "7", - "op": "=", - "text": "UP" - }, - { - "value": "8", - "op": "=", - "text": "UP" - }, - { - "value": "9", - "op": "=", - "text": "UP" - }, - { - "value": "10", - "op": "=", - "text": "UP" - }, - { - "value": "11", - "op": "=", - "text": "UP" - }, - { - "value": "12", - "op": "=", - "text": "UP" - } - ], - "valueName": "current" - }, - { - "aliasColors": {}, - "bars": false, - "fill": 1, - "id": 2, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "mysqld" - } - ], - "error": "", - "metric": "process.cpu_perc", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "CPU", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "percent", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "fill": 1, - "id": 3, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "mysqld" - } - ], - "error": "", - "metric": "process.mem.rss_mbytes", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Memory", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "bytes", - "label": null, - "logBase": 1, - "max": null, - "min": 
null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": "250px", - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - }, - { - "title": "Dashboard Row", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "fill": 1, - "id": 4, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [], - "error": "", - "metric": "mysql.innodb.data_reads", - "period": "300", - "refId": "A" - }, - { - "aggregator": "avg", - "dimensions": [], - "error": "", - "metric": "mysql.innodb.data_writes", - "period": "300", - "refId": "B" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Data Read/Write", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "fill": 1, - "id": 7, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "mysqld" - } - ], - "error": "", - "metric": "process.io.read_kbytes", - "period": "300", - "refId": "A" - }, - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "mysqld" - } - ], - "error": "", - "metric": "process.io.write_kbytes", - "period": "300", - "refId": "B" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "IO", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": 250, - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - }, - { - "title": "Dashboard Row", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "fill": 1, - "id": 5, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - 
"span": 12, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [], - "error": "", - "metric": "mysql.innodb.buffer_pool_free", - "period": "300", - "refId": "A" - }, - { - "aggregator": "avg", - "dimensions": [], - "error": "", - "metric": "mysql.innodb.buffer_pool_total", - "period": "300", - "refId": "B" - }, - { - "aggregator": "avg", - "dimensions": [], - "error": "", - "metric": "mysql.innodb.buffer_pool_used", - "period": "300", - "refId": "C" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Buffer Pool Size", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "bytes", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": 250, - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - }, - { - "title": "Dashboard Row", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "fill": 1, - "id": 6, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 12, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [], - "error": "", - "metric": "mysql.innodb.row_lock_time", - "period": "300", - "refId": "A" - }, - { - "aggregator": "avg", - "dimensions": [], - "error": "", - "metric": "mysql.innodb.row_lock_waits", - "period": "300", - "refId": "B" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Lock", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "ms", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": 250, - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - } - ] -} diff --git a/devstack/files/grafana/dashboards.d/12-api.json b/devstack/files/grafana/dashboards.d/12-api.json deleted file mode 100644 index bf520f27e..000000000 --- a/devstack/files/grafana/dashboards.d/12-api.json +++ /dev/null @@ -1,624 +0,0 @@ -{ - "id": null, - "title": "Monasca API", - "tags": [ - "metrics" - ], - "style": "dark", - "timezone": "browser", - "editable": true, - "sharedCrosshair": false, - "hideControls": false, - "time": { - "from": "now-1h", - "to": "now" - }, - "timepicker": { - "refresh_intervals": [ - "5s", - "10s", - "30s", - "1m", - "5m", - "15m", - "30m", - "1h", - "2h", - "1d" - ], - "time_options": [ - "5m", - "15m", - "1h", - "6h", - "12h", - "24h", - "2d", - "7d", - "30d" - ] - }, - "templating": { - "list": [] - }, - "annotations": { - "list": [] - }, - "schemaVersion": 13, - "version": 7, - "links": [], - "gnetId": null, - "rows": [ - { - "title": "Dashboard Row", - 
"panels": [ - { - "cacheTimeout": null, - "colorBackground": true, - "colorValue": false, - "colors": [ - "rgba(225, 40, 40, 0.59)", - "rgba(245, 150, 40, 0.73)", - "rgba(71, 212, 59, 0.4)" - ], - "format": "none", - "gauge": { - "maxValue": 100, - "minValue": 0, - "show": false, - "thresholdLabels": false, - "thresholdMarkers": true - }, - "id": 1, - "interval": null, - "links": [], - "mappingType": 1, - "mappingTypes": [ - ], - "maxDataPoints": 100, - "nullPointMode": "connected", - "nullText": null, - "postfix": "", - "postfixFontSize": "50%", - "prefix": "", - "prefixFontSize": "50%", - "span": 4, - "sparkline": { - "fillColor": "rgba(31, 118, 189, 0.18)", - "full": false, - "lineColor": "rgb(31, 120, 193)", - "show": false - }, - "targets": [ - { - "aggregator": "none", - "dimensions": [ - { - "key": "service", - "value": "uwsgi" - } - ], - "error": "", - "metric": "process.pid_count", - "period": "300", - "refId": "A" - } - ], - "thresholds": "0.2,0.8", - "title": "Monasca API", - "type": "singlestat", - "valueFontSize": "80%", - "valueMaps": [ - { - "value": "0", - "op": "=", - "text": "DOWN" - }, - { - "value": "1", - "op": "=", - "text": "UP" - }, - { - "value": "2", - "op": "=", - "text": "UP" - }, - { - "value": "3", - "op": "=", - "text": "UP" - }, - { - "value": "4", - "op": "=", - "text": "UP" - }, - { - "value": "5", - "op": "=", - "text": "UP" - }, - { - "value": "6", - "op": "=", - "text": "UP" - }, - { - "value": "7", - "op": "=", - "text": "UP" - }, - { - "value": "8", - "op": "=", - "text": "UP" - }, - { - "value": "9", - "op": "=", - "text": "UP" - }, - { - "value": "10", - "op": "=", - "text": "UP" - }, - { - "value": "11", - "op": "=", - "text": "UP" - }, - { - "value": "12", - "op": "=", - "text": "UP" - } - ], - "valueName": "current" - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 2, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "monasca-api" - } - ], - "error": "", - "metric": "process.cpu_perc", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "CPU", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "percent", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 3, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": 
"monasca-api" - } - ], - "error": "", - "metric": "process.mem.rss_mbytes", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Memory", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "bytes", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": "250px", - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - }, - { - "title": "Dashboard Row", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 4, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "monasca-api" - } - ], - "error": "", - "metric": "process.io.read_count", - "period": "300", - "refId": "A" - }, - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "monasca-api" - } - ], - "error": "", - "metric": "process.io.write_count", - "period": "300", - "refId": "B" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "IO Count", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 5, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "monasca-api" - } - ], - "error": "", - "metric": "process.io.read_kbytes", - "period": "300", - "refId": "A" - }, - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "monasca-api" - } - ], - "error": "", - "metric": "process.io.write_kbytes", - "period": "300", - "refId": "B" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "IO Read/Write [kB]", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "kbytes", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - 
"logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": 250, - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - }, - { - "title": "Dashboard Row", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 6, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 12, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "monasca-api" - } - ], - "error": "", - "metric": "process.open_file_descriptors", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Open File Descriptors", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": 250, - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - } - ] -} diff --git a/devstack/files/grafana/dashboards.d/13-storm.json b/devstack/files/grafana/dashboards.d/13-storm.json deleted file mode 100644 index ce39304c3..000000000 --- a/devstack/files/grafana/dashboards.d/13-storm.json +++ /dev/null @@ -1,624 +0,0 @@ -{ - "id": null, - "title": "Storm", - "tags": [ - "metrics" - ], - "style": "dark", - "timezone": "browser", - "editable": true, - "sharedCrosshair": false, - "hideControls": false, - "time": { - "from": "now-1h", - "to": "now" - }, - "timepicker": { - "refresh_intervals": [ - "5s", - "10s", - "30s", - "1m", - "5m", - "15m", - "30m", - "1h", - "2h", - "1d" - ], - "time_options": [ - "5m", - "15m", - "1h", - "6h", - "12h", - "24h", - "2d", - "7d", - "30d" - ] - }, - "templating": { - "list": [] - }, - "annotations": { - "list": [] - }, - "schemaVersion": 13, - "version": 7, - "links": [], - "gnetId": null, - "rows": [ - { - "title": "Dashboard Row", - "panels": [ - { - "cacheTimeout": null, - "colorBackground": true, - "colorValue": false, - "colors": [ - "rgba(225, 40, 40, 0.59)", - "rgba(245, 150, 40, 0.73)", - "rgba(71, 212, 59, 0.4)" - ], - "format": "none", - "gauge": { - "maxValue": 100, - "minValue": 0, - "show": false, - "thresholdLabels": false, - "thresholdMarkers": true - }, - "id": 1, - "interval": null, - "links": [], - "mappingType": 1, - "mappingTypes": [ - ], - "maxDataPoints": 100, - "nullPointMode": "connected", - "nullText": null, - "postfix": "", - "postfixFontSize": "50%", - "prefix": "", - "prefixFontSize": "50%", - "span": 4, - "sparkline": { - "fillColor": "rgba(31, 118, 189, 0.18)", - "full": false, - "lineColor": "rgb(31, 120, 193)", - "show": false - }, - "targets": [ - { - "aggregator": "none", - "dimensions": [ - { - "key": "service", - "value": "storm" - } - ], - "error": "", - "metric": "process.pid_count", - "period": "300", - "refId": "A" - } - ], - "thresholds": "0.2,0.8", - "title": "Storm", - "type": 
"singlestat", - "valueFontSize": "80%", - "valueMaps": [ - { - "value": "0", - "op": "=", - "text": "DOWN" - }, - { - "value": "1", - "op": "=", - "text": "UP" - }, - { - "value": "2", - "op": "=", - "text": "UP" - }, - { - "value": "3", - "op": "=", - "text": "UP" - }, - { - "value": "4", - "op": "=", - "text": "UP" - }, - { - "value": "5", - "op": "=", - "text": "UP" - }, - { - "value": "6", - "op": "=", - "text": "UP" - }, - { - "value": "7", - "op": "=", - "text": "UP" - }, - { - "value": "8", - "op": "=", - "text": "UP" - }, - { - "value": "9", - "op": "=", - "text": "UP" - }, - { - "value": "10", - "op": "=", - "text": "UP" - }, - { - "value": "11", - "op": "=", - "text": "UP" - }, - { - "value": "12", - "op": "=", - "text": "UP" - } - ], - "valueName": "current" - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 2, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "storm" - } - ], - "error": "", - "metric": "process.cpu_perc", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "CPU", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "percent", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 3, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "storm" - } - ], - "error": "", - "metric": "process.mem.rss_mbytes", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Memory", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "bytes", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": "250px", - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - }, - { - "title": "Dashboard Row", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 4, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - 
"values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "storm" - } - ], - "error": "", - "metric": "process.io.read_count", - "period": "300", - "refId": "A" - }, - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "storm" - } - ], - "error": "", - "metric": "process.io.write_count", - "period": "300", - "refId": "B" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "IO Count", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 5, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "storm" - } - ], - "error": "", - "metric": "process.io.read_kbytes", - "period": "300", - "refId": "A" - }, - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "storm" - } - ], - "error": "", - "metric": "process.io.write_kbytes", - "period": "300", - "refId": "B" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "IO Read/Write [kB]", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "kbytes", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": 250, - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - }, - { - "title": "Dashboard Row", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 6, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 12, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "storm" - } - ], - "error": "", - "metric": "process.open_file_descriptors", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Open File Descriptors", - "tooltip": { - 
"shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": 250, - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - } - ] -} diff --git a/devstack/files/grafana/dashboards.d/14-persister.json b/devstack/files/grafana/dashboards.d/14-persister.json deleted file mode 100644 index 993c99369..000000000 --- a/devstack/files/grafana/dashboards.d/14-persister.json +++ /dev/null @@ -1,624 +0,0 @@ -{ - "id": null, - "title": "Persister", - "tags": [ - "metrics" - ], - "style": "dark", - "timezone": "browser", - "editable": true, - "sharedCrosshair": false, - "hideControls": false, - "time": { - "from": "now-1h", - "to": "now" - }, - "timepicker": { - "refresh_intervals": [ - "5s", - "10s", - "30s", - "1m", - "5m", - "15m", - "30m", - "1h", - "2h", - "1d" - ], - "time_options": [ - "5m", - "15m", - "1h", - "6h", - "12h", - "24h", - "2d", - "7d", - "30d" - ] - }, - "templating": { - "list": [] - }, - "annotations": { - "list": [] - }, - "schemaVersion": 13, - "version": 7, - "links": [], - "gnetId": null, - "rows": [ - { - "title": "Dashboard Row", - "panels": [ - { - "cacheTimeout": null, - "colorBackground": true, - "colorValue": false, - "colors": [ - "rgba(225, 40, 40, 0.59)", - "rgba(245, 150, 40, 0.73)", - "rgba(71, 212, 59, 0.4)" - ], - "format": "none", - "gauge": { - "maxValue": 100, - "minValue": 0, - "show": false, - "thresholdLabels": false, - "thresholdMarkers": true - }, - "id": 1, - "interval": null, - "links": [], - "mappingType": 1, - "mappingTypes": [ - ], - "maxDataPoints": 100, - "nullPointMode": "connected", - "nullText": null, - "postfix": "", - "postfixFontSize": "50%", - "prefix": "", - "prefixFontSize": "50%", - "span": 4, - "sparkline": { - "fillColor": "rgba(31, 118, 189, 0.18)", - "full": false, - "lineColor": "rgb(31, 120, 193)", - "show": false - }, - "targets": [ - { - "aggregator": "none", - "dimensions": [ - { - "key": "service", - "value": "persister" - } - ], - "error": "", - "metric": "process.pid_count", - "period": "300", - "refId": "A" - } - ], - "thresholds": "0.2,0.8", - "title": "Persister", - "type": "singlestat", - "valueFontSize": "80%", - "valueMaps": [ - { - "value": "0", - "op": "=", - "text": "DOWN" - }, - { - "value": "1", - "op": "=", - "text": "UP" - }, - { - "value": "2", - "op": "=", - "text": "UP" - }, - { - "value": "3", - "op": "=", - "text": "UP" - }, - { - "value": "4", - "op": "=", - "text": "UP" - }, - { - "value": "5", - "op": "=", - "text": "UP" - }, - { - "value": "6", - "op": "=", - "text": "UP" - }, - { - "value": "7", - "op": "=", - "text": "UP" - }, - { - "value": "8", - "op": "=", - "text": "UP" - }, - { - "value": "9", - "op": "=", - "text": "UP" - }, - { - "value": "10", - "op": "=", - "text": "UP" - }, - { - "value": "11", - "op": "=", - "text": "UP" - }, - { - "value": "12", - "op": "=", - "text": "UP" - } - ], - "valueName": "current" - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 2, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, 
- "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "persister" - } - ], - "error": "", - "metric": "process.cpu_perc", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "CPU", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "percent", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 3, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "persister" - } - ], - "error": "", - "metric": "process.mem.rss_mbytes", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Memory", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "bytes", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": "250px", - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - }, - { - "title": "Dashboard Row", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 4, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "persister" - } - ], - "error": "", - "metric": "process.io.read_count", - "period": "300", - "refId": "A" - }, - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "persister" - } - ], - "error": "", - "metric": "process.io.write_count", - "period": "300", - "refId": "B" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "IO Count", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": 
null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 5, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "persister" - } - ], - "error": "", - "metric": "process.io.read_kbytes", - "period": "300", - "refId": "A" - }, - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "persister" - } - ], - "error": "", - "metric": "process.io.write_kbytes", - "period": "300", - "refId": "B" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "IO Read/Write [kB]", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "kbytes", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": 250, - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - }, - { - "title": "Dashboard Row", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 6, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 12, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "persister" - } - ], - "error": "", - "metric": "process.open_file_descriptors", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Open File Descriptors", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": 250, - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - } - ] -} diff --git a/devstack/files/grafana/dashboards.d/15-influxdb.json b/devstack/files/grafana/dashboards.d/15-influxdb.json deleted file mode 100644 index 849226c5e..000000000 --- a/devstack/files/grafana/dashboards.d/15-influxdb.json +++ /dev/null @@ -1,564 +0,0 @@ -{ - "id": null, - "title": "Metrics DB", - "tags": [ - "metrics" - ], - "style": "dark", - "timezone": "browser", - "editable": true, - "sharedCrosshair": false, - "hideControls": false, - 
"time": { - "from": "now-1h", - "to": "now" - }, - "timepicker": { - "refresh_intervals": [ - "5s", - "10s", - "30s", - "1m", - "5m", - "15m", - "30m", - "1h", - "2h", - "1d" - ], - "time_options": [ - "5m", - "15m", - "1h", - "6h", - "12h", - "24h", - "2d", - "7d", - "30d" - ] - }, - "templating": { - "list": [] - }, - "annotations": { - "list": [] - }, - "schemaVersion": 13, - "version": 7, - "links": [], - "gnetId": null, - "rows": [ - { - "title": "Dashboard Row", - "panels": [ - { - "cacheTimeout": null, - "colorBackground": true, - "colorValue": false, - "colors": [ - "rgba(71, 212, 59, 0.4)", - "rgba(245, 150, 40, 0.73)", - "rgba(225, 40, 40, 0.59)" - ], - "format": "none", - "gauge": { - "maxValue": 100, - "minValue": 0, - "show": false, - "thresholdLabels": false, - "thresholdMarkers": true - }, - "id": 1, - "interval": null, - "links": [], - "mappingType": 1, - "mappingTypes": [ - ], - "maxDataPoints": 100, - "nullPointMode": "connected", - "nullText": null, - "postfix": "", - "postfixFontSize": "50%", - "prefix": "", - "prefixFontSize": "50%", - "span": 4, - "sparkline": { - "fillColor": "rgba(31, 118, 189, 0.18)", - "full": false, - "lineColor": "rgb(31, 120, 193)", - "show": false - }, - "targets": [ - { - "aggregator": "none", - "dimensions": [{"key": "service", "value": "influxdb"}], - "error": "", - "metric": "http_status", - "period": "300", - "refId": "A" - } - ], - "thresholds": "0.2,0.8", - "title": "InfluxDB", - "type": "singlestat", - "valueFontSize": "80%", - "valueMaps": [ - { - "value": "0", - "op": "=", - "text": "UP" - }, - { - "value": "1", - "op": "=", - "text": "DOWN" - } - ], - "valueName": "current" - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 2, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "influxd" - } - ], - "error": "", - "metric": "process.cpu_perc", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "CPU", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "percent", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 3, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "influxd" - } - ], - "error": "", - "metric": "process.mem.rss_mbytes", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - 
"timeFrom": null, - "timeShift": null, - "title": "Memory", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "bytes", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": "250px", - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - }, - { - "title": "Dashboard Row", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 4, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "influxd" - } - ], - "error": "", - "metric": "process.io.read_count", - "period": "300", - "refId": "A" - }, - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "influxd" - } - ], - "error": "", - "metric": "process.io.write_count", - "period": "300", - "refId": "B" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "IO Count", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 5, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "influxd" - } - ], - "error": "", - "metric": "process.io.read_kbytes", - "period": "300", - "refId": "A" - }, - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "influxd" - } - ], - "error": "", - "metric": "process.io.write_kbytes", - "period": "300", - "refId": "B" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "IO Read/Write [kB]", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "kbytes", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": 250, - "repeat": null, - 
"repeatRowId": null, - "repeatIteration": null, - "collapse": false - }, - { - "title": "Dashboard Row", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 6, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 12, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "influxd" - } - ], - "error": "", - "metric": "process.open_file_descriptors", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Open File Descriptors", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": 250, - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - } - ] -} diff --git a/devstack/files/grafana/dashboards.d/16-zookeper.json b/devstack/files/grafana/dashboards.d/16-zookeper.json deleted file mode 100644 index 1a48cfb3e..000000000 --- a/devstack/files/grafana/dashboards.d/16-zookeper.json +++ /dev/null @@ -1,760 +0,0 @@ -{ - "id": null, - "title": "Zookeper", - "tags": [ - "common" - ], - "style": "dark", - "timezone": "browser", - "editable": true, - "sharedCrosshair": false, - "hideControls": false, - "time": { - "from": "now-1h", - "to": "now" - }, - "timepicker": { - "refresh_intervals": [ - "5s", - "10s", - "30s", - "1m", - "5m", - "15m", - "30m", - "1h", - "2h", - "1d" - ], - "time_options": [ - "5m", - "15m", - "1h", - "6h", - "12h", - "24h", - "2d", - "7d", - "30d" - ] - }, - "templating": { - "list": [] - }, - "annotations": { - "list": [] - }, - "schemaVersion": 13, - "version": 15, - "links": [], - "gnetId": null, - "rows": [ - { - "title": "Dashboard Row", - "panels": [ - { - "cacheTimeout": null, - "colorBackground": true, - "colorValue": false, - "colors": [ - "rgba(245, 54, 54, 0.9)", - "rgba(237, 129, 40, 0.89)", - "rgba(50, 172, 45, 0.97)" - ], - "datasource": null, - "format": "none", - "gauge": { - "maxValue": 100, - "minValue": 0, - "show": false, - "thresholdLabels": false, - "thresholdMarkers": true - }, - "id": 2, - "interval": null, - "links": [], - "maxDataPoints": 100, - "nullPointMode": "connected", - "nullText": null, - "postfix": "", - "postfixFontSize": "50%", - "prefix": "", - "prefixFontSize": "50%", - "span": 4, - "sparkline": { - "fillColor": "rgba(31, 118, 189, 0.18)", - "full": false, - "lineColor": "rgb(31, 120, 193)", - "show": false - }, - "targets": [ - { - "aggregator": "none", - "dimensions": [ - { - "key": "service", - "value": "zookeeper" - } - ], - "error": "", - "metric": "process.pid_count", - "period": "300", - "refId": "A" - } - ], - "thresholds": "0.2,0.8", - "title": "Zookeper", - "type": "singlestat", - "valueFontSize": "80%", - "valueMaps": [ - { - "op": "=", - "text": "UP", - "value": "1" - }, - { - "op": "=", - "text": "UP", - "value": "2" - }, - 
{ - "op": "=", - "text": "UP", - "value": "3" - }, - { - "op": "=", - "text": "UP", - "value": "4" - }, - { - "op": "=", - "text": "UP", - "value": "5" - }, - { - "op": "=", - "text": "DOWN", - "value": "0" - } - ], - "valueName": "current" - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 3, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "zookeeper" - } - ], - "error": "", - "metric": "process.cpu_perc", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "CPU", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "percent", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 4, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "zookeeper" - } - ], - "error": "", - "metric": "process.mem.rss_mbytes", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Memory", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "mbytes", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": "250px", - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - }, - { - "title": "Dashboard Row", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 5, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "zookeeper" - } - ], - "error": "", - "metric": "process.io.read_count", - "period": "300", - "refId": "A" - }, - { - "aggregator": "avg", - "dimensions": [ - { - 
"key": "process_name", - "value": "zookeeper" - } - ], - "error": "", - "metric": "process.io.write_count", - "period": "300", - "refId": "B" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "IO Count", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 6, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "zookeeper" - } - ], - "error": "", - "metric": "process.io.read_kbytes", - "period": "300", - "refId": "A" - }, - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "zookeeper" - } - ], - "error": "", - "metric": "process.io.write_kbytes", - "period": "300", - "refId": "B" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "IO Read/Write [kB]", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "kbytes", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": 250, - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - }, - { - "title": "Dashboard Row", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 7, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 12, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "zookeeper" - } - ], - "error": "", - "metric": "process.open_file_descriptors", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Open File Descriptors", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": 250, - "repeat": null, - 
"repeatRowId": null, - "repeatIteration": null, - "collapse": false - }, - { - "title": "Dashboard Row", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 8, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [], - "error": "", - "metric": "zookeeper.avg_latency_sec", - "period": "300", - "refId": "A" - }, - { - "aggregator": "avg", - "dimensions": [], - "error": "", - "metric": "zookeeper.max_latency_sec", - "period": "300", - "refId": "B" - }, - { - "aggregator": "avg", - "dimensions": [], - "error": "", - "metric": "zookeeper.min_latency_sec", - "period": "300", - "refId": "C" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Latency", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "s", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 9, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [], - "error": "", - "metric": "zookeeper.connections_count", - "period": "300", - "refId": "B" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Conections count", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": 250, - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - } - ] -} diff --git a/devstack/files/grafana/dashboards.d/17-notification.json b/devstack/files/grafana/dashboards.d/17-notification.json deleted file mode 100644 index 29f1b6043..000000000 --- a/devstack/files/grafana/dashboards.d/17-notification.json +++ /dev/null @@ -1,624 +0,0 @@ -{ - "id": null, - "title": "Notification Engine", - "tags": [ - "metrics" - ], - "style": "dark", - "timezone": "browser", - "editable": true, - "sharedCrosshair": false, - "hideControls": false, - "time": { - "from": "now-1h", - "to": "now" - }, - "timepicker": { - "refresh_intervals": [ - "5s", - "10s", - "30s", - "1m", - "5m", - "15m", - "30m", - "1h", - "2h", - "1d" - ], - "time_options": [ - "5m", - "15m", - "1h", - "6h", - "12h", - 
"24h", - "2d", - "7d", - "30d" - ] - }, - "templating": { - "list": [] - }, - "annotations": { - "list": [] - }, - "schemaVersion": 13, - "version": 7, - "links": [], - "gnetId": null, - "rows": [ - { - "title": "Dashboard Row", - "panels": [ - { - "cacheTimeout": null, - "colorBackground": true, - "colorValue": false, - "colors": [ - "rgba(225, 40, 40, 0.59)", - "rgba(245, 150, 40, 0.73)", - "rgba(71, 212, 59, 0.4)" - ], - "format": "none", - "gauge": { - "maxValue": 100, - "minValue": 0, - "show": false, - "thresholdLabels": false, - "thresholdMarkers": true - }, - "id": 1, - "interval": null, - "links": [], - "mappingType": 1, - "mappingTypes": [ - ], - "maxDataPoints": 100, - "nullPointMode": "connected", - "nullText": null, - "postfix": "", - "postfixFontSize": "50%", - "prefix": "", - "prefixFontSize": "50%", - "span": 4, - "sparkline": { - "fillColor": "rgba(31, 118, 189, 0.18)", - "full": false, - "lineColor": "rgb(31, 120, 193)", - "show": false - }, - "targets": [ - { - "aggregator": "none", - "dimensions": [ - { - "key": "service", - "value": "monasca-notification" - } - ], - "error": "", - "metric": "process.pid_count", - "period": "300", - "refId": "A" - } - ], - "thresholds": "0.2,0.8", - "title": "Notification", - "type": "singlestat", - "valueFontSize": "80%", - "valueMaps": [ - { - "value": "0", - "op": "=", - "text": "DOWN" - }, - { - "value": "1", - "op": "=", - "text": "UP" - }, - { - "value": "2", - "op": "=", - "text": "UP" - }, - { - "value": "3", - "op": "=", - "text": "UP" - }, - { - "value": "4", - "op": "=", - "text": "UP" - }, - { - "value": "5", - "op": "=", - "text": "UP" - }, - { - "value": "6", - "op": "=", - "text": "UP" - }, - { - "value": "7", - "op": "=", - "text": "UP" - }, - { - "value": "8", - "op": "=", - "text": "UP" - }, - { - "value": "9", - "op": "=", - "text": "UP" - }, - { - "value": "10", - "op": "=", - "text": "UP" - }, - { - "value": "11", - "op": "=", - "text": "UP" - }, - { - "value": "12", - "op": "=", - "text": "UP" - } - ], - "valueName": "current" - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 2, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "monasca-notification" - } - ], - "error": "", - "metric": "process.cpu_perc", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "CPU", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "percent", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 3, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": 
false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "monasca-notification" - } - ], - "error": "", - "metric": "process.mem.rss_mbytes", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Memory", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "bytes", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": "250px", - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - }, - { - "title": "Dashboard Row", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 4, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "monasca-notification" - } - ], - "error": "", - "metric": "process.io.read_count", - "period": "300", - "refId": "A" - }, - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "monasca-notification" - } - ], - "error": "", - "metric": "process.io.write_count", - "period": "300", - "refId": "B" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "IO Count", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 5, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "monasca-notification" - } - ], - "error": "", - "metric": "process.io.read_kbytes", - "period": "300", - "refId": "A" - }, - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "monasca-notification" - } - ], - "error": "", - "metric": "process.io.write_kbytes", - "period": "300", - "refId": "B" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "IO Read/Write [kB]", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": 
"individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "kbytes", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": 250, - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - }, - { - "title": "Dashboard Row", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 6, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 12, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "monasca-notification" - } - ], - "error": "", - "metric": "process.open_file_descriptors", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Open File Descriptors", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": 250, - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - } - ] -} diff --git a/devstack/files/grafana/dashboards.d/18-kafka.json b/devstack/files/grafana/dashboards.d/18-kafka.json deleted file mode 100644 index 7874cc752..000000000 --- a/devstack/files/grafana/dashboards.d/18-kafka.json +++ /dev/null @@ -1,624 +0,0 @@ -{ - "id": null, - "title": "Kafka", - "tags": [ - "common" - ], - "style": "dark", - "timezone": "browser", - "editable": true, - "sharedCrosshair": false, - "hideControls": false, - "time": { - "from": "now-1h", - "to": "now" - }, - "timepicker": { - "refresh_intervals": [ - "5s", - "10s", - "30s", - "1m", - "5m", - "15m", - "30m", - "1h", - "2h", - "1d" - ], - "time_options": [ - "5m", - "15m", - "1h", - "6h", - "12h", - "24h", - "2d", - "7d", - "30d" - ] - }, - "templating": { - "list": [] - }, - "annotations": { - "list": [] - }, - "schemaVersion": 13, - "version": 7, - "links": [], - "gnetId": null, - "rows": [ - { - "title": "Dashboard Row", - "panels": [ - { - "cacheTimeout": null, - "colorBackground": true, - "colorValue": false, - "colors": [ - "rgba(225, 40, 40, 0.59)", - "rgba(245, 150, 40, 0.73)", - "rgba(71, 212, 59, 0.4)" - ], - "format": "none", - "gauge": { - "maxValue": 100, - "minValue": 0, - "show": false, - "thresholdLabels": false, - "thresholdMarkers": true - }, - "id": 1, - "interval": null, - "links": [], - "mappingType": 1, - "mappingTypes": [ - ], - "maxDataPoints": 100, - "nullPointMode": "connected", - "nullText": null, - "postfix": "", - "postfixFontSize": "50%", - "prefix": "", - "prefixFontSize": "50%", - "span": 4, - "sparkline": { - "fillColor": "rgba(31, 118, 189, 0.18)", - "full": false, - "lineColor": "rgb(31, 
120, 193)", - "show": false - }, - "targets": [ - { - "aggregator": "none", - "dimensions": [ - { - "key": "service", - "value": "kafka" - } - ], - "error": "", - "metric": "process.pid_count", - "period": "300", - "refId": "A" - } - ], - "thresholds": "0.2,0.8", - "title": "Kafka", - "type": "singlestat", - "valueFontSize": "80%", - "valueMaps": [ - { - "value": "0", - "op": "=", - "text": "DOWN" - }, - { - "value": "1", - "op": "=", - "text": "UP" - }, - { - "value": "2", - "op": "=", - "text": "UP" - }, - { - "value": "3", - "op": "=", - "text": "UP" - }, - { - "value": "4", - "op": "=", - "text": "UP" - }, - { - "value": "5", - "op": "=", - "text": "UP" - }, - { - "value": "6", - "op": "=", - "text": "UP" - }, - { - "value": "7", - "op": "=", - "text": "UP" - }, - { - "value": "8", - "op": "=", - "text": "UP" - }, - { - "value": "9", - "op": "=", - "text": "UP" - }, - { - "value": "10", - "op": "=", - "text": "UP" - }, - { - "value": "11", - "op": "=", - "text": "UP" - }, - { - "value": "12", - "op": "=", - "text": "UP" - } - ], - "valueName": "current" - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 2, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "kafka" - } - ], - "error": "", - "metric": "process.cpu_perc", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "CPU", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "percent", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 3, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "kafka" - } - ], - "error": "", - "metric": "process.mem.rss_mbytes", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Memory", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "bytes", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": "250px", - "repeat": null, - "repeatRowId": null, - 
"repeatIteration": null, - "collapse": false - }, - { - "title": "Dashboard Row", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 4, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "kafka" - } - ], - "error": "", - "metric": "process.io.read_count", - "period": "300", - "refId": "A" - }, - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "kafka" - } - ], - "error": "", - "metric": "process.io.write_count", - "period": "300", - "refId": "B" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "IO Count", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 5, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "kafka" - } - ], - "error": "", - "metric": "process.io.read_kbytes", - "period": "300", - "refId": "A" - }, - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "kafka" - } - ], - "error": "", - "metric": "process.io.write_kbytes", - "period": "300", - "refId": "B" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "IO Read/Write [kB]", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "kbytes", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": 250, - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - }, - { - "title": "Dashboard Row", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 6, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 12, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": 
"avg", - "dimensions": [ - { - "key": "process_name", - "value": "kafka" - } - ], - "error": "", - "metric": "process.open_file_descriptors", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Open File Descriptors", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": 250, - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - } - ] -} diff --git a/devstack/files/grafana/dashboards.d/19-statsd.json b/devstack/files/grafana/dashboards.d/19-statsd.json deleted file mode 100644 index 9074b21d7..000000000 --- a/devstack/files/grafana/dashboards.d/19-statsd.json +++ /dev/null @@ -1,624 +0,0 @@ -{ - "id": null, - "title": "Monasca statsd", - "tags": [ - "common" - ], - "style": "dark", - "timezone": "browser", - "editable": true, - "sharedCrosshair": false, - "hideControls": false, - "time": { - "from": "now-1h", - "to": "now" - }, - "timepicker": { - "refresh_intervals": [ - "5s", - "10s", - "30s", - "1m", - "5m", - "15m", - "30m", - "1h", - "2h", - "1d" - ], - "time_options": [ - "5m", - "15m", - "1h", - "6h", - "12h", - "24h", - "2d", - "7d", - "30d" - ] - }, - "templating": { - "list": [] - }, - "annotations": { - "list": [] - }, - "schemaVersion": 13, - "version": 7, - "links": [], - "gnetId": null, - "rows": [ - { - "title": "Dashboard Row", - "panels": [ - { - "cacheTimeout": null, - "colorBackground": true, - "colorValue": false, - "colors": [ - "rgba(225, 40, 40, 0.59)", - "rgba(245, 150, 40, 0.73)", - "rgba(71, 212, 59, 0.4)" - ], - "format": "none", - "gauge": { - "maxValue": 100, - "minValue": 0, - "show": false, - "thresholdLabels": false, - "thresholdMarkers": true - }, - "id": 1, - "interval": null, - "links": [], - "mappingType": 1, - "mappingTypes": [ - ], - "maxDataPoints": 100, - "nullPointMode": "connected", - "nullText": null, - "postfix": "", - "postfixFontSize": "50%", - "prefix": "", - "prefixFontSize": "50%", - "span": 4, - "sparkline": { - "fillColor": "rgba(31, 118, 189, 0.18)", - "full": false, - "lineColor": "rgb(31, 120, 193)", - "show": false - }, - "targets": [ - { - "aggregator": "none", - "dimensions": [ - { - "key": "service", - "value": "monasca-statsd" - } - ], - "error": "", - "metric": "process.pid_count", - "period": "300", - "refId": "A" - } - ], - "thresholds": "0.2,0.8", - "title": "Monasca statsd", - "type": "singlestat", - "valueFontSize": "80%", - "valueMaps": [ - { - "value": "0", - "op": "=", - "text": "DOWN" - }, - { - "value": "1", - "op": "=", - "text": "UP" - }, - { - "value": "2", - "op": "=", - "text": "UP" - }, - { - "value": "3", - "op": "=", - "text": "UP" - }, - { - "value": "4", - "op": "=", - "text": "UP" - }, - { - "value": "5", - "op": "=", - "text": "UP" - }, - { - "value": "6", - "op": "=", - "text": "UP" - }, - { - "value": "7", - "op": "=", - "text": "UP" - }, - { - "value": "8", - "op": "=", - "text": "UP" - }, - { - "value": "9", - "op": "=", - "text": "UP" - }, - { - "value": "10", - "op": "=", - "text": "UP" - }, - { - "value": "11", - "op": "=", - "text": "UP" - }, - { - "value": "12", - "op": "=", - "text": 
"UP" - } - ], - "valueName": "current" - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 2, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "monasca-statsd" - } - ], - "error": "", - "metric": "process.cpu_perc", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "CPU", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "percent", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 3, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "monasca-statsd" - } - ], - "error": "", - "metric": "process.mem.rss_mbytes", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Memory", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "bytes", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": "250px", - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - }, - { - "title": "Dashboard Row", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 4, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "monasca-statsd" - } - ], - "error": "", - "metric": "process.io.read_count", - "period": "300", - "refId": "A" - }, - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "monasca-statsd" - } - ], - "error": "", - "metric": "process.io.write_count", - "period": "300", - "refId": "B" - } - ], - "thresholds": [], - "timeFrom": 
null, - "timeShift": null, - "title": "IO Count", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 5, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "monasca-statsd" - } - ], - "error": "", - "metric": "process.io.read_kbytes", - "period": "300", - "refId": "A" - }, - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "monasca-statsd" - } - ], - "error": "", - "metric": "process.io.write_kbytes", - "period": "300", - "refId": "B" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "IO Read/Write [kB]", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "kbytes", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": 250, - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - }, - { - "title": "Dashboard Row", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 6, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 12, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "monasca-statsd" - } - ], - "error": "", - "metric": "process.open_file_descriptors", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Open File Descriptors", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": 250, - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - } - ] -} diff --git a/devstack/files/grafana/dashboards.d/20-kibana.json 
b/devstack/files/grafana/dashboards.d/20-kibana.json deleted file mode 100644 index 712afb006..000000000 --- a/devstack/files/grafana/dashboards.d/20-kibana.json +++ /dev/null @@ -1,624 +0,0 @@ -{ - "id": null, - "title": "Kibana", - "tags": [ - "logs" - ], - "style": "dark", - "timezone": "browser", - "editable": true, - "sharedCrosshair": false, - "hideControls": false, - "time": { - "from": "now-1h", - "to": "now" - }, - "timepicker": { - "refresh_intervals": [ - "5s", - "10s", - "30s", - "1m", - "5m", - "15m", - "30m", - "1h", - "2h", - "1d" - ], - "time_options": [ - "5m", - "15m", - "1h", - "6h", - "12h", - "24h", - "2d", - "7d", - "30d" - ] - }, - "templating": { - "list": [] - }, - "annotations": { - "list": [] - }, - "schemaVersion": 13, - "version": 7, - "links": [], - "gnetId": null, - "rows": [ - { - "title": "Dashboard Row", - "panels": [ - { - "cacheTimeout": null, - "colorBackground": true, - "colorValue": false, - "colors": [ - "rgba(225, 40, 40, 0.59)", - "rgba(245, 150, 40, 0.73)", - "rgba(71, 212, 59, 0.4)" - ], - "format": "none", - "gauge": { - "maxValue": 100, - "minValue": 0, - "show": false, - "thresholdLabels": false, - "thresholdMarkers": true - }, - "id": 1, - "interval": null, - "links": [], - "mappingType": 1, - "mappingTypes": [ - ], - "maxDataPoints": 100, - "nullPointMode": "connected", - "nullText": null, - "postfix": "", - "postfixFontSize": "50%", - "prefix": "", - "prefixFontSize": "50%", - "span": 4, - "sparkline": { - "fillColor": "rgba(31, 118, 189, 0.18)", - "full": false, - "lineColor": "rgb(31, 120, 193)", - "show": false - }, - "targets": [ - { - "aggregator": "none", - "dimensions": [ - { - "key": "service", - "value": "kibana" - } - ], - "error": "", - "metric": "process.pid_count", - "period": "300", - "refId": "A" - } - ], - "thresholds": "0.2,0.8", - "title": "Kibana", - "type": "singlestat", - "valueFontSize": "80%", - "valueMaps": [ - { - "value": "0", - "op": "=", - "text": "DOWN" - }, - { - "value": "1", - "op": "=", - "text": "UP" - }, - { - "value": "2", - "op": "=", - "text": "UP" - }, - { - "value": "3", - "op": "=", - "text": "UP" - }, - { - "value": "4", - "op": "=", - "text": "UP" - }, - { - "value": "5", - "op": "=", - "text": "UP" - }, - { - "value": "6", - "op": "=", - "text": "UP" - }, - { - "value": "7", - "op": "=", - "text": "UP" - }, - { - "value": "8", - "op": "=", - "text": "UP" - }, - { - "value": "9", - "op": "=", - "text": "UP" - }, - { - "value": "10", - "op": "=", - "text": "UP" - }, - { - "value": "11", - "op": "=", - "text": "UP" - }, - { - "value": "12", - "op": "=", - "text": "UP" - } - ], - "valueName": "current" - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 2, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "kibana" - } - ], - "error": "", - "metric": "process.cpu_perc", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "CPU", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": 
true, - "values": [] - }, - "yaxes": [ - { - "format": "percent", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 3, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "kibana" - } - ], - "error": "", - "metric": "process.mem.rss_mbytes", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Memory", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "bytes", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": "250px", - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - }, - { - "title": "Dashboard Row", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 4, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "kibana" - } - ], - "error": "", - "metric": "process.io.read_count", - "period": "300", - "refId": "A" - }, - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "kibana" - } - ], - "error": "", - "metric": "process.io.write_count", - "period": "300", - "refId": "B" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "IO Count", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 5, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - 
"dimensions": [ - { - "key": "process_name", - "value": "kibana" - } - ], - "error": "", - "metric": "process.io.read_kbytes", - "period": "300", - "refId": "A" - }, - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "kibana" - } - ], - "error": "", - "metric": "process.io.write_kbytes", - "period": "300", - "refId": "B" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "IO Read/Write [kB]", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "kbytes", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": 250, - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - }, - { - "title": "Dashboard Row", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 6, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 12, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "kibana" - } - ], - "error": "", - "metric": "process.open_file_descriptors", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Open File Descriptors", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": 250, - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - } - ] -} diff --git a/devstack/files/grafana/dashboards.d/22-logtransformer.json b/devstack/files/grafana/dashboards.d/22-logtransformer.json deleted file mode 100644 index dd9dae5c6..000000000 --- a/devstack/files/grafana/dashboards.d/22-logtransformer.json +++ /dev/null @@ -1,624 +0,0 @@ -{ - "id": null, - "title": "Log Transformer", - "tags": [ - "logs" - ], - "style": "dark", - "timezone": "browser", - "editable": true, - "sharedCrosshair": false, - "hideControls": false, - "time": { - "from": "now-1h", - "to": "now" - }, - "timepicker": { - "refresh_intervals": [ - "5s", - "10s", - "30s", - "1m", - "5m", - "15m", - "30m", - "1h", - "2h", - "1d" - ], - "time_options": [ - "5m", - "15m", - "1h", - "6h", - "12h", - "24h", - "2d", - "7d", - "30d" - ] - }, - "templating": { - "list": [] - }, - "annotations": { - "list": [] - }, - "schemaVersion": 13, - "version": 7, - "links": [], - "gnetId": null, - "rows": [ - { - "title": "Dashboard Row", - "panels": [ - { - "cacheTimeout": null, - "colorBackground": true, - "colorValue": false, - "colors": [ - "rgba(225, 40, 40, 0.59)", - "rgba(245, 150, 40, 0.73)", - 
"rgba(71, 212, 59, 0.4)" - ], - "format": "none", - "gauge": { - "maxValue": 100, - "minValue": 0, - "show": false, - "thresholdLabels": false, - "thresholdMarkers": true - }, - "id": 1, - "interval": null, - "links": [], - "mappingType": 1, - "mappingTypes": [ - ], - "maxDataPoints": 100, - "nullPointMode": "connected", - "nullText": null, - "postfix": "", - "postfixFontSize": "50%", - "prefix": "", - "prefixFontSize": "50%", - "span": 4, - "sparkline": { - "fillColor": "rgba(31, 118, 189, 0.18)", - "full": false, - "lineColor": "rgb(31, 120, 193)", - "show": false - }, - "targets": [ - { - "aggregator": "none", - "dimensions": [ - { - "key": "service", - "value": "log-transformer" - } - ], - "error": "", - "metric": "process.pid_count", - "period": "300", - "refId": "A" - } - ], - "thresholds": "0.2,0.8", - "title": "Log Transformer", - "type": "singlestat", - "valueFontSize": "80%", - "valueMaps": [ - { - "value": "0", - "op": "=", - "text": "DOWN" - }, - { - "value": "1", - "op": "=", - "text": "UP" - }, - { - "value": "2", - "op": "=", - "text": "UP" - }, - { - "value": "3", - "op": "=", - "text": "UP" - }, - { - "value": "4", - "op": "=", - "text": "UP" - }, - { - "value": "5", - "op": "=", - "text": "UP" - }, - { - "value": "6", - "op": "=", - "text": "UP" - }, - { - "value": "7", - "op": "=", - "text": "UP" - }, - { - "value": "8", - "op": "=", - "text": "UP" - }, - { - "value": "9", - "op": "=", - "text": "UP" - }, - { - "value": "10", - "op": "=", - "text": "UP" - }, - { - "value": "11", - "op": "=", - "text": "UP" - }, - { - "value": "12", - "op": "=", - "text": "UP" - } - ], - "valueName": "current" - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 2, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "log-transformer" - } - ], - "error": "", - "metric": "process.cpu_perc", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "CPU", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "percent", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 3, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "log-transformer" - } - ], - "error": "", - "metric": "process.mem.rss_mbytes", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": 
null, - "timeShift": null, - "title": "Memory", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "bytes", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": "250px", - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - }, - { - "title": "Dashboard Row", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 4, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "log-transformer" - } - ], - "error": "", - "metric": "process.io.read_count", - "period": "300", - "refId": "A" - }, - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "log-transformer" - } - ], - "error": "", - "metric": "process.io.write_count", - "period": "300", - "refId": "B" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "IO Count", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 5, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "log-transformer" - } - ], - "error": "", - "metric": "process.io.read_kbytes", - "period": "300", - "refId": "A" - }, - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "log-transformer" - } - ], - "error": "", - "metric": "process.io.write_kbytes", - "period": "300", - "refId": "B" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "IO Read/Write [kB]", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "kbytes", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": 250, - 
"repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - }, - { - "title": "Dashboard Row", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 6, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 12, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "log-transformer" - } - ], - "error": "", - "metric": "process.open_file_descriptors", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Open File Descriptors", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": 250, - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - } - ] -} diff --git a/devstack/files/grafana/dashboards.d/23-logtmetrics.json b/devstack/files/grafana/dashboards.d/23-logtmetrics.json deleted file mode 100644 index eb44a73dd..000000000 --- a/devstack/files/grafana/dashboards.d/23-logtmetrics.json +++ /dev/null @@ -1,624 +0,0 @@ -{ - "id": null, - "title": "Log Metrics", - "tags": [ - "logs" - ], - "style": "dark", - "timezone": "browser", - "editable": true, - "sharedCrosshair": false, - "hideControls": false, - "time": { - "from": "now-1h", - "to": "now" - }, - "timepicker": { - "refresh_intervals": [ - "5s", - "10s", - "30s", - "1m", - "5m", - "15m", - "30m", - "1h", - "2h", - "1d" - ], - "time_options": [ - "5m", - "15m", - "1h", - "6h", - "12h", - "24h", - "2d", - "7d", - "30d" - ] - }, - "templating": { - "list": [] - }, - "annotations": { - "list": [] - }, - "schemaVersion": 13, - "version": 7, - "links": [], - "gnetId": null, - "rows": [ - { - "title": "Dashboard Row", - "panels": [ - { - "cacheTimeout": null, - "colorBackground": true, - "colorValue": false, - "colors": [ - "rgba(225, 40, 40, 0.59)", - "rgba(245, 150, 40, 0.73)", - "rgba(71, 212, 59, 0.4)" - ], - "format": "none", - "gauge": { - "maxValue": 100, - "minValue": 0, - "show": false, - "thresholdLabels": false, - "thresholdMarkers": true - }, - "id": 1, - "interval": null, - "links": [], - "mappingType": 1, - "mappingTypes": [ - ], - "maxDataPoints": 100, - "nullPointMode": "connected", - "nullText": null, - "postfix": "", - "postfixFontSize": "50%", - "prefix": "", - "prefixFontSize": "50%", - "span": 4, - "sparkline": { - "fillColor": "rgba(31, 118, 189, 0.18)", - "full": false, - "lineColor": "rgb(31, 120, 193)", - "show": false - }, - "targets": [ - { - "aggregator": "none", - "dimensions": [ - { - "key": "service", - "value": "log-metrics" - } - ], - "error": "", - "metric": "process.pid_count", - "period": "300", - "refId": "A" - } - ], - "thresholds": "0.2,0.8", - "title": "Log Metrics", - "type": "singlestat", - "valueFontSize": "80%", - "valueMaps": [ - { - "value": "0", - "op": "=", - "text": 
"DOWN" - }, - { - "value": "1", - "op": "=", - "text": "UP" - }, - { - "value": "2", - "op": "=", - "text": "UP" - }, - { - "value": "3", - "op": "=", - "text": "UP" - }, - { - "value": "4", - "op": "=", - "text": "UP" - }, - { - "value": "5", - "op": "=", - "text": "UP" - }, - { - "value": "6", - "op": "=", - "text": "UP" - }, - { - "value": "7", - "op": "=", - "text": "UP" - }, - { - "value": "8", - "op": "=", - "text": "UP" - }, - { - "value": "9", - "op": "=", - "text": "UP" - }, - { - "value": "10", - "op": "=", - "text": "UP" - }, - { - "value": "11", - "op": "=", - "text": "UP" - }, - { - "value": "12", - "op": "=", - "text": "UP" - } - ], - "valueName": "current" - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 2, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "log-metrics" - } - ], - "error": "", - "metric": "process.cpu_perc", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "CPU", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "percent", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 3, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "log-metrics" - } - ], - "error": "", - "metric": "process.mem.rss_mbytes", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Memory", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "bytes", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": "250px", - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - }, - { - "title": "Dashboard Row", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 4, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - 
"nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "log-metrics" - } - ], - "error": "", - "metric": "process.io.read_count", - "period": "300", - "refId": "A" - }, - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "log-metrics" - } - ], - "error": "", - "metric": "process.io.write_count", - "period": "300", - "refId": "B" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "IO Count", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 5, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "log-metrics" - } - ], - "error": "", - "metric": "process.io.read_kbytes", - "period": "300", - "refId": "A" - }, - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "log-metrics" - } - ], - "error": "", - "metric": "process.io.write_kbytes", - "period": "300", - "refId": "B" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "IO Read/Write [kB]", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "kbytes", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": 250, - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - }, - { - "title": "Dashboard Row", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 6, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 12, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "log-metrics" - } - ], - "error": "", - "metric": "process.open_file_descriptors", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Open File Descriptors", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": 
"individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": 250, - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - } - ] -} diff --git a/devstack/files/grafana/dashboards.d/24-logpersister.json b/devstack/files/grafana/dashboards.d/24-logpersister.json deleted file mode 100644 index 28a1adec1..000000000 --- a/devstack/files/grafana/dashboards.d/24-logpersister.json +++ /dev/null @@ -1,624 +0,0 @@ -{ - "id": null, - "title": "Log Persister", - "tags": [ - "logs" - ], - "style": "dark", - "timezone": "browser", - "editable": true, - "sharedCrosshair": false, - "hideControls": false, - "time": { - "from": "now-1h", - "to": "now" - }, - "timepicker": { - "refresh_intervals": [ - "5s", - "10s", - "30s", - "1m", - "5m", - "15m", - "30m", - "1h", - "2h", - "1d" - ], - "time_options": [ - "5m", - "15m", - "1h", - "6h", - "12h", - "24h", - "2d", - "7d", - "30d" - ] - }, - "templating": { - "list": [] - }, - "annotations": { - "list": [] - }, - "schemaVersion": 13, - "version": 7, - "links": [], - "gnetId": null, - "rows": [ - { - "title": "Dashboard Row", - "panels": [ - { - "cacheTimeout": null, - "colorBackground": true, - "colorValue": false, - "colors": [ - "rgba(225, 40, 40, 0.59)", - "rgba(245, 150, 40, 0.73)", - "rgba(71, 212, 59, 0.4)" - ], - "format": "none", - "gauge": { - "maxValue": 100, - "minValue": 0, - "show": false, - "thresholdLabels": false, - "thresholdMarkers": true - }, - "id": 1, - "interval": null, - "links": [], - "mappingType": 1, - "mappingTypes": [ - ], - "maxDataPoints": 100, - "nullPointMode": "connected", - "nullText": null, - "postfix": "", - "postfixFontSize": "50%", - "prefix": "", - "prefixFontSize": "50%", - "span": 4, - "sparkline": { - "fillColor": "rgba(31, 118, 189, 0.18)", - "full": false, - "lineColor": "rgb(31, 120, 193)", - "show": false - }, - "targets": [ - { - "aggregator": "none", - "dimensions": [ - { - "key": "service", - "value": "log-persister" - } - ], - "error": "", - "metric": "process.pid_count", - "period": "300", - "refId": "A" - } - ], - "thresholds": "0.2,0.8", - "title": "Log Persister", - "type": "singlestat", - "valueFontSize": "80%", - "valueMaps": [ - { - "value": "0", - "op": "=", - "text": "DOWN" - }, - { - "value": "1", - "op": "=", - "text": "UP" - }, - { - "value": "2", - "op": "=", - "text": "UP" - }, - { - "value": "3", - "op": "=", - "text": "UP" - }, - { - "value": "4", - "op": "=", - "text": "UP" - }, - { - "value": "5", - "op": "=", - "text": "UP" - }, - { - "value": "6", - "op": "=", - "text": "UP" - }, - { - "value": "7", - "op": "=", - "text": "UP" - }, - { - "value": "8", - "op": "=", - "text": "UP" - }, - { - "value": "9", - "op": "=", - "text": "UP" - }, - { - "value": "10", - "op": "=", - "text": "UP" - }, - { - "value": "11", - "op": "=", - "text": "UP" - }, - { - "value": "12", - "op": "=", - "text": "UP" - } - ], - "valueName": "current" - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 2, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - 
"nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "log-persister" - } - ], - "error": "", - "metric": "process.cpu_perc", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "CPU", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "percent", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 3, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "log-persister" - } - ], - "error": "", - "metric": "process.mem.rss_mbytes", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Memory", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "bytes", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": "250px", - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - }, - { - "title": "Dashboard Row", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 4, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "log-persister" - } - ], - "error": "", - "metric": "process.io.read_count", - "period": "300", - "refId": "A" - }, - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "log-persister" - } - ], - "error": "", - "metric": "process.io.write_count", - "period": "300", - "refId": "B" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "IO Count", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": 
null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 5, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "log-persister" - } - ], - "error": "", - "metric": "process.io.read_kbytes", - "period": "300", - "refId": "A" - }, - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "log-persister" - } - ], - "error": "", - "metric": "process.io.write_kbytes", - "period": "300", - "refId": "B" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "IO Read/Write [kB]", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "kbytes", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": 250, - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - }, - { - "title": "Dashboard Row", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "fill": 1, - "id": 6, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 12, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "dimensions": [ - { - "key": "process_name", - "value": "log-persister" - } - ], - "error": "", - "metric": "process.open_file_descriptors", - "period": "300", - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Open File Descriptors", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "showTitle": false, - "titleSize": "h6", - "height": 250, - "repeat": null, - "repeatRowId": null, - "repeatIteration": null, - "collapse": false - } - ] -} diff --git a/devstack/files/grafana/grafana-init.py b/devstack/files/grafana/grafana-init.py deleted file mode 100644 index adb824874..000000000 --- a/devstack/files/grafana/grafana-init.py +++ /dev/null @@ -1,176 +0,0 @@ -# coding=utf-8 - -# (C) Copyright 2017 Hewlett Packard Enterprise Development LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import glob -import json -import logging -import os -import sys -import time - -from requests import RequestException -from requests import Session - -LOG_LEVEL = logging.getLevelName(os.environ.get('LOG_LEVEL', 'INFO')) -logging.basicConfig(level=LOG_LEVEL) - -logger = logging.getLogger(__name__) - -GRAFANA_URL = os.environ.get('GRAFANA_URL', 'http://localhost:3000') -GRAFANA_USERNAME = os.environ.get('GRAFANA_USERNAME', 'mini-mon') -GRAFANA_PASSWORD = os.environ.get('GRAFANA_PASSWORD', 'password') -GRAFANA_USERS = [{'user': GRAFANA_USERNAME, 'password': GRAFANA_PASSWORD, 'email': ''}] - -DATASOURCE_NAME = os.environ.get('DATASOURCE_NAME', 'monasca') -DATASOURCE_URL = os.environ.get('DATASOURCE_URL', 'http://localhost/metrics') -DATASOURCE_ACCESS_MODE = os.environ.get('DATASOURCE_ACCESS_MODE', 'proxy') -DATASOURCE_AUTH = os.environ.get('DATASOURCE_AUTH', 'Keystone').capitalize() -DATASOURCE_AUTH_TOKEN = os.environ.get('DATASOURCE_AUTH_TOKEN', '') - -DASHBOARDS_DIR = os.environ.get('DASHBOARDS_DIR', '/dashboards.d') - - -def retry(retries=5, delay=2.0, exc_types=(RequestException,)): - def decorator(func): - def f_retry(*args, **kwargs): - for i in range(retries): - try: - return func(*args, **kwargs) - except exc_types as exc: - if i < retries - 1: - logger.debug('Caught exception, retrying...', - exc_info=True) - time.sleep(delay) - else: - logger.exception('Failed after %d attempts', retries) - if isinstance(exc, RequestException): - logger.debug('Response was: %r', exc.response.text) - - raise - return f_retry - return decorator - - -def create_login_payload(): - if os.environ.get('GRAFANA_USERS'): - try: - json.loads(os.environ.get('GRAFANA_USERS')) - except ValueError: - print("Invalid type GRAFANA_USERS") - raise - grafana_users = json.loads(os.environ.get('GRAFANA_USERS')) - else: - grafana_users = GRAFANA_USERS - return grafana_users - - -@retry(retries=24, delay=5.0) -def login(session, user): - r = session.post('{url}/login'.format(url=GRAFANA_URL), - json=user, - timeout=5) - r.raise_for_status() - - -@retry(retries=12, delay=5.0) -def check_initialized(session): - r = session.get('{url}/api/datasources'.format(url=GRAFANA_URL), timeout=5) - r.raise_for_status() - - logging.debug('existing datasources = %r', r.json()) - - for datasource in r.json(): - if datasource['name'] == DATASOURCE_NAME: - return True - - return False - - -def create_datasource_payload(): - payload = { - 'name': DATASOURCE_NAME, - 'url': DATASOURCE_URL, - 'access': DATASOURCE_ACCESS_MODE, - 'isDefault': True, - } - - if DATASOURCE_AUTH not in ['Keystone', 'Horizon', 'Token']: - logger.error('Unknown Keystone authentication option: %s', - DATASOURCE_AUTH) - sys.exit(1) - - keystone_auth = False - if DATASOURCE_AUTH in ['Keystone']: - keystone_auth = True - - payload.update({ - 'monasca': { - 'type': 'monasca-datasource', - 'jsonData': { - 'authMode': DATASOURCE_AUTH, - 'keystoneAuth': keystone_auth, - 'token': DATASOURCE_AUTH_TOKEN, - } - } - }.get(DATASOURCE_NAME, {})) - - logging.debug('payload = %r', payload) - - return payload - - -def create_dashboard_payload(json_path): - with 
open(json_path, 'r') as f: - dashboard = json.load(f) - dashboard['id'] = None - - return { - 'dashboard': dashboard, - 'overwrite': False - } - - -def main(): - for user in create_login_payload(): - logging.info('Opening a Grafana session...') - session = Session() - login(session, user) - - if check_initialized(session): - logging.info('Grafana has already been initialized, skipping!') - return - - logging.info('Attempting to add configured datasource...') - r = session.post('{url}/api/datasources'.format(url=GRAFANA_URL), - json=create_datasource_payload()) - logging.debug('Response: %r', r.json()) - r.raise_for_status() - - for path in sorted(glob.glob('{dir}/*.json'.format(dir=DASHBOARDS_DIR))): - logging.info('Creating dashboard from file: {path}'.format(path=path)) - r = session.post('{url}/api/dashboards/db'.format(url=GRAFANA_URL), - json=create_dashboard_payload(path)) - logging.debug('Response: %r', r.json()) - r.raise_for_status() - - logging.info('Ending %r session...', user.get('user')) - session.get('{url}/logout'.format(url=GRAFANA_URL)) - - logging.info('Finished successfully.') - - -if __name__ == '__main__': - main() diff --git a/devstack/files/grafana/grafana-server b/devstack/files/grafana/grafana-server deleted file mode 100755 index 27c2a1b86..000000000 --- a/devstack/files/grafana/grafana-server +++ /dev/null @@ -1,146 +0,0 @@ -#! /usr/bin/env bash - -# chkconfig: 2345 80 05 -# description: Grafana web server & backend -# processname: grafana-server -# config: /etc/grafana/grafana.ini -# pidfile: /var/run/grafana-server.pid - -### BEGIN INIT INFO -# Provides: grafana-server -# Required-Start: $all -# Required-Stop: $remote_fs $syslog -# Default-Start: 2 3 4 5 -# Default-Stop: 0 1 6 -# Short-Description: Start grafana at boot time -### END INIT INFO - -# tested on -# 1. New lsb that define start-stop-daemon -# 3. Centos with initscripts package installed - -PATH=/bin:/usr/bin:/sbin:/usr/sbin -NAME=grafana-server -DESC="Grafana Server" -DEFAULT=/etc/default/$NAME - -GRAFANA_USER=grafana -GRAFANA_GROUP=grafana -GRAFANA_HOME=/usr/share/grafana -CONF_DIR=/etc/grafana -WORK_DIR=$GRAFANA_HOME -DATA_DIR=/var/lib/grafana -LOG_DIR=/var/log/grafana -CONF_FILE=$CONF_DIR/grafana.ini -MAX_OPEN_FILES=10000 -PID_FILE=/var/run/$NAME.pid -DAEMON=/usr/sbin/$NAME - -umask 0027 - -if [ `id -u` -ne 0 ]; then - echo "You need root privileges to run this script" - exit 4 -fi - -if [ ! -x $DAEMON ]; then - echo "Program not installed or not executable" - exit 5 -fi - -. /lib/lsb/init-functions - -if [ -r /etc/default/rcS ]; then - . /etc/default/rcS -fi - -# overwrite settings from default file -if [ -f "$DEFAULT" ]; then - . "$DEFAULT" -fi - -DAEMON_OPTS="--pidfile=${PID_FILE} --config=${CONF_FILE} cfg:default.paths.data=${DATA_DIR} cfg:default.paths.logs=${LOG_DIR}" - -case "$1" in - start) - - log_daemon_msg "Starting $DESC" - - pid=`pidofproc -p $PID_FILE grafana` - if [ -n "$pid" ] ; then - log_begin_msg "Already running." - log_end_msg 0 - exit 0 - fi - - # Prepare environment - mkdir -p "$LOG_DIR" "$DATA_DIR" && chown "$GRAFANA_USER":"$GRAFANA_GROUP" "$LOG_DIR" "$DATA_DIR" - touch "$PID_FILE" && chown "$GRAFANA_USER":"$GRAFANA_GROUP" "$PID_FILE" - - if [ -n "$MAX_OPEN_FILES" ]; then - ulimit -n $MAX_OPEN_FILES - fi - - # Start Daemon - start-stop-daemon --start -b --chdir "$WORK_DIR" --user "$GRAFANA_USER" -c "$GRAFANA_USER" --pidfile "$PID_FILE" --exec $DAEMON -- $DAEMON_OPTS - return=$? - if [ $return -eq 0 ] - then - sleep 1 - - # check if pid file has been written two - if ! 
[[ -s $PID_FILE ]]; then - log_end_msg 1 - exit 1 - fi - - i=0 - timeout=10 - # Wait for the process to be properly started before exiting - until { cat "$PID_FILE" | xargs kill -0; } >/dev/null 2>&1 - do - sleep 1 - i=$(($i + 1)) - if [ $i -gt $timeout ]; then - log_end_msg 1 - exit 1 - fi - done - fi - log_end_msg $return - ;; - stop) - log_daemon_msg "Stopping $DESC" - - if [ -f "$PID_FILE" ]; then - start-stop-daemon --stop --pidfile "$PID_FILE" \ - --user "$GRAFANA_USER" \ - --retry=TERM/20/KILL/5 >/dev/null - if [ $? -eq 1 ]; then - log_progress_msg "$DESC is not running but pid file exists, cleaning up" - elif [ $? -eq 3 ]; then - PID="`cat $PID_FILE`" - log_failure_msg "Failed to stop $DESC (pid $PID)" - exit 1 - fi - rm -f "$PID_FILE" - else - log_progress_msg "(not running)" - fi - log_end_msg 0 - ;; - status) - status_of_proc -p $PID_FILE grafana grafana && exit 0 || exit $? - ;; - restart|force-reload) - if [ -f "$PID_FILE" ]; then - $0 stop - sleep 1 - fi - $0 start - ;; - *) - log_success_msg "Usage: $0 {start|stop|restart|force-reload|status}" - exit 3 - ;; -esac diff --git a/devstack/files/grafana/grafana.ini b/devstack/files/grafana/grafana.ini deleted file mode 100644 index b0af9c00a..000000000 --- a/devstack/files/grafana/grafana.ini +++ /dev/null @@ -1,13 +0,0 @@ -[auth.keystone] -enabled = true -auth_url = '%KEYSTONE_AUTH_URI%' - -[paths] -plugins = /var/lib/grafana/plugins - -[session] -session_life_time = 1800 -gc_interval_time = 1800 - -[security] -login_remember_days = 0 diff --git a/devstack/files/influxdb/influxdb b/devstack/files/influxdb/influxdb deleted file mode 100644 index 4fcff8538..000000000 --- a/devstack/files/influxdb/influxdb +++ /dev/null @@ -1,19 +0,0 @@ -# -# (C) Copyright 2015 Hewlett Packard Enterprise Development Company LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -#    http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Logging -STDERR="/var/log/influxdb/influxd.log" diff --git a/devstack/files/influxdb/influxdb.conf b/devstack/files/influxdb/influxdb.conf deleted file mode 100644 index 1d2ee917a..000000000 --- a/devstack/files/influxdb/influxdb.conf +++ /dev/null @@ -1,579 +0,0 @@ -### Welcome to the InfluxDB configuration file. - -# The values in this file override the default values used by the system if -# a config option is not specified. The commented out lines are the configuration -# field and the default value used. Uncommenting a line and changing the value -# will change the value used at runtime when the process is restarted. - -# Once every 24 hours InfluxDB will report usage data to usage.influxdata.com -# The data includes a random ID, os, arch, version, the number of series and other -# usage data. No data from user databases is ever transmitted. -# Change this option to true to disable reporting. -# reporting-disabled = false - -# Bind address to use for the RPC service for backup and restore. -# bind-address = "127.0.0.1:8088" - -### -### [meta] -### -### Controls the parameters for the Raft consensus group that stores metadata -### about the InfluxDB cluster. 
-### - -[meta] - # Where the metadata/raft database is stored - dir = "/var/lib/influxdb/meta" - - # Automatically create a default retention policy when creating a database. - # retention-autocreate = true - - # If log messages are printed for the meta service - # logging-enabled = true - -### -### [data] -### -### Controls where the actual shard data for InfluxDB lives and how it is -### flushed from the WAL. "dir" may need to be changed to a suitable place -### for your system, but the WAL settings are an advanced configuration. The -### defaults should work for most systems. -### - -[data] - # The directory where the TSM storage engine stores TSM files. - dir = "/var/lib/influxdb/data" - - # The directory where the TSM storage engine stores WAL files. - wal-dir = "/var/lib/influxdb/wal" - - # The amount of time that a write will wait before fsyncing. A duration - # greater than 0 can be used to batch up multiple fsync calls. This is useful for slower - # disks or when WAL write contention is seen. A value of 0s fsyncs every write to the WAL. - # Values in the range of 0-100ms are recommended for non-SSD disks. - # wal-fsync-delay = "0s" - - - # The type of shard index to use for new shards. The default is an in-memory index that is - # recreated at startup. A value of "tsi1" will use a disk based index that supports higher - # cardinality datasets. - # index-version = "inmem" - index-version = "tsi1" - - # Trace logging provides more verbose output around the tsm engine. Turning - # this on can provide more useful output for debugging tsm engine issues. - # trace-logging-enabled = false - - # Whether queries should be logged before execution. Very useful for troubleshooting, but will - # log any sensitive data contained within a query. - # query-log-enabled = true - - # Validates incoming writes to ensure keys only have valid unicode characters. - # This setting will incur a small overhead because every key must be checked. - # validate-keys = false - - # Settings for the TSM engine - - # CacheMaxMemorySize is the maximum size a shard's cache can - # reach before it starts rejecting writes. - # Valid size suffixes are k, m, or g (case insensitive, 1024 = 1k). - # Values without a size suffix are in bytes. - # cache-max-memory-size = "1g" - - # CacheSnapshotMemorySize is the size at which the engine will - # snapshot the cache and write it to a TSM file, freeing up memory - # Valid size suffixes are k, m, or g (case insensitive, 1024 = 1k). - # Values without a size suffix are in bytes. - # cache-snapshot-memory-size = "25m" - - # CacheSnapshotWriteColdDuration is the length of time at - # which the engine will snapshot the cache and write it to - # a new TSM file if the shard hasn't received writes or deletes - # cache-snapshot-write-cold-duration = "10m" - - # CompactFullWriteColdDuration is the duration at which the engine - # will compact all TSM files in a shard if it hasn't received a - # write or delete - # compact-full-write-cold-duration = "4h" - - # The maximum number of concurrent full and level compactions that can run at one time. A - # value of 0 results in 50% of runtime.GOMAXPROCS(0) used at runtime. Any number greater - # than 0 limits compactions to that value. This setting does not apply - # to cache snapshotting. - # max-concurrent-compactions = 0 - - # CompactThroughput is the rate limit in bytes per second that we - # will allow TSM compactions to write to disk. 
Note that short bursts are allowed - # to happen at a possibly larger value, set by CompactThroughputBurst - # compact-throughput = "48m" - - # CompactThroughputBurst is the rate limit in bytes per second that we - # will allow TSM compactions to write to disk. - # compact-throughput-burst = "48m" - - # If true, then the mmap advise value MADV_WILLNEED will be provided to the kernel with respect to - # TSM files. This setting has been found to be problematic on some kernels, and defaults to off. - # It might help users who have slow disks in some cases. - # tsm-use-madv-willneed = false - - # Settings for the inmem index - - # The maximum series allowed per database before writes are dropped. This limit can prevent - # high cardinality issues at the database level. This limit can be disabled by setting it to - # 0. - # max-series-per-database = 1000000 - - # The maximum number of tag values per tag that are allowed before writes are dropped. This limit - # can prevent high cardinality tag values from being written to a measurement. This limit can be - # disabled by setting it to 0. - # max-values-per-tag = 100000 - - # Settings for the tsi1 index - - # The threshold, in bytes, when an index write-ahead log file will compact - # into an index file. Lower sizes will cause log files to be compacted more - # quickly and result in lower heap usage at the expense of write throughput. - # Higher sizes will be compacted less frequently, store more series in-memory, - # and provide higher write throughput. - # Valid size suffixes are k, m, or g (case insensitive, 1024 = 1k). - # Values without a size suffix are in bytes. - # max-index-log-file-size = "1m" - - # The size of the internal cache used in the TSI index to store previously - # calculated series results. Cached results will be returned quickly from the cache rather - # than needing to be recalculated when a subsequent query with a matching tag key/value - # predicate is executed. Setting this value to 0 will disable the cache, which may - # lead to query performance issues. - # This value should only be increased if it is known that the set of regularly used - # tag key/value predicates across all measurements for a database is larger than 100. An - # increase in cache size may lead to an increase in heap usage. - series-id-set-cache-size = 100 - -### -### [coordinator] -### -### Controls the clustering service configuration. -### - -[coordinator] - # The default time a write request will wait until a "timeout" error is returned to the caller. - # write-timeout = "10s" - - # The maximum number of concurrent queries allowed to be executing at one time. If a query is - # executed and exceeds this limit, an error is returned to the caller. This limit can be disabled - # by setting it to 0. - # max-concurrent-queries = 0 - - # The maximum time a query is allowed to execute before being killed by the system. This limit - # can help prevent runaway queries. Setting the value to 0 disables the limit. - # query-timeout = "0s" - - # The time threshold when a query will be logged as a slow query. This limit can be set to help - # discover slow or resource intensive queries. Setting the value to 0 disables the slow query logging. - # log-queries-after = "0s" - - # The maximum number of points a SELECT can process. A value of 0 will make - # the maximum point count unlimited. This will only be checked every second so queries will not - # be aborted immediately when hitting the limit.
-  # max-select-point = 0
-
-  # The maximum number of series a SELECT can run. A value of 0 will make the maximum series
-  # count unlimited.
-  # max-select-series = 0
-
-  # The maximum number of group by time buckets a SELECT can create. A value of zero will make the maximum
-  # number of buckets unlimited.
-  # max-select-buckets = 0
-
-###
-### [retention]
-###
-### Controls the enforcement of retention policies for evicting old data.
-###
-
-[retention]
-  # Determines whether retention policy enforcement is enabled.
-  # enabled = true
-
-  # The interval of time when retention policy enforcement checks run.
-  # check-interval = "30m"
-
-###
-### [shard-precreation]
-###
-### Controls the precreation of shards, so they are available before data arrives.
-### Only shards that, after creation, will have both a start- and end-time in the
-### future, will ever be created. Shards are never precreated that would be wholly
-### or partially in the past.
-
-[shard-precreation]
-  # Determines whether shard pre-creation service is enabled.
-  # enabled = true
-
-  # The interval of time when the check to pre-create new shards runs.
-  # check-interval = "10m"
-
-  # The default period ahead of the end time of a shard group that its successor
-  # group is created.
-  # advance-period = "30m"
-
-###
-### [monitor]
-###
-### Controls the system self-monitoring, statistics and diagnostics.
-###
-### The internal database for monitoring data is created automatically if
-### it does not already exist. The target retention within this database
-### is called 'monitor' and is also created with a retention period of 7 days
-### and a replication factor of 1, if it does not exist. In all cases
-### this retention policy is configured as the default for the database.
-
-[monitor]
-  # Whether to record statistics internally.
-  # store-enabled = true
-
-  # The destination database for recorded statistics
-  # store-database = "_internal"
-
-  # The interval at which to record statistics
-  # store-interval = "10s"
-
-###
-### [http]
-###
-### Controls how the HTTP endpoints are configured. These are the primary
-### mechanism for getting data into and out of InfluxDB.
-###
-
-[http]
-  # Determines whether HTTP endpoint is enabled.
-  # enabled = true
-
-  # Determines whether the Flux query endpoint is enabled.
-  # flux-enabled = false
-
-  # Determines whether the Flux query logging is enabled.
-  # flux-log-enabled = false
-
-  # The bind address used by the HTTP service.
-  # bind-address = ":8086"
-
-  # Determines whether user authentication is enabled over HTTP/HTTPS.
-  # auth-enabled = false
-
-  # The default realm sent back when issuing a basic auth challenge.
-  # realm = "InfluxDB"
-
-  # Determines whether HTTP request logging is enabled.
-  # log-enabled = true
-
-  # Determines whether the HTTP write request logs should be suppressed when the log is enabled.
-  # suppress-write-log = false
-
-  # When HTTP request logging is enabled, this option specifies the path where
-  # log entries should be written. If unspecified, the default is to write to stderr, which
-  # intermingles HTTP logs with internal InfluxDB logging.
-  #
-  # If influxd is unable to access the specified path, it will log an error and fall back to writing
-  # the request log to stderr.
-  # access-log-path = ""
-
-  # Filters which requests should be logged. Each filter is of the pattern NNN, NNX, or NXX where N is
-  # a number and X is a wildcard for any number. To filter all 5xx responses, use the string 5xx.
-  # If multiple filters are used, then only one has to match. The default is to have no filters which
-  # will cause every request to be printed.
-  # access-log-status-filters = []
-
-  # Determines whether detailed write logging is enabled.
-  # write-tracing = false
-
-  # Determines whether the pprof endpoint is enabled. This endpoint is used for
-  # troubleshooting and monitoring.
-  # pprof-enabled = true
-  pprof-enabled = false
-
-  # Enables a pprof endpoint that binds to localhost:6060 immediately on startup.
-  # This is only needed to debug startup issues.
-  # debug-pprof-enabled = false
-
-  # Determines whether HTTPS is enabled.
-  # https-enabled = false
-
-  # The SSL certificate to use when HTTPS is enabled.
-  # https-certificate = "/etc/ssl/influxdb.pem"
-
-  # Use a separate private key location.
-  # https-private-key = ""
-
-  # The JWT auth shared secret to validate requests using JSON web tokens.
-  # shared-secret = ""
-
-  # The default chunk size for result sets that should be chunked.
-  # max-row-limit = 0
-
-  # The maximum number of HTTP connections that may be open at once. New connections that
-  # would exceed this limit are dropped. Setting this value to 0 disables the limit.
-  # max-connection-limit = 0
-
-  # Enable http service over unix domain socket
-  # unix-socket-enabled = false
-
-  # The path of the unix domain socket.
-  # bind-socket = "/var/run/influxdb.sock"
-
-  # The maximum size of a client request body, in bytes. Setting this value to 0 disables the limit.
-  # max-body-size = 25000000
-
-  # The maximum number of writes processed concurrently.
-  # Setting this to 0 disables the limit.
-  # max-concurrent-write-limit = 0
-
-  # The maximum number of writes queued for processing.
-  # Setting this to 0 disables the limit.
-  # max-enqueued-write-limit = 0
-
-  # The maximum duration for a write to wait in the queue to be processed.
-  # Setting this to 0 or setting max-concurrent-write-limit to 0 disables the limit.
-  # enqueued-write-timeout = 0
-
-###
-### [logging]
-###
-### Controls how the logger emits logs to the output.
-###
-
-[logging]
-  # Determines which log encoder to use for logs. Available options
-  # are auto, logfmt, and json. auto will use a more user-friendly
-  # output format if the output terminal is a TTY, but the format is not as
-  # easily machine-readable. When the output is a non-TTY, auto will use
-  # logfmt.
-  # format = "auto"
-
-  # Determines which level of logs will be emitted. The available levels
-  # are error, warn, info, and debug. Logs that are equal to or above the
-  # specified level will be emitted.
-  # level = "info"
-
-  # Suppresses the logo output that is printed when the program is started.
-  # The logo is always suppressed if STDOUT is not a TTY.
-  # suppress-logo = false
-
-###
-### [subscriber]
-###
-### Controls the subscriptions, which can be used to fork a copy of all data
-### received by the InfluxDB host.
-###
-
-[subscriber]
-  # Determines whether the subscriber service is enabled.
-  # enabled = true
-
-  # The default timeout for HTTP writes to subscribers.
-  # http-timeout = "30s"
-
-  # Allows insecure HTTPS connections to subscribers. This is useful when testing with self-
-  # signed certificates.
-  # insecure-skip-verify = false
-
-  # The path to the PEM encoded CA certs file. If set to the empty string, the default system certs will be used
-  # ca-certs = ""
-
-  # The number of writer goroutines processing the write channel.
-  # write-concurrency = 40
-
-  # The number of in-flight writes buffered in the write channel.
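The [subscriber] block above only tunes how writes are delivered to subscription endpoints; the subscriptions themselves are registered through InfluxQL against the /query endpoint. A minimal Python sketch in the spirit of this repo's devstack/files/schema/influxdb_setup.py (the subscription name, database, retention policy and destination below are illustrative, not values used by this plugin):

    import urllib.parse
    import urllib.request

    URL = 'http://127.0.0.1:8086'

    def create_subscription(name, db, rp, destination):
        # Fork every point written to db.rp to the given endpoint.
        q = ('CREATE SUBSCRIPTION "%s" ON "%s"."%s" DESTINATIONS ALL \'%s\''
             % (name, db, rp, destination))
        data = urllib.parse.urlencode({'q': q}).encode()
        # Schema-changing InfluxQL statements are sent to /query as a POST.
        return urllib.request.urlopen(URL + '/query', data=data).read()

    # create_subscription('copy-all', 'mon', 'autogen', 'udp://127.0.0.1:9090')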
- # write-buffer-size = 1000 - - -### -### [[graphite]] -### -### Controls one or many listeners for Graphite data. -### - -[[graphite]] - # Determines whether the graphite endpoint is enabled. - # enabled = false - # database = "graphite" - # retention-policy = "" - # bind-address = ":2003" - # protocol = "tcp" - # consistency-level = "one" - - # These next lines control how batching works. You should have this enabled - # otherwise you could get dropped metrics or poor performance. Batching - # will buffer points in memory if you have many coming in. - - # Flush if this many points get buffered - # batch-size = 5000 - - # number of batches that may be pending in memory - # batch-pending = 10 - - # Flush at least this often even if we haven't hit buffer limit - # batch-timeout = "1s" - - # UDP Read buffer size, 0 means OS default. UDP listener will fail if set above OS max. - # udp-read-buffer = 0 - - ### This string joins multiple matching 'measurement' values providing more control over the final measurement name. - # separator = "." - - ### Default tags that will be added to all metrics. These can be overridden at the template level - ### or by tags extracted from metric - # tags = ["region=us-east", "zone=1c"] - - ### Each template line requires a template pattern. It can have an optional - ### filter before the template and separated by spaces. It can also have optional extra - ### tags following the template. Multiple tags should be separated by commas and no spaces - ### similar to the line protocol format. There can be only one default template. - # templates = [ - # "*.app env.service.resource.measurement", - # # Default template - # "server.*", - # ] - -### -### [collectd] -### -### Controls one or many listeners for collectd data. -### - -[[collectd]] - # enabled = false - # bind-address = ":25826" - # database = "collectd" - # retention-policy = "" - # - # The collectd service supports either scanning a directory for multiple types - # db files, or specifying a single db file. - # typesdb = "/usr/local/share/collectd" - # - # security-level = "none" - # auth-file = "/etc/collectd/auth_file" - - # These next lines control how batching works. You should have this enabled - # otherwise you could get dropped metrics or poor performance. Batching - # will buffer points in memory if you have many coming in. - - # Flush if this many points get buffered - # batch-size = 5000 - - # Number of batches that may be pending in memory - # batch-pending = 10 - - # Flush at least this often even if we haven't hit buffer limit - # batch-timeout = "10s" - - # UDP Read buffer size, 0 means OS default. UDP listener will fail if set above OS max. - # read-buffer = 0 - - # Multi-value plugins can be handled two ways. - # "split" will parse and store the multi-value plugin data into separate measurements - # "join" will parse and store the multi-value plugin as a single multi-value measurement. - # "split" is the default behavior for backward compatibility with previous versions of influxdb. - # parse-multivalue-plugin = "split" -### -### [opentsdb] -### -### Controls one or many listeners for OpenTSDB data. -### - -[[opentsdb]] - # enabled = false - # bind-address = ":4242" - # database = "opentsdb" - # retention-policy = "" - # consistency-level = "one" - # tls-enabled = false - # certificate= "/etc/ssl/influxdb.pem" - - # Log an error for every malformed point. - # log-point-errors = true - - # These next lines control how batching works. 
You should have this enabled - # otherwise you could get dropped metrics or poor performance. Only points - # metrics received over the telnet protocol undergo batching. - - # Flush if this many points get buffered - # batch-size = 1000 - - # Number of batches that may be pending in memory - # batch-pending = 5 - - # Flush at least this often even if we haven't hit buffer limit - # batch-timeout = "1s" - -### -### [[udp]] -### -### Controls the listeners for InfluxDB line protocol data via UDP. -### - -[[udp]] - # enabled = false - # bind-address = ":8089" - # database = "udp" - # retention-policy = "" - - # InfluxDB precision for timestamps on received points ("" or "n", "u", "ms", "s", "m", "h") - # precision = "" - - # These next lines control how batching works. You should have this enabled - # otherwise you could get dropped metrics or poor performance. Batching - # will buffer points in memory if you have many coming in. - - # Flush if this many points get buffered - # batch-size = 5000 - - # Number of batches that may be pending in memory - # batch-pending = 10 - - # Will flush at least this often even if we haven't hit buffer limit - # batch-timeout = "1s" - - # UDP Read buffer size, 0 means OS default. UDP listener will fail if set above OS max. - # read-buffer = 0 - -### -### [continuous_queries] -### -### Controls how continuous queries are run within InfluxDB. -### - -[continuous_queries] - # Determines whether the continuous query service is enabled. - # enabled = true - - # Controls whether queries are logged when executed by the CQ service. - # log-enabled = true - - # Controls whether queries are logged to the self-monitoring data store. - # query-stats-enabled = false - - # interval for how often continuous queries will be checked if they need to run - # run-interval = "1s" - -### -### [tls] -### -### Global configuration settings for TLS in InfluxDB. -### - -[tls] - # Determines the available set of cipher suites. See https://golang.org/pkg/crypto/tls/#pkg-constants - # for a list of available ciphers, which depends on the version of Go (use the query - # SHOW DIAGNOSTICS to see the version of Go used to build InfluxDB). If not specified, uses - # the default settings from Go's crypto/tls package. - # ciphers = [ - # "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305", - # "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256", - # ] - - # Minimum version of the tls protocol that will be negotiated. If not specified, uses the - # default settings from Go's crypto/tls package. - # min-version = "tls1.2" - - # Maximum version of the tls protocol that will be negotiated. If not specified, uses the - # default settings from Go's crypto/tls package. - # max-version = "tls1.2" diff --git a/devstack/files/kafka/kafka-server-start.sh b/devstack/files/kafka/kafka-server-start.sh deleted file mode 100644 index 87b31d56a..000000000 --- a/devstack/files/kafka/kafka-server-start.sh +++ /dev/null @@ -1,38 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -if [ $# -lt 1 ]; -then - echo "USAGE: $0 [-daemon] server.properties" - exit 1 -fi -base_dir=$(dirname $0) -export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/log4j.properties" -export KAFKA_HEAP_OPTS="-Xms256m -Xmx256m" - -EXTRA_ARGS="-name kafkaServer -loggc" - -COMMAND=$1 -case $COMMAND in - -daemon) - EXTRA_ARGS="-daemon "$EXTRA_ARGS - shift - ;; - *) - ;; -esac - -exec $base_dir/kafka-run-class.sh $EXTRA_ARGS kafka.Kafka $@ diff --git a/devstack/files/kafka/kafka.service b/devstack/files/kafka/kafka.service deleted file mode 100644 index 9b311704d..000000000 --- a/devstack/files/kafka/kafka.service +++ /dev/null @@ -1,30 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -[Unit] -Description=Kafka -Requires=network.target -After=network.target zookeeper.service - -[Service] -User=kafka -Group=kafka -LimitNOFILE=32768:32768 -Environment="LOG_DIR=/var/log/kafka" -Environment="KAFKA_HEAP_OPTS=-Xmx128m" -ExecStart=/opt/kafka/bin/kafka-server-start.sh /etc/kafka/server.properties -Restart=on-failure - -[Install] -WantedBy=multi-user.target diff --git a/devstack/files/kafka/server.properties b/devstack/files/kafka/server.properties deleted file mode 100644 index f592903e5..000000000 --- a/devstack/files/kafka/server.properties +++ /dev/null @@ -1,146 +0,0 @@ -# -# (C) Copyright 2015 Hewlett Packard Enterprise Development Company LP -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# see kafka.server.KafkaConfig for additional details and defaults - -############################# Server Basics ############################# - -# The id of the broker. This must be set to a unique integer for each broker. -broker.id=0 - -############################# Socket Server Settings ############################# - -# The address the socket server listens on. 
It will get the value returned from
-# java.net.InetAddress.getCanonicalHostName() if not configured.
-# FORMAT:
-# listeners = listener_name://host_name:port
-# EXAMPLE:
-# listeners = PLAINTEXT://your.host.name:9092
-
-# Hostname and port the broker will advertise to producers and consumers. If not set,
-# it uses the value for "listeners" if configured. Otherwise, it will use the value
-# returned from java.net.InetAddress.getCanonicalHostName().
-#advertised.listeners=PLAINTEXT://your.host.name:9092
-
-# Maps listener names to security protocols, the default is for them to be the same. See the config documentation for more details
-#listener.security.protocol.map=PLAINTEXT:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL
-
-# The number of threads that the server uses for receiving requests from the network and sending responses to the network
-num.network.threads=3
-
-# The number of threads that the server uses for processing requests, which may include disk I/O
-num.io.threads=2
-
-# The send buffer (SO_SNDBUF) used by the socket server
-socket.send.buffer.bytes=102400
-
-# The receive buffer (SO_RCVBUF) used by the socket server
-socket.receive.buffer.bytes=102400
-
-# The maximum size of a request that the socket server will accept (protection against OOM)
-socket.request.max.bytes=104857600
-
-
-############################# Log Basics #############################
-
-# A comma separated list of directories under which to store log files
-log.dirs=/var/kafka
-
-auto.create.topics.enable=false
-# The default number of log partitions per topic. More partitions allow greater
-# parallelism for consumption, but this will also result in more files across
-# the brokers.
-num.partitions=1
-
-# The number of threads per data directory to be used for log recovery at startup and flushing at shutdown.
-# This value is recommended to be increased for installations with data dirs located in a RAID array.
-num.recovery.threads.per.data.dir=1
-
-# Specify the message format version the broker will use to append messages to
-# the logs. Once consumers are upgraded, one can change the message format and
-# enjoy the new message format that includes new timestamp and improved
-# compression.
-# (TODO) Use new message format after updating consumers
-log.message.format.version=0.9.0.0
-
-############################# Internal Topic Settings #############################
-# The replication factor for the group metadata internal topics "__consumer_offsets" and "__transaction_state"
-# For anything other than development testing, a value greater than 1, such as 3, is recommended to ensure availability.
-offsets.topic.replication.factor=1
-transaction.state.log.replication.factor=1
-transaction.state.log.min.isr=1
-
-############################# Log Flush Policy #############################
-
-# Messages are immediately written to the filesystem but by default we only fsync() to sync
-# the OS cache lazily. The following configurations control the flush of data to disk.
-# There are a few important trade-offs here:
-# 1. Durability: Unflushed data may be lost if you are not using replication.
-# 2. Latency: Very large flush intervals may lead to latency spikes when the flush does occur as there will be a lot of data to flush.
-# 3. Throughput: The flush is generally the most expensive operation, and a small flush interval may lead to excessive seeks.
-# The settings below allow one to configure the flush policy to flush data after a period of time or
-# every N messages (or both).
This can be done globally and overridden on a per-topic basis. - -# The number of messages to accept before forcing a flush of data to disk -log.flush.interval.messages=10000 - -# The maximum amount of time a message can sit in a log before we force a flush -log.flush.interval.ms=1000 - -############################# Log Retention Policy ############################# - -# The following configurations control the disposal of log segments. The policy can -# be set to delete segments after a period of time, or after a given size has accumulated. -# A segment will be deleted whenever *either* of these criteria are met. Deletion always happens -# from the end of the log. - -# The minimum age of a log file to be eligible for deletion due to age -log.retention.hours=24 - -# A size-based retention policy for logs. Segments are pruned from the log unless the remaining -# segments drop below log.retention.bytes. Functions independently of log.retention.hours. -log.retention.bytes=104857600 - -# The maximum size of a log segment file. When this size is reached a new log segment will be created. -log.segment.bytes=104857600 - -# The interval at which log segments are checked to see if they can be deleted according -# to the retention policies -log.retention.check.interval.ms=300000 - -############################# Zookeeper ############################# - -# Zookeeper connection string (see zookeeper docs for details). -# This is a comma separated host:port pairs, each corresponding to a zk -# server. e.g. "127.0.0.1:3000,127.0.0.1:3001,127.0.0.1:3002". -# You can also append an optional chroot string to the urls to specify the -# root directory for all kafka znodes. -zookeeper.connect=127.0.0.1:2181 - -# Timeout in ms for connecting to zookeeper -zookeeper.connection.timeout.ms=6000 - - -############################# Group Coordinator Settings ############################# - -# The following configuration specifies the time, in milliseconds, that the GroupCoordinator will delay the initial consumer rebalance. -# The rebalance will be further delayed by the value of group.initial.rebalance.delay.ms as new members join the group, up to a maximum of max.poll.interval.ms. -# The default value for this is 3 seconds. -# We override this to 0 here as it makes for a better out-of-the-box experience for development and testing. -# However, in production environments the default value of 3 seconds is more suitable as this will help to avoid unnecessary, and potentially expensive, rebalances during application startup. -group.initial.rebalance.delay.ms=0 \ No newline at end of file diff --git a/devstack/files/kibana/kibana.yml b/devstack/files/kibana/kibana.yml deleted file mode 100644 index b45738a3b..000000000 --- a/devstack/files/kibana/kibana.yml +++ /dev/null @@ -1,122 +0,0 @@ -# Kibana is served by a back end server. This setting specifies the port to use. -server.port: %KIBANA_SERVICE_PORT% - -# Specifies the address to which the Kibana server will bind. IP addresses and host names are both valid values. -# The default is 'localhost', which usually means remote machines will not be able to connect. -# To allow connections from remote users, set this parameter to a non-loopback address. -server.host: %KIBANA_SERVICE_HOST% - -# Enables you to specify a path to mount Kibana at if you are running behind a proxy. -# Use the `server.rewriteBasePath` setting to tell Kibana if it should remove the basePath -# from requests it receives, and to prevent a deprecation warning at startup. 
-# This setting cannot end in a slash. -server.basePath: %KIBANA_SERVER_BASE_PATH% - -# Specifies whether Kibana should rewrite requests that are prefixed with -# `server.basePath` or require that they are rewritten by your reverse proxy. -# This setting was effectively always `false` before Kibana 6.3 and will -# default to `true` starting in Kibana 7.0. -server.rewriteBasePath: false - -# The maximum payload size in bytes for incoming server requests. -#server.maxPayloadBytes: 1048576 - -# The Kibana server's name. This is used for display purposes. -#server.name: "your-hostname" - -# The URLs of the Elasticsearch instances to use for all your queries. -elasticsearch.hosts: ["http://%ES_SERVICE_BIND_HOST%:%ES_SERVICE_BIND_PORT%"] - -# When this setting's value is true Kibana uses the hostname specified in the server.host -# setting. When the value of this setting is false, Kibana uses the hostname of the host -# that connects to this Kibana instance. -#elasticsearch.preserveHost: true - -# Kibana uses an index in Elasticsearch to store saved searches, visualizations and -# dashboards. Kibana creates a new index if the index doesn't already exist. -kibana.index: ".kibana" - -# The default application to load. -kibana.defaultAppId: "discover" - -# If your Elasticsearch is protected with basic authentication, these settings provide -# the username and password that the Kibana server uses to perform maintenance on the Kibana -# index at startup. Your Kibana users still need to authenticate with Elasticsearch, which -# is proxied through the Kibana server. -#elasticsearch.username: "kibana" -#elasticsearch.password: "pass" - -# Enables SSL and paths to the PEM-format SSL certificate and SSL key files, respectively. -# These settings enable SSL for outgoing requests from the Kibana server to the browser. -#server.ssl.enabled: false -#server.ssl.certificate: /path/to/your/server.crt -#server.ssl.key: /path/to/your/server.key - -# Optional settings that provide the paths to the PEM-format SSL certificate and key files. -# These files validate that your Elasticsearch backend uses the same key files. -#elasticsearch.ssl.certificate: /path/to/your/client.crt -#elasticsearch.ssl.key: /path/to/your/client.key - -# Optional setting that enables you to specify a path to the PEM file for the certificate -# authority for your Elasticsearch instance. -#elasticsearch.ssl.certificateAuthorities: [ "/path/to/your/CA.pem" ] - -# To disregard the validity of SSL certificates, change this setting's value to 'none'. -#elasticsearch.ssl.verificationMode: full - -# Time in milliseconds to wait for Elasticsearch to respond to pings. Defaults to the value of -# the elasticsearch.requestTimeout setting. -elasticsearch.pingTimeout: 1500 - -# Time in milliseconds to wait for responses from the back end or Elasticsearch. This value -# must be a positive integer. -elasticsearch.requestTimeout: 300000 - -# List of Kibana client-side headers to send to Elasticsearch. To send *no* client-side -# headers, set this value to [] (an empty list). -#elasticsearch.requestHeadersWhitelist: [ authorization ] - -# Header names and values that are sent to Elasticsearch. Any custom headers cannot be overwritten -# by client-side headers, regardless of the elasticsearch.requestHeadersWhitelist configuration. -#elasticsearch.customHeaders: {} - -# Time in milliseconds for Elasticsearch to wait for responses from shards. Set to 0 to disable. 
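As elsewhere in these devstack templates, the %UPPER_CASE% tokens in this kibana.yml are placeholders the plugin fills in at install time, presumably via sed-style substitution. A rough Python equivalent of that step (the token map shown is illustrative):

    def render(template, values):
        # Replace each %TOKEN% occurrence with its configured value.
        for key, value in values.items():
            template = template.replace('%' + key + '%', str(value))
        return template

    # render('server.port: %KIBANA_SERVICE_PORT%', {'KIBANA_SERVICE_PORT': 5601})
    # -> 'server.port: 5601'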
-elasticsearch.shardTimeout: 0 - -# Time in milliseconds to wait for Elasticsearch at Kibana startup before retrying. -elasticsearch.startupTimeout: 5000 - -# Logs queries sent to Elasticsearch. Requires logging.verbose set to true. -#elasticsearch.logQueries: false - -# Specifies the path where Kibana creates the process ID file. -#pid.file: /var/run/kibana.pid - -# Enables you specify a file where Kibana stores log output. -#logging.dest: stdout - -# Set the value of this setting to true to suppress all logging output. -logging.silent: false - -# Set the value of this setting to true to suppress all logging output other than error messages. -logging.quiet: false - -# Set the value of this setting to true to log all events, including system usage information -# and all requests. -logging.verbose: true - -# Set the interval in milliseconds to sample system and process performance -# metrics. Minimum is 100ms. Defaults to 5000. -#ops.interval: 5000 - -# Specifies locale to be used for all localizable strings, dates and number formats. -# Supported languages are the following: English - en , by default , Chinese - zh-CN . -#i18n.locale: "en" - -optimize.useBundleCache: False -console.enabled: False - -monasca-kibana-plugin.enabled: True -monasca-kibana-plugin.auth_uri: %KEYSTONE_AUTH_URI% -monasca-kibana-plugin.elasticsearch.url: "http://%ES_SERVICE_BIND_HOST%:%ES_SERVICE_BIND_PORT%" -monasca-kibana-plugin.cookie.isSecure: False \ No newline at end of file diff --git a/devstack/files/monasca-agent/elastic.yaml b/devstack/files/monasca-agent/elastic.yaml deleted file mode 100644 index 154d20ab5..000000000 --- a/devstack/files/monasca-agent/elastic.yaml +++ /dev/null @@ -1,7 +0,0 @@ -init_config: -instances: -- url: http://{{IP}}:9200 - cluster_stats: true - pshard_stats: true - index_stats: true - pending_task_stats: true \ No newline at end of file diff --git a/devstack/files/monasca-agent/host_alive.yaml b/devstack/files/monasca-agent/host_alive.yaml deleted file mode 100644 index 58998965f..000000000 --- a/devstack/files/monasca-agent/host_alive.yaml +++ /dev/null @@ -1,8 +0,0 @@ -init_config: - ping_timeout: 1 - ssh_port: 22 - ssh_timeout: 0.5 -instances: - - alive_test: ssh - host_name: 127.0.0.1 - name: 127.0.0.1 diff --git a/devstack/files/monasca-agent/http_check.yaml b/devstack/files/monasca-agent/http_check.yaml deleted file mode 100644 index 1a66f37fd..000000000 --- a/devstack/files/monasca-agent/http_check.yaml +++ /dev/null @@ -1,27 +0,0 @@ -init_config: null -instances: - - name: keystone - dimensions: - service: keystone - timeout: 3 - url: http://127.0.0.1/identity - - name: mysql - dimensions: - service: mysql - timeout: 3 - url: http://127.0.0.1:3306 - - name: influxdb - dimensions: - service: influxdb - timeout: 3 - url: http://127.0.0.1:8086/ping - - name: elasticsearch - dimensions: - service: elasticsearch - timeout: 3 - url: http://{{IP}}:9200/_cat/health - - name: kibana - dimensions: - service: kibana - timeout: 3 - url: http://{{IP}}:5601/api/status diff --git a/devstack/files/monasca-agent/kafka_consumer.yaml b/devstack/files/monasca-agent/kafka_consumer.yaml deleted file mode 100644 index 164cc6f83..000000000 --- a/devstack/files/monasca-agent/kafka_consumer.yaml +++ /dev/null @@ -1,14 +0,0 @@ -init_config: - -instances: - - built_by: Kafka - consumer_groups: - 1_metrics: - metrics: [] - thresh-event: - events: [] - thresh-metric: - metrics: [] - kafka_connect_str: 127.0.0.1:9092 - name: 127.0.0.1:9092 - per_partition: false diff --git 
a/devstack/files/monasca-agent/monasca-reconfigure b/devstack/files/monasca-agent/monasca-reconfigure deleted file mode 100644 index 108c9042e..000000000 --- a/devstack/files/monasca-agent/monasca-reconfigure +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/sh -'/opt/monasca-agent/bin/monasca-setup' \ - -u 'monasca-agent' \ - -p 'password' \ - -s '%MONASCA_SERVICE_TYPE%' \ - \ - --keystone_url '%KEYSTONE_AUTH_URI%' \ - --project_name 'mini-mon' \ - --project_domain_name '%SERVICE_DOMAIN_NAME%' \ - --user_domain_name '%SERVICE_DOMAIN_NAME%' \ - \ - --service_type '%MONASCA_SERVICE_TYPE%' \ - --endpoint_type 'public' \ - --region_name '%REGION_NAME%' \ - \ - --check_frequency '15' \ - --log_level 'DEBUG' \ - --overwrite \ - --system_only \ - --monasca_statsd_port %MONASCA_STATSD_PORT% \ No newline at end of file diff --git a/devstack/files/monasca-agent/mysql.yaml b/devstack/files/monasca-agent/mysql.yaml deleted file mode 100644 index c1c83cb86..000000000 --- a/devstack/files/monasca-agent/mysql.yaml +++ /dev/null @@ -1,8 +0,0 @@ -init_config: -instances: - - built_by: MySQL - name: mysql - server: 127.0.0.1 - port: 3306 - user: root - pass: secretdatabase diff --git a/devstack/files/monasca-agent/process.yaml b/devstack/files/monasca-agent/process.yaml deleted file mode 100644 index 42a90831a..000000000 --- a/devstack/files/monasca-agent/process.yaml +++ /dev/null @@ -1,87 +0,0 @@ -init_config: - -instances: - - name: influxd - detailed: true - dimensions: - service: influxd - exact_match: false - search_string: - - influxd - - name: monasca-statsd - detailed: true - dimensions: - service: monasca-statsd - exact_match: false - search_string: - - monasca-statsd - - name: monasca-notification - detailed: true - dimensions: - service: monasca-notification - exact_match: false - search_string: - - monasca-notification - - name: persister - detailed: true - dimensions: - service: persister - exact_match: false - search_string: - - persister - - name: storm - detailed: true - dimensions: - service: storm - exact_match: false - search_string: - - storm - - name: monasca-api - detailed: true - dimensions: - service: uwsgi - exact_match: false - search_string: - - uwsgi - - name: monasca-collector - detailed: true - dimensions: - service: monasca-collector - exact_match: false - search_string: - - monasca-collector - - name: memcached - detailed: true - dimensions: - service: memcached - exact_match: false - search_string: - - memcached - - name: monasca-forwarder - detailed: true - dimensions: - service: monasca-forwarder - exact_match: false - search_string: - - monasca-forwarder - - name: zookeeper - detailed: true - dimensions: - service: zookeeper - exact_match: false - search_string: - - zookeeper - - name: kafka - detailed: true - dimensions: - service: kafka - exact_match: false - search_string: - - kafka - - name: mysqld - detailed: true - dimensions: - service: mysqld - exact_match: false - search_string: - - mysqld diff --git a/devstack/files/monasca-agent/zk.yaml b/devstack/files/monasca-agent/zk.yaml deleted file mode 100644 index b67f80e55..000000000 --- a/devstack/files/monasca-agent/zk.yaml +++ /dev/null @@ -1,6 +0,0 @@ -init_config: - -instances: - - host: 127.0.0.1 - port: 2181 - timeout: 3 diff --git a/devstack/files/monasca-api/apache-monasca-api.template b/devstack/files/monasca-api/apache-monasca-api.template deleted file mode 100644 index 5748e1443..000000000 --- a/devstack/files/monasca-api/apache-monasca-api.template +++ /dev/null @@ -1,18 +0,0 @@ -Listen %PUBLICPORT% - - - 
<VirtualHost *:%PUBLICPORT%>
-    WSGIDaemonProcess monasca-api user=%USER% processes=%APIWORKERS% threads=1 display-name=%{GROUP} %VIRTUALENV%
-    WSGIProcessGroup monasca-api
-    WSGIScriptAlias / %PUBLICWSGI%/wsgi.py
-    WSGIApplicationGroup %{GLOBAL}
-
-    WSGIPassAuthorization On
-
-    LogLevel info
-    ErrorLog /var/log/%APACHE_NAME%/monasca-api.log
-    CustomLog /var/log/%APACHE_NAME%/monasca-api_access.log combined
-
-    SetEnv no-gzip 1
-    AddDefaultCharset utf-8
-
-</VirtualHost>
\ No newline at end of file
diff --git a/devstack/files/monasca-api/api-config.yml b/devstack/files/monasca-api/api-config.yml
deleted file mode 100644
index 31a38c501..000000000
--- a/devstack/files/monasca-api/api-config.yml
+++ /dev/null
@@ -1,186 +0,0 @@
-#
-# (C) Copyright 2015 Hewlett Packard Enterprise Development Company LP
-# Copyright 2017 Fujitsu LIMITED
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# The region for which all metrics passing through this server will be persisted
-region: "useast"
-
-maxQueryLimit: 10000
-
-# Whether this server is running on a secure port
-accessedViaHttps: false
-
-# Topic for publishing metrics to
-metricsTopic: metrics
-
-# Topic for publishing domain events to
-eventsTopic: events
-
-validNotificationPeriods:
-  - 60
-
-kafka:
-  brokerUris:
-    - "%KAFKA_HOST%:9092"
-  zookeeperUris:
-    - "127.0.0.1:2181"
-  healthCheckTopic: healthcheck
-
-mysql:
-  driverClass: com.mysql.jdbc.Driver
-  url: "jdbc:mysql://%DATABASE_HOST%:%DATABASE_PORT%/mon?connectTimeout=5000&autoReconnect=true&useSSL=true&useLegacyDatetimeCode=false&serverTimezone=UTC&characterEncoding=utf8"
-  user: "%DATABASE_USER%"
-  password: "%DATABASE_PASSWORD%"
-  maxWaitForConnection: 1s
-  validationQuery: "/* MyService Health Check */ SELECT 1"
-  minSize: 8
-  maxSize: 32
-  checkConnectionWhileIdle: false
-  checkConnectionOnBorrow: true
-
-# Possible options for dataSourceClassName are:
-# - com.mysql.jdbc.jdbc2.optional.MysqlDataSource
-# - org.postgresql.ds.PGPoolingDataSource
-# HINT: dataSourceUrl is relevant only for mysql
-hibernate:
-  supportEnabled: %MONASCA_DATABASE_USE_ORM%
-  providerClass: com.zaxxer.hikari.hibernate.HikariConnectionProvider
-  dataSourceClassName: %MONASCA_API_DATABASE_ENGINE%
-  dataSourceUrl: jdbc:mysql://%MYSQL_HOST%:%MYSQL_PORT%/mon
-  serverName: %DATABASE_HOST%
-  portNumber: %DATABASE_PORT%
-  databaseName: mon
-  user: %DATABASE_USER%
-  password: %DATABASE_PASSWORD%
-  initialConnections: 25
-  maxConnections: 100
-  autoConfig: validate
-
-databaseConfiguration:
-  databaseType: "%MONASCA_METRICS_DB%"
-
-# Uncomment if databaseType is influxDB
-influxDB:
-  # Retention policy may be left blank to indicate default policy.
- retentionPolicy: - maxHttpConnections: 100 - gzip: true - name: "mon" - url: "http://%INFLUXDB_HOST%:%INFLUXDB_PORT%" - user: "mon_api" - password: "password" - - -# Uncomment if databaseType is vertica -# As of 7/10 there is a bug in the monasca-api that requires this section even if databaseType is set to influxdb -vertica: - driverClass: com.vertica.jdbc.Driver - url: "jdbc:vertica://%VERTICA_HOST%:5433/mon" - user: "mon_api" - password: "password" - maxWaitForConnection: 1s - validationQuery: "/* MyService Health Check */ SELECT 1" - minSize: 4 - maxSize: 32 - checkConnectionWhileIdle: true - # - # vertica database hint to be added to SELECT - # statements. For example, the hint below is used - # to tell vertica that the query can be satisfied - # locally (replicated projection). - # - # dbHint: "/*+KV(01)*/" - dbHint: "" - -middleware: - enabled: true - serverVIP: "%KEYSTONE_SERVICE_HOST%" - serverPort: %KEYSTONE_SERVICE_PORT% - useHttps: False - truststore: "None" - truststorePassword: "None" - connTimeout: 5000 - connSSLClientAuth: False - keystore: "None" - keystorePassword: "None" - connPoolMaxActive: 3 - connPoolMaxIdle: 3 - connPoolEvictPeriod: 600000 - connPoolMinIdleTime: 600000 - connRetryTimes: 2 - connRetryInterval: 50 - defaultAuthorizedRoles: [monasca-user] - readOnlyAuthorizedRoles: [monasca-read-only-user] - agentAuthorizedRoles: [monasca-agent] - delegateAuthorizedRole: admin - adminRole: admin - adminAuthMethod: password - adminUser: "admin" - adminPassword: "%ADMIN_PASSWORD%" - adminProjectId: - adminProjectName: "admin" - adminToken: - timeToCacheToken: 600 - maxTokenCacheSize: 1048576 - -server: - applicationConnectors: - - type: http - port: %MONASCA_API_SERVICE_PORT% - bindHost: %MONASCA_API_SERVICE_HOST% - maxRequestHeaderSize: 16KiB # Allow large headers used by keystone tokens - adminConnectors: - - type: http - port: %MONASCA_API_ADMIN_PORT% - bindHost: %MONASCA_API_SERVICE_HOST% - requestLog: - timeZone: UTC - appenders: - - type: file - currentLogFilename: "/var/log/monasca/api/request.log" - threshold: ALL - archive: true - archivedLogFilenamePattern: "/var/log/monasca/api/request-%d.log.gz" - archivedFileCount: 5 - -# Logging settings. -logging: - - # The default level of all loggers. Can be OFF, ERROR, WARN, INFO, DEBUG, TRACE, or ALL. - level: WARN - - # Logger-specific levels. - loggers: - - # Sets the level for 'com.example.app' to DEBUG. - com.example.app: DEBUG - - appenders: - - type: console - threshold: ALL - timeZone: UTC - target: stdout - logFormat: # TODO - - - type: file - currentLogFilename: "/var/log/monasca/api/monasca-api.log" - threshold: ALL - archive: true - archivedLogFilenamePattern: "/var/log/monasca/api/monasca-api-%d.log.gz" - archivedFileCount: 5 - timeZone: UTC - logFormat: # TODO diff --git a/devstack/files/monasca-api/monasca-api.service b/devstack/files/monasca-api/monasca-api.service deleted file mode 100644 index 89555e743..000000000 --- a/devstack/files/monasca-api/monasca-api.service +++ /dev/null @@ -1,29 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Startup script for the Monasca API - -[Unit] -Description=Monasca API Java app -Requires=network.target -After=network.target kafka.service influxdb.service mysql.service - -[Service] -User=mon-api -Group=monasca -ExecStart=/usr/bin/java -Dfile.encoding=UTF-8 -Xmx128m -cp /opt/monasca/monasca-api.jar monasca.api.MonApiApplication server /etc/monasca/api-config.yml -Restart=on-failure - -[Install] -WantedBy=multi-user.target diff --git a/devstack/files/monasca-log-agent/agent.conf b/devstack/files/monasca-log-agent/agent.conf deleted file mode 100644 index f70aedd29..000000000 --- a/devstack/files/monasca-log-agent/agent.conf +++ /dev/null @@ -1,41 +0,0 @@ -# -# Copyright 2016 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -input { - file { - add_field => { "dimensions" => { "service" => "system" }} - path => "/var/log/syslog" - tags => ["syslog"] - codec => multiline { - negate => "true" - pattern => "^%{SYSLOGTIMESTAMP}" - what => "previous" - } - } -} - -output { - monasca_log_api { - monasca_log_api_url => "%MONASCA_API_URI_V2%" - keystone_api_url => "%KEYSTONE_AUTH_URI%" - project_name => "mini-mon" - username => "monasca-agent" - password => "password" - user_domain_name => "default" - project_domain_name => "default" - dimensions => [ "hostname:devstack" ] - } -} diff --git a/devstack/files/monasca-log-metrics/log-metrics.conf b/devstack/files/monasca-log-metrics/log-metrics.conf deleted file mode 100644 index e48468be6..000000000 --- a/devstack/files/monasca-log-metrics/log-metrics.conf +++ /dev/null @@ -1,83 +0,0 @@ -# Copyright 2016 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
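The filter chain that follows turns each warning or error log event from the transformed-log topic into a counter metric named log.<level>. A compact Python rendering of the same mapping, assuming the event shape used in this config (a sketch, not monasca code):

    import time

    def log_event_to_metric(event):
        # Mirrors the Ruby filter below: one metric per warning/error log event.
        level = event['log']['level'].lower()
        if level not in ('warning', 'error'):
            return None  # the pipeline drops everything else
        return {
            'name': 'log.%s' % level,           # e.g. log.warning, log.error
            'timestamp': time.time() * 1000.0,  # milliseconds, like log_ts
            'value': 1,
            'dimensions': event['log']['dimensions'],
            'value_meta': {},
        }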
- - -input { - kafka { - bootstrap_servers => "%KAFKA_SERVICE_HOST%:%KAFKA_SERVICE_PORT%" - topics => ["transformed-log"] - group_id => "log-metric" - client_id => "monasca_log_metrics" - consumer_threads => 4 - codec => json - } -} - - -filter { - - # drop logs that have not set log level - if ![log][level] { - drop { periodic_flush => true } - } else { - ruby { - code => " - log_level = event.get('[log][level]').downcase - event.set('[log][level]', log_level) - " - } - } - - # drop logs with log level not in warning,error - if [log][level] not in [warning,error] { - drop { periodic_flush => true } - } - - ruby { - code => " - log_level = event.get('[log][level]').downcase - log_ts = Time.now.to_f * 1000.0 - - # metric name - metric_name = 'log.%s' % log_level - - # build metric - metric = {} - metric['name'] = metric_name - metric['timestamp'] = log_ts - metric['value'] = 1 - metric['dimensions'] = event.get('[log][dimensions]') - metric['value_meta'] = {} - - event.set('[metric]',metric.to_hash) - " - } - - mutate { - remove_field => ["log", "@version", "@timestamp", "log_level_original", "tags"] - } - -} - - -output { - kafka { - bootstrap_servers => "%KAFKA_SERVICE_HOST%:%KAFKA_SERVICE_PORT%" - topic_id => "metrics" - client_id => "monasca_log_metrics" - compression_type => "none" - codec => json - } -} diff --git a/devstack/files/monasca-log-persister/persister.conf b/devstack/files/monasca-log-persister/persister.conf deleted file mode 100644 index 3ab01a5c0..000000000 --- a/devstack/files/monasca-log-persister/persister.conf +++ /dev/null @@ -1,71 +0,0 @@ -# -# Copyright 2016 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
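The persister pipeline below consumes the transformed-log topic and indexes each event into Elasticsearch, one index per tenant and day: the grok filter cuts the date out of @timestamp and the output expands logs-%{tenant}-%{index_date}. The index-naming rule, sketched in Python (field names taken from this config; illustrative only):

    def es_index_for(tenant_id, timestamp):
        # '2016-05-12T10:22:33.000Z' -> '2016-05-12'
        index_date = timestamp[:10]
        return 'logs-%s-%s' % (tenant_id, index_date)

    # es_index_for('mini-mon', '2016-05-12T10:22:33.000Z')
    # -> 'logs-mini-mon-2016-05-12'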
-#
-input {
-    kafka {
-        bootstrap_servers => "%KAFKA_SERVICE_HOST%:%KAFKA_SERVICE_PORT%"
-        codec => json
-        topics => ["transformed-log"]
-        group_id => "logstash-persister"
-    }
-}
-
-filter {
-    date {
-        match => ["[log][timestamp]", "UNIX"]
-        target => "@timestamp"
-    }
-
-    date {
-        match => ["creation_time", "UNIX"]
-        target => "creation_time"
-    }
-
-    grok {
-        match => {
-            "[@timestamp]" => "^(?<index_date>\d{4}-\d{2}-\d{2})"
-        }
-    }
-
-    if "dimensions" in [log] {
-        ruby {
-            code => "
-                fieldHash = event.get('[log][dimensions]')
-                fieldHash.each do |key, value|
-                    event.set(key,value)
-                end
-            "
-        }
-    }
-
-    mutate {
-        add_field => {
-            "message" => "%{[log][message]}"
-            "log_level" => "%{[log][level]}"
-            "tenant" => "%{[meta][tenantId]}"
-            "region" => "%{[meta][region]}"
-        }
-        remove_field => ["@version", "host", "type", "tags" ,"_index_date", "meta", "log"]
-    }
-}
-
-output {
-    elasticsearch {
-        index => "logs-%{tenant}-%{index_date}"
-        document_type => "log"
-        hosts => ["%ES_SERVICE_BIND_HOST%"]
-    }
-}
diff --git a/devstack/files/monasca-log-transformer/transformer.conf b/devstack/files/monasca-log-transformer/transformer.conf
deleted file mode 100644
index c284e3ee0..000000000
--- a/devstack/files/monasca-log-transformer/transformer.conf
+++ /dev/null
@@ -1,88 +0,0 @@
-#
-# Copyright 2016 FUJITSU LIMITED
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-input {
-    kafka {
-        bootstrap_servers => "%KAFKA_SERVICE_HOST%:%KAFKA_SERVICE_PORT%"
-        topics => ["log"]
-        group_id => "transformer-logstash-consumer"
-        codec => json
-    }
-}
-
-filter {
-    ruby {
-        code => 'event.set("message_tmp", event.get("[log][message]")[0..49])'
-    }
-    grok {
-        match => {
-            "message_tmp" => "(?i)(?<log_level>AUDIT|CRITICAL|DEBUG|INFO|TRACE|ERR(OR)?|WARN(ING)?)|\"level\":\s?(?<log_level>\d{2})"
-        }
-    }
-    if ! [log_level] {
-        grok {
-            match => {
-                "[log][message]" => "(?i)(?<log_level>AUDIT|CRITICAL|DEBUG|INFO|TRACE|ERR(OR)?|WARN(ING)?)|\"level\":\s?(?<log_level>\d{2})"
-            }
-        }
-    }
-    ruby {
-        init => '
-            LOG_LEVELS_MAP = {
-                # SYSLOG
-                "warn" => "Warning",
-                "err" => "Error",
-                # Bunyan errcodes
-                "10" => "Trace",
-                "20" => "Debug",
-                "30" => "Info",
-                "40" => "Warning",
-                "50" => "Error",
-                "60" => "Fatal"
-            }
-        '
-        code => '
-            if event.get("log_level")
-                # keep original value
-                log_level = event.get("log_level").downcase
-                if LOG_LEVELS_MAP.has_key?(log_level)
-                    event.set("log_level_original",event.get("log_level"))
-                    event.set("log_level",LOG_LEVELS_MAP[log_level])
-                else
-                    event.set("log_level",log_level.capitalize)
-                end
-            else
-                event.set("log_level","Unknown")
-            end
-        '
-    }
-
-    mutate {
-        add_field => {
-            "[log][level]" => "%{log_level}"
-        }
-        # remove temporary fields
-        remove_field => ["message","log_level", "message_tmp"]
-    }
-}
-
-output {
-    kafka {
-        codec => json
-        bootstrap_servers => "%KAFKA_SERVICE_HOST%:%KAFKA_SERVICE_PORT%"
-        topic_id => "transformed-log"
-    }
-}
\ No newline at end of file
diff --git a/devstack/files/monasca-persister/persister.yml b/devstack/files/monasca-persister/persister.yml
deleted file mode 100644
index e0593957f..000000000
--- a/devstack/files/monasca-persister/persister.yml
+++ /dev/null
@@ -1,176 +0,0 @@
-#
-# (C) Copyright 2015 Hewlett Packard Enterprise Development Company LP
-# Copyright (c) 2017 SUSE LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-name: monasca-persister
-
-alarmHistoryConfiguration:
-  batchSize: %MONASCA_PERSISTER_BATCH_SIZE%
-  numThreads: 1
-  maxBatchTime: %MONASCA_PERSISTER_MAX_BATCH_TIME%
-  commitBatchTime: %MONASCA_PERSISTER_COMMIT_BATCH_TIME%
-# See http://kafka.apache.org/documentation.html#api for semantics and defaults.
-  topic: alarm-state-transitions
-  groupId: 1_alarm-state-transitions
-  consumerId: "mini-mon"
-  clientId: 1
-
-metricConfiguration:
-  batchSize: %MONASCA_PERSISTER_BATCH_SIZE%
-  numThreads: %MONASCA_PERSISTER_METRIC_THREADS%
-  maxBatchTime: %MONASCA_PERSISTER_MAX_BATCH_TIME%
-  commitBatchTime: %MONASCA_PERSISTER_COMMIT_BATCH_TIME%
-# See http://kafka.apache.org/documentation.html#api for semantics and defaults.
-  topic: metrics
-  groupId: 1_metrics
-  consumerId: "mini-mon"
-  clientId: 1
-
-#Kafka settings.
-kafkaConfig:
-# See http://kafka.apache.org/documentation.html#api for semantics and defaults.
- zookeeperConnect: "%ZOOKEEPER_HOST%:2181" - socketTimeoutMs: 30000 - socketReceiveBufferBytes : 65536 - fetchMessageMaxBytes: 1048576 - queuedMaxMessageChunks: 10 - rebalanceMaxRetries: 4 - fetchMinBytes: 1 - fetchWaitMaxMs: 100 - rebalanceBackoffMs: 2000 - refreshLeaderBackoffMs: 200 - autoOffsetReset: largest - consumerTimeoutMs: 1000 - zookeeperSessionTimeoutMs : 60000 - zookeeperConnectionTimeoutMs : 60000 - zookeeperSyncTimeMs: 2000 - -# uncomment if database type is cassandra -cassandraDbConfiguration: - contactPoints: - - %CASSANDRADB_HOST% - port: 9042 - user: cassandra - password: cassandra - keyspace: monasca - localDataCenter: datacenter1 - maxConnections: 5 - maxRequests: 2048 - # socket time out in milliseconds when creating a new connection - connectionTimeout: 5000 - # how long the driver waits for a response from server. Must be - # longer than the server side timeouts in the cassandra.yaml - readTimeout: 60000 - - # number of retries in upsert query. The retry interval is exponential, - # i.e., 1, 2, 4, 8 ... seconds. Retry is blocking. - maxWriteRetries: 5 - maxBatches: 250 - maxDefinitionCacheSize: 2000000 - # ANY(0), - # ONE(1), - # TWO(2), - # THREE(3), - # QUORUM(4), - # ALL(5), - # LOCAL_QUORUM(6), - # EACH_QUORUM(7), - # SERIAL(8), - # LOCAL_SERIAL(9), - # LOCAL_ONE(10); - consistencyLevel: ONE - # number of days metric retention - retentionPolicy: 45 - -verticaMetricRepoConfig: - maxCacheSize: 2000000 - -databaseConfiguration: - databaseType: %MONASCA_PERSISTER_DB_TYPE% - -# Uncomment if databaseType is influxdb -influxDbConfiguration: - # Retention policy may be left blank to indicate default policy. - retentionPolicy: - maxHttpConnections: 100 - gzip: true - name: "mon" - url: "http://%INFLUXDB_HOST%:8086" - user: "mon_persister" - password: "password" - - -# Uncomment if databaseType is vertica -dataSourceFactory: - driverClass: com.vertica.jdbc.Driver - url: "jdbc:vertica://%VERTICA_HOST%:5433/mon" - user: "mon_persister" - password: "password" - properties: - ssl: false - # the maximum amount of time to wait on an empty pool before throwing an exception - maxWaitForConnection: 1s - # the SQL query to run when validating a connection's liveness - validationQuery: "/* MyService Health Check */ SELECT 1" - # the minimum number of connections to keep open - minSize: 8 - # the maximum number of connections to keep open - maxSize: 41 - # whether or not idle connections should be validated - checkConnectionWhileIdle: false - # the maximum lifetime of an idle connection - maxConnectionAge: 1 minute - -metrics: - frequency: 1 second - - -# Logging settings. -logging: - - # The default level of all loggers. Can be OFF, ERROR, WARN, INFO, - # DEBUG, TRACE, or ALL. - level: WARN - - # Logger-specific levels. - loggers: - - # Sets the level for 'com.example.app' to DEBUG. - com.example.app: DEBUG - # com.hpcloud: debug - # com.hpcloud.mon.persister.repository: DEBUG - - appenders: - - - type: file - threshold: INFO - archive: true - currentLogFilename: "%MONASCA_PERSISTER_LOG_DIR%/monasca-persister.log" - archivedLogFilenamePattern: "%MONASCA_PERSISTER_LOG_DIR%/monasca-persister.log-%d.log.gz" - archivedFileCount: 5 - # The timezone used to format dates. HINT: USE THE DEFAULT, UTC. 
- timeZone: UTC - -server: - applicationConnectors: - - type: http - port: %MONASCA_PERSISTER_APP_PORT% - bindHost: %MONASCA_PERSISTER_BIND_HOST% - adminConnectors: - - type: http - port: %MONASCA_PERSISTER_ADMIN_PORT% - bindHost: %MONASCA_PERSISTER_BIND_HOST% diff --git a/devstack/files/monasca-thresh/monasca-thresh b/devstack/files/monasca-thresh/monasca-thresh deleted file mode 100644 index dc5ce2954..000000000 --- a/devstack/files/monasca-thresh/monasca-thresh +++ /dev/null @@ -1,67 +0,0 @@ -#!/bin/bash -# -# (C) Copyright 2015 Hewlett Packard Enterprise Development Company LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -#    http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -### BEGIN INIT INFO -# Provides: monasca-thresh -# Required-Start: $nimbus -# Required-Stop: -# Default-Start: 2 3 4 5 -# Default-Stop: -# Short-Description: Monitoring threshold engine running under storm -# Description: -### END INIT INFO - -case "$1" in - start) - $0 status - if [ $? -ne 0 ]; then - sudo -Hu mon-thresh /opt/storm/current/bin/storm jar /opt/monasca/monasca-thresh.jar monasca.thresh.ThresholdingEngine /etc/monasca/thresh-config.yml thresh-cluster - exit $? - else - echo "monasca-thresh is already running" - exit 0 - fi - ;; - stop) - # On system shutdown storm is being shutdown also and this will hang so skip shutting down thresh in that case - if [ -e '/sbin/runlevel' ]; then # upstart/sysV case - if [ $(runlevel | cut -d\ -f 2) == 0 ]; then - exit 0 - fi - else # systemd case - systemctl list-units --type=target |grep shutdown.target - if [ $? -eq 0 ]; then - exit 0 - fi - fi - sudo -Hu mon-thresh /opt/storm/current/bin/storm kill thresh-cluster - # The above command returns but actually takes awhile loop watching status - while true; do - sudo -Hu mon-thresh /opt/storm/current/bin/storm list |grep thresh-cluster - if [ $? -ne 0 ]; then break; fi - sleep 1 - done - ;; - status) - sudo -Hu mon-thresh /opt/storm/current/bin/storm list |grep thresh-cluster - ;; - restart) - $0 stop - $0 start - ;; -esac diff --git a/devstack/files/monasca-thresh/thresh-config.yml b/devstack/files/monasca-thresh/thresh-config.yml deleted file mode 100644 index 526c93686..000000000 --- a/devstack/files/monasca-thresh/thresh-config.yml +++ /dev/null @@ -1,145 +0,0 @@ -# -# (C) Copyright 2015 Hewlett Packard Enterprise Development Company LP -# Copyright 2017 Fujitsu LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -#    http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -metricSpoutThreads: 2 -metricSpoutTasks: 2 - -statsdConfig: - host: "127.0.0.1" - port: %MONASCA_STATSD_PORT% - prefix: monasca.storm. 
- dimensions: !!map - service : monitoring - component : storm - - -metricSpoutConfig: - kafkaConsumerConfiguration: - # See http://kafka.apache.org/documentation.html#api for semantics and defaults. - topic: "metrics" - numThreads: 1 - groupId: "thresh-metric" - zookeeperConnect: "127.0.0.1:2181" - consumerId: 1 - socketTimeoutMs: 30000 - socketReceiveBufferBytes : 65536 - fetchMessageMaxBytes: 1048576 - autoCommitEnable: true - autoCommitIntervalMs: 60000 - queuedMaxMessageChunks: 10 - rebalanceMaxRetries: 4 - fetchMinBytes: 1 - fetchWaitMaxMs: 100 - rebalanceBackoffMs: 2000 - refreshLeaderBackoffMs: 200 - autoOffsetReset: largest - consumerTimeoutMs: -1 - clientId : 1 - zookeeperSessionTimeoutMs : 60000 - zookeeperConnectionTimeoutMs : 60000 - zookeeperSyncTimeMs: 2000 - - -eventSpoutConfig: - kafkaConsumerConfiguration: - # See http://kafka.apache.org/documentation.html#api for semantics and defaults. - topic: "events" - numThreads: 1 - groupId: "thresh-event" - zookeeperConnect: "127.0.0.1:2181" - consumerId: 1 - socketTimeoutMs: 30000 - socketReceiveBufferBytes : 65536 - fetchMessageMaxBytes: 1048576 - autoCommitEnable: true - autoCommitIntervalMs: 60000 - queuedMaxMessageChunks: 10 - rebalanceMaxRetries: 4 - fetchMinBytes: 1 - fetchWaitMaxMs: 100 - rebalanceBackoffMs: 2000 - refreshLeaderBackoffMs: 200 - autoOffsetReset: largest - consumerTimeoutMs: -1 - clientId : 1 - zookeeperSessionTimeoutMs : 60000 - zookeeperConnectionTimeoutMs : 60000 - zookeeperSyncTimeMs: 2000 - - -kafkaProducerConfig: - # See http://kafka.apache.org/documentation.html#api for semantics and defaults. - topic: "alarm-state-transitions" - metadataBrokerList: "%KAFKA_HOST%:9092" - serializerClass: kafka.serializer.StringEncoder - partitionerClass: - requestRequiredAcks: 1 - requestTimeoutMs: 10000 - producerType: sync - keySerializerClass: - compressionCodec: none - compressedTopics: - messageSendMaxRetries: 3 - retryBackoffMs: 100 - topicMetadataRefreshIntervalMs: 600000 - queueBufferingMaxMs: 5000 - queueBufferingMaxMessages: 10000 - queueEnqueueTimeoutMs: -1 - batchNumMessages: 200 - sendBufferBytes: 102400 - clientId : Threshold_Engine - - -sporadicMetricNamespaces: - - foo - -database: - driverClass: %MONASCA_THRESH_DATABASE_ENGINE% - url: "jdbc:mariadb://%DATABASE_HOST%:%DATABASE_PORT%/mon" - user: "%DATABASE_USER%" - password: "%DATABASE_PASSWORD%" - properties: - ssl: false - # the maximum amount of time to wait on an empty pool before throwing an exception - maxWaitForConnection: 1s - - # the SQL query to run when validating a connection's liveness - validationQuery: "/* %DATABASE_TYPE% Health Check */ SELECT 1" - - # the minimum number of connections to keep open - minSize: 8 - - # the maximum number of connections to keep open - maxSize: 41 - - # flag indicating whether Hibernate support is enabled - hibernateSupport: %MONASCA_DATABASE_USE_ORM% - - # hibernate provider class - providerClass: com.zaxxer.hikari.hibernate.HikariConnectionProvider - - # database name - databaseName: mon - - # server name/address - serverName: %DATABASE_HOST% - - # server port number - portNumber: %DATABASE_PORT% - - # hibernate auto configuration parameter - autoConfig: validate diff --git a/devstack/files/schema/influxdb_setup.py b/devstack/files/schema/influxdb_setup.py deleted file mode 100644 index d18beb447..000000000 --- a/devstack/files/schema/influxdb_setup.py +++ /dev/null @@ -1,138 +0,0 @@ -# -# (C) Copyright 2015,2016 Hewlett Packard Enterprise Development LP -# -# Licensed under the Apache License, Version 2.0 (the
"License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -"""A simple script to setup influxdb user and roles. At some point this should - become a more full featured module. Also this assumes that none of the - python based influxdb clients are available on this system. -""" - -import json -import sys - -from oslo_utils.encodeutils import safe_decode -from oslo_utils.encodeutils import safe_encode -from six.moves import urllib -import six.moves.urllib.parse as urlparse - -ADMIN = 'root' -ADMIN_PASS = 'root' -DBNAME = 'mon' -USERS = {} -USERS['mon_api'] = 'password' -USERS['mon_persister'] = 'password' - -URL = 'http://127.0.0.1:8086' - -SHARDSPACE_NAME = 'persister_all' -REPLICATION = 1 -RETENTION = '90d' - - -def format_response(req): - try: - json_value = json.loads(req.read()) - if (len(json_value['results'][0]) > 0 and - 'series' in json_value['results'][0] and - 'values' in json_value['results'][0]['series'][0]): - return json_value['results'][0]['series'][0]['values'] - else: - return [] - except KeyError: - print("Query returned a non-successful result: {0}".format(json_value['results'])) - raise - - -def influxdb_get(uri, query, db=None): - """Runs a query via HTTP GET and returns the response as a Python list.""" - - getparams = {"q": query} - if db: - getparams['db'] = db - - try: - params = urlparse.urlencode(getparams) - uri = "{}&{}".format(uri, params) - req = urllib.request.urlopen(uri) - return format_response(req) - - except KeyError: - sys.exit(1) - - -def influxdb_get_post(uri, query, db=None): - """Runs a query using HTTP GET or POST and returns the response as a Python list. - At some InfluxDB release several ops changed from using GET to POST. For example, - CREATE DATABASE. To maintain backward compatibility, this function first trys the - query using POST and if that fails it retries again using GET. 
- """ - - query_params = {"q": query} - if db: - query_params['db'] = db - - try: - encoded_params = safe_encode(urlparse.urlencode(query_params)) - try: - req = urllib.request.urlopen(uri, encoded_params) - return format_response(req) - - except urllib.error.HTTPError: - uri = "{}&{}".format(uri, encoded_params) - req = urllib.request.urlopen(uri) - return format_response(req) - - except KeyError: - sys.exit(1) - - -def main(argv=None): - """If necessary, create the database, retention policy, and users""" - auth_str = '?u=%s&p=%s' % (ADMIN, ADMIN_PASS) - api_uri = "{0}/query{1}".format(URL, auth_str) - - # List current databases - dbs = influxdb_get(uri=api_uri, query="SHOW DATABASES") - if [DBNAME] not in dbs: - print("Creating database '{}'".format(DBNAME)) - influxdb_get_post(uri=api_uri, query="CREATE DATABASE {0}".format(DBNAME)) - print("...created!") - - # Check retention policy - policies = influxdb_get(uri=api_uri, - query="SHOW RETENTION POLICIES ON {0}".format(DBNAME)) - if not any(pol[0] == SHARDSPACE_NAME for pol in policies): - # Set retention policy - policy = ("CREATE RETENTION POLICY {0} ON {1} DURATION {2} " - "REPLICATION {3} DEFAULT".format(SHARDSPACE_NAME, - DBNAME, - RETENTION, - REPLICATION) - ) - influxdb_get_post(uri=api_uri, db=DBNAME, query=policy) - - # Create the users - users = influxdb_get(uri=api_uri, query="SHOW USERS", db=DBNAME) - for name, password in USERS.items(): - if not any(user[0] == name for user in users): - influxdb_get_post(uri=api_uri, - query=safe_decode("CREATE USER {0} WITH PASSWORD '{1}'" - .format(name, password)), - db=DBNAME) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/devstack/files/storm.yaml b/devstack/files/storm.yaml deleted file mode 100644 index 591838395..000000000 --- a/devstack/files/storm.yaml +++ /dev/null @@ -1,63 +0,0 @@ -# -# (C) Copyright 2015 Hewlett Packard Enterprise Development Company LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -#    http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -### base -java.library.path: "/usr/local/lib:/opt/local/lib:/usr/lib" -storm.local.dir: "/var/storm" - -### zookeeper.* -storm.zookeeper.servers: - - "127.0.0.1" -storm.zookeeper.port: 2181 -storm.zookeeper.retry.interval: 5000 -storm.zookeeper.retry.times: 60 -storm.zookeeper.root: /storm -storm.zookeeper.session.timeout: 3000 - -### supervisor.* configs are for node supervisors -supervisor.slots.ports: - - 6701 - - 6702 -supervisor.childopts: -Xmx256m - -### worker.* configs are for task workers -worker.childopts: -Xmx1280m -XX:+UseConcMarkSweepGC -Dcom.sun.management.jmxremote - -### nimbus.* configs are for the master -nimbus.seeds: ["127.0.0.1"] -nimbus.thrift.port: 6627 -nimbus.childopts: -Xmx256m - -### ui.* configs are for the master -ui.host: %STORM_UI_HOST% -ui.port: %STORM_UI_PORT% -ui.childopts: -Xmx768m - -### logviewer.* configs are for the master -logviewer.port: %STORM_LOGVIEWER_PORT% -logviewer.childopts: -Xmx128m - -### drpc.* configs - -### transactional.* configs -transactional.zookeeper.servers: - - "127.0.0.1" -transactional.zookeeper.port: 2181 -transactional.zookeeper.root: /storm-transactional - -### topology.* configs are for specific executing storms -topology.acker.executors: 1 -topology.debug: False diff --git a/devstack/files/vertica/mon_alarms.sql b/devstack/files/vertica/mon_alarms.sql deleted file mode 100644 index 518d7af13..000000000 --- a/devstack/files/vertica/mon_alarms.sql +++ /dev/null @@ -1,14 +0,0 @@ -CREATE SCHEMA MonAlarms; - -CREATE TABLE MonAlarms.StateHistory( - id AUTO_INCREMENT, - tenant_id VARCHAR, - alarm_id VARCHAR, - metrics VARCHAR (65000), - old_state VARCHAR, - new_state VARCHAR, - sub_alarms VARCHAR (65000), - reason VARCHAR(65000), - reason_data VARCHAR(65000), - time_stamp TIMESTAMP NOT NULL -) PARTITION BY EXTRACT('year' FROM time_stamp)*10000 + EXTRACT('month' FROM time_stamp)*100 + EXTRACT('day' FROM time_stamp); diff --git a/devstack/files/vertica/mon_metrics.sql b/devstack/files/vertica/mon_metrics.sql deleted file mode 100644 index c8b5a4ae5..000000000 --- a/devstack/files/vertica/mon_metrics.sql +++ /dev/null @@ -1,105 +0,0 @@ -DROP SCHEMA MonMetrics CASCADE; - -CREATE SCHEMA MonMetrics; - -CREATE TABLE MonMetrics.Measurements ( - definition_dimensions_id BINARY(20) NOT NULL, - time_stamp TIMESTAMP NOT NULL, - value FLOAT NOT NULL, - value_meta VARCHAR(2048) -) PARTITION BY EXTRACT('year' FROM time_stamp)*10000 + EXTRACT('month' FROM time_stamp)*100 + EXTRACT('day' FROM time_stamp); - -CREATE TABLE MonMetrics.Definitions( - id BINARY(20) NOT NULL, - name VARCHAR(255) NOT NULL, - tenant_id VARCHAR(255) NOT NULL, - region VARCHAR(255) NOT NULL, - PRIMARY KEY(id), - CONSTRAINT MetricsDefinitionsConstraint UNIQUE(name, tenant_id, region) -); - -CREATE TABLE MonMetrics.Dimensions ( - dimension_set_id BINARY(20) NOT NULL, - name VARCHAR(255) NOT NULL, - value VARCHAR(255) NOT NULL, - CONSTRAINT MetricsDimensionsConstraint UNIQUE(dimension_set_id, name, value) -); - -CREATE TABLE MonMetrics.DefinitionDimensions ( - id BINARY(20) NOT NULL, - definition_id BINARY(20) NOT NULL, - dimension_set_id BINARY(20) NOT NULL, - CONSTRAINT MetricsDefinitionDimensionsConstraint UNIQUE(definition_id, dimension_set_id) - ); - --- Projections --- ** These are for a single node system with no k safety - -CREATE PROJECTION Measurements_DBD_1_rep_MonMetrics /*+createtype(D)*/ -( - definition_dimensions_id ENCODING RLE, - time_stamp ENCODING DELTAVAL, - value ENCODING AUTO, - value_meta ENCODING RLE -) -AS - SELECT definition_dimensions_id, - 
time_stamp, - value, - value_meta - FROM MonMetrics.Measurements - ORDER BY definition_dimensions_id, - time_stamp, - value_meta -UNSEGMENTED ALL NODES; - -CREATE PROJECTION Definitions_DBD_2_rep_MonMetrics /*+createtype(D)*/ -( - id ENCODING AUTO, - name ENCODING AUTO, - tenant_id ENCODING RLE, - region ENCODING RLE -) -AS - SELECT id, - name, - tenant_id, - region - FROM MonMetrics.Definitions - ORDER BY region, - tenant_id, - name -UNSEGMENTED ALL NODES; - -CREATE PROJECTION Dimensions_DBD_3_rep_MonMetrics /*+createtype(D)*/ -( - dimension_set_id ENCODING AUTO, - name ENCODING RLE, - value ENCODING AUTO -) -AS - SELECT dimension_set_id, - name, - value - FROM MonMetrics.Dimensions - ORDER BY name, - value, - dimension_set_id -UNSEGMENTED ALL NODES; - -CREATE PROJECTION DefinitionDimensions_DBD_4_rep_MonMetrics /*+createtype(D)*/ -( - id ENCODING AUTO, - definition_id ENCODING RLE, - dimension_set_id ENCODING AUTO -) -AS - SELECT id, - definition_id, - dimension_set_id - FROM MonMetrics.DefinitionDimensions - ORDER BY definition_id, - dimension_set_id -UNSEGMENTED ALL NODES; - -select refresh('MonMetrics.Measurements, MonMetrics.Definitions, MonMetrics.Dimensions, MonMetrics.DefinitionDimensions'); diff --git a/devstack/files/vertica/roles.sql b/devstack/files/vertica/roles.sql deleted file mode 100644 index bc9e30321..000000000 --- a/devstack/files/vertica/roles.sql +++ /dev/null @@ -1,18 +0,0 @@ -CREATE ROLE monasca_persister; -CREATE ROLE monasca_api; - -GRANT USAGE ON SCHEMA MonMetrics TO monasca_persister; -GRANT USAGE ON SCHEMA MonAlarms TO monasca_persister; -GRANT ALL ON TABLE MonMetrics.Measurements TO monasca_persister; -GRANT ALL ON TABLE MonMetrics.Definitions TO monasca_persister; -GRANT ALL ON TABLE MonMetrics.Dimensions TO monasca_persister; -GRANT ALL ON TABLE MonMetrics.DefinitionDimensions TO monasca_persister; -GRANT ALL ON TABLE MonAlarms.StateHistory TO monasca_persister; - -GRANT USAGE ON SCHEMA MonMetrics TO monasca_api; -GRANT USAGE ON SCHEMA MonAlarms TO monasca_api; -GRANT SELECT ON TABLE MonMetrics.Measurements TO monasca_api; -GRANT SELECT ON TABLE MonMetrics.Definitions TO monasca_api; -GRANT SELECT ON TABLE MonMetrics.Dimensions TO monasca_api; -GRANT SELECT ON TABLE MonMetrics.DefinitionDimensions TO monasca_api; -GRANT ALL ON TABLE MonAlarms.StateHistory TO monasca_api; diff --git a/devstack/files/vertica/users.sql b/devstack/files/vertica/users.sql deleted file mode 100644 index f19413548..000000000 --- a/devstack/files/vertica/users.sql +++ /dev/null @@ -1,7 +0,0 @@ -CREATE USER mon_api IDENTIFIED BY 'password'; -GRANT monasca_api TO mon_api; -ALTER USER mon_api DEFAULT ROLE monasca_api; - -CREATE USER mon_persister IDENTIFIED BY 'password'; -GRANT monasca_persister TO mon_persister; -ALTER USER mon_persister DEFAULT ROLE monasca_persister; diff --git a/devstack/files/zookeeper/log4j.properties b/devstack/files/zookeeper/log4j.properties deleted file mode 100644 index edce7b14a..000000000 --- a/devstack/files/zookeeper/log4j.properties +++ /dev/null @@ -1,68 +0,0 @@ -# -# (C) Copyright 2015 Hewlett Packard Enterprise Development Company LP -# Copyright 2020 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# ZooKeeper Logging Configuration -# - -# Format is "<default threshold> (, <appender>)+ - -log4j.rootLogger=INFO, CONSOLE, ROLLINGFILE, TRACEFILE - -# Example: console appender only -# log4j.rootLogger=INFO, CONSOLE - -# Example with rolling log file -#log4j.rootLogger=DEBUG, CONSOLE, ROLLINGFILE - -# Example with rolling log file and tracing -#log4j.rootLogger=TRACE, CONSOLE, ROLLINGFILE, TRACEFILE - -# -# Log INFO level and above messages to the console -# -log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender -log4j.appender.CONSOLE.Threshold=INFO -log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout -log4j.appender.CONSOLE.layout.ConversionPattern=%d{ISO8601} - %-5p [%t:%C{1}@%L] - %m%n - -# -# Add ROLLINGFILE to rootLogger to get log file output -# Log DEBUG level and above messages to a log file -log4j.appender.ROLLINGFILE=org.apache.log4j.RollingFileAppender -log4j.appender.ROLLINGFILE.Threshold=WARN -log4j.appender.ROLLINGFILE.File=${zookeeper.log.dir}/zookeeper.log - -# Max log file size of 10MB -log4j.appender.ROLLINGFILE.MaxFileSize=10MB -# uncomment the next line to limit number of backup files -#log4j.appender.ROLLINGFILE.MaxBackupIndex=10 - -log4j.appender.ROLLINGFILE.layout=org.apache.log4j.PatternLayout -log4j.appender.ROLLINGFILE.layout.ConversionPattern=%d{ISO8601} - %-5p [%t:%C{1}@%L] - %m%n - - -# -# Add TRACEFILE to rootLogger to get log file output -# Log DEBUG level and above messages to a log file -log4j.appender.TRACEFILE=org.apache.log4j.FileAppender -log4j.appender.TRACEFILE.Threshold=TRACE -log4j.appender.TRACEFILE.File=${zookeeper.log.dir}/zookeeper_trace.log - -log4j.appender.TRACEFILE.layout=org.apache.log4j.PatternLayout -### Notice we are including log4j's NDC here (%x) -log4j.appender.TRACEFILE.layout.ConversionPattern=%d{ISO8601} - %-5p [%t:%C{1}@%L][%x] - %m%n diff --git a/devstack/files/zookeeper/zoo.cfg b/devstack/files/zookeeper/zoo.cfg deleted file mode 100644 index ded5d6962..000000000 --- a/devstack/files/zookeeper/zoo.cfg +++ /dev/null @@ -1,74 +0,0 @@ -# -# (C) Copyright 2015 Hewlett Packard Enterprise Development Company LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# http://hadoop.apache.org/zookeeper/docs/current/zookeeperAdmin.html - -# The number of milliseconds of each tick -tickTime=2000 -# The number of ticks that the initial -# synchronization phase can take -initLimit=10 -# The number of ticks that can pass between -# sending a request and getting an acknowledgement -syncLimit=5 -# the directory where the snapshot is stored.
-dataDir=/var/lib/zookeeper -# Place the dataLogDir on a separate physical disc for better performance -# dataLogDir=/disk2/zookeeper - -# the port at which the clients will connect -clientPort=2181 - -# Maximum number of connections that can be made from one client machine -maxClientCnxns=60 - -# specify all zookeeper servers -# The first port is used by followers to connect to the leader -# The second one is used for leader election - -server.0=127.0.0.1:2888:3888 - -# To avoid seeks ZooKeeper allocates space in the transaction log file in -# blocks of preAllocSize kilobytes. The default block size is 64M. One reason -# for changing the size of the blocks is to reduce the block size if snapshots -# are taken more often. (Also, see snapCount). -#preAllocSize=65536 - -# Clients can submit requests faster than ZooKeeper can process them, -# especially if there are a lot of clients. To prevent ZooKeeper from running -# out of memory due to queued requests, ZooKeeper will throttle clients so that -# there is no more than globalOutstandingLimit outstanding requests in the -# system. The default limit is 1,000. ZooKeeper logs transactions to a -# transaction log. After snapCount transactions are written to a log file a -# snapshot is started and a new transaction log file is started. The default -# snapCount is 10,000. -#snapCount=1000 - -# If this option is defined, requests will be logged to a trace file named -# traceFile.year.month.day. -#traceFile= - -# Leader accepts client connections. Default value is "yes". The leader machine -# coordinates updates. For higher update throughput at the slight expense of -# read throughput the leader can be configured to not accept clients and focus -# on coordination. -#leaderServes=yes - -# Autopurge every hour to avoid using lots of disk in bursts -# Order of the next 2 properties matters. -# autopurge.snapRetainCount must be before autopurge.purgeInterval. -autopurge.snapRetainCount=3 -autopurge.purgeInterval=1 diff --git a/devstack/files/zookeeper/zookeeper.service b/devstack/files/zookeeper/zookeeper.service deleted file mode 100644 index 97f2e1da0..000000000 --- a/devstack/files/zookeeper/zookeeper.service +++ /dev/null @@ -1,20 +0,0 @@ -[Unit] -Description=Zookeeper Daemon -Documentation=http://zookeeper.apache.org -Requires=network.target -After=network.target - -[Service] -Type=forking -WorkingDirectory=/opt/zookeeper -User=zookeeper -Group=zookeeper -Environment=ZOO_LOG_DIR=/var/log/zookeeper -ExecStart=/opt/zookeeper/bin/zkServer.sh start /opt/zookeeper/conf/zoo.cfg -ExecStop=/opt/zookeeper/bin/zkServer.sh stop /opt/zookeeper/conf/zoo.cfg -ExecReload=/opt/zookeeper/bin/zkServer.sh restart /opt/zookeeper/conf/zoo.cfg -TimeoutSec=30 -Restart=on-failure - -[Install] -WantedBy=default.target diff --git a/devstack/lib/client.sh b/devstack/lib/client.sh deleted file mode 100644 index 5ae21fd18..000000000 --- a/devstack/lib/client.sh +++ /dev/null @@ -1,37 +0,0 @@ -#!/bin/bash - -# Copyright 2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License.
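-
-# NOTE (illustrative sketch, not part of the original script): the completion
-# install below relies on install -D, which creates any missing parent
-# directories of the target and copies the file in one step:
-#
-#   monasca complete > /tmp/monasca.bash_completion
-#   sudo install -D -m 0644 -o "$STACK_USER" \
-#       /tmp/monasca.bash_completion /etc/bash_completion.d/monasca.bash_completion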
- -_XTRACE_MON_CLIENT=$(set +o | grep xtrace) -set +o xtrace - -install_monascaclient() { - if python3_enabled; then - enable_python3_package python-monascaclient - fi - git_clone $MONASCA_CLIENT_REPO $MONASCA_CLIENT_DIR $MONASCA_CLIENT_BRANCH - setup_dev_lib "python-monascaclient" - - # install completion file - monasca complete > /tmp/monasca.bash_completion - sudo install -D -m 0644 -o $STACK_USER /tmp/monasca.bash_completion $MONASCA_COMPLETION_FILE - rm -rf /tmp/monasca.bash_completion -} - -clean_monascaclient() { - sudo rm -rf $MONASCA_COMPLETION_FILE -} - -${_XTRACE_MON_CLIENT} diff --git a/devstack/lib/constants.sh b/devstack/lib/constants.sh deleted file mode 100644 index d829aa33a..000000000 --- a/devstack/lib/constants.sh +++ /dev/null @@ -1,31 +0,0 @@ -#!/bin/bash - -# Copyright 2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -_XTRACE_MON_CONST=$(set +o | grep xtrace) -set +o xtrace - -# Location of python-monascaclient completion file -MONASCA_COMPLETION_FILE=/etc/bash_completion.d/monasca.bash_completion - -# Location of monasca-profile -MONASCA_PROFILE_FILE=/etc/profile.d/monasca.sh - -# monasca_service_type, used in: -# keystone endpoint creation -# configuration files -MONASCA_SERVICE_TYPE=monitoring - -${_XTRACE_MON_CONST} diff --git a/devstack/lib/monasca-log.sh b/devstack/lib/monasca-log.sh deleted file mode 100644 index 5b8a86c1d..000000000 --- a/devstack/lib/monasca-log.sh +++ /dev/null @@ -1,804 +0,0 @@ -#!/bin/bash - -# -# Copyright 2016-2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
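-#
-# NOTE (illustrative, not part of the original file): like the other lib
-# scripts in this tree, the file below brackets itself with devstack's
-# xtrace/errexit save-and-restore idiom:
-#
-#   _XTRACE_SAVED=$(set +o | grep xtrace)   # e.g. "set +o xtrace"
-#   set +o xtrace                           # silence tracing while sourcing
-#   # ... definitions ...
-#   ${_XTRACE_SAVED}                        # restore the caller's setting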
-# - -_XTRACE_MON_LOG=$(set +o | grep xtrace) -set +o xtrace - -_ERREXIT_MON_LOG=$(set +o | grep errexit) -set -o errexit - -# configuration bits of various services -LOG_PERSISTER_DIR=$DEST/monasca-log-persister -LOG_TRANSFORMER_DIR=$DEST/monasca-log-transformer -LOG_METRICS_DIR=$DEST/monasca-log-metrics -LOG_AGENT_DIR=$DEST/monasca-log-agent - -ELASTICSEARCH_DIR=$DEST/elasticsearch -ELASTICSEARCH_CFG_DIR=$ELASTICSEARCH_DIR/config -ELASTICSEARCH_LOG_DIR=$LOGDIR/elasticsearch -ELASTICSEARCH_DATA_DIR=$DATA_DIR/elasticsearch - -KIBANA_DIR=$DEST/kibana -KIBANA_CFG_DIR=$KIBANA_DIR/config -KIBANA_DEV_DIR=$DEST/kibana_dev -KIBANA_DEV_NODE_JS_VERSION=${KIBANA_DEV_NODE_JS_VERSION:-"10.15.2"} - -LOGSTASH_DIR=$DEST/logstash -LOGSTASH_DATA_DIR=$DEST/logstash-data - -ES_SERVICE_BIND_HOST=${ES_SERVICE_BIND_HOST:-${SERVICE_HOST}} -ES_SERVICE_BIND_PORT=${ES_SERVICE_BIND_PORT:-9200} -ES_SERVICE_PUBLISH_HOST=${ES_SERVICE_PUBLISH_HOST:-${SERVICE_HOST}} -ES_SERVICE_PUBLISH_PORT=${ES_SERVICE_PUBLISH_PORT:-9300} - -KIBANA_SERVICE_HOST=${KIBANA_SERVICE_HOST:-${SERVICE_HOST}} -KIBANA_SERVICE_PORT=${KIBANA_SERVICE_PORT:-5601} -KIBANA_SERVER_BASE_PATH=${KIBANA_SERVER_BASE_PATH:-"/dashboard/monitoring/logs_proxy"} - -# Settings needed for Elasticsearch -# Elasticsearch uses a lot of file descriptors or file handles. -# Increase the limit on the number of open file descriptors for the user running Elasticsearch to 65,536 or higher. -LIMIT_NOFILE=${LIMIT_NOFILE:-65535} - -# Elasticsearch uses a mmapfs directory by default to store its indices. -# The default operating system limit on mmap counts is likely to be too low, -# which may result in out-of-memory exceptions; increase it to at least 262144. -VM_MAX_MAP_COUNT=${VM_MAX_MAP_COUNT:-262144} - -MONASCA_LOG_API_BASE_URI=https://${MONASCA_API_BASE_URI}/logs - - -run_process_sleep() { - local name=$1 - local cmd=$2 - local sleepTime=${3:-1} - run_process "$name" "$cmd" - sleep ${sleepTime} -} - -is_logstash_required() { - is_service_enabled monasca-log-persister \ - || is_service_enabled monasca-log-transformer \ - || is_service_enabled monasca-log-metrics \ - || is_service_enabled monasca-log-agent \ - && return 0 -} - -# TOP_LEVEL functions called from devstack coordinator -############################################################################### -function pre_install_logs_services { - install_elk - install_nodejs - install_gate_config_holder -} - -function install_monasca_log { - configure_nvm - configure_yarn - build_kibana_plugin - install_log_agent - if $USE_OLD_LOG_API = true; then - install_old_log_api - fi -} - -function install_elk { - install_logstash - install_elasticsearch - install_kibana -} - -function install_gate_config_holder { - sudo install -d -o $STACK_USER $GATE_CONFIGURATION_DIR -} - -function install_monasca_statsd { - if use_library_from_git "monasca-statsd"; then - git_clone_by_name "monasca-statsd" - setup_dev_lib "monasca-statsd" - fi -} - -function configure_monasca_log { - configure_kafka - configure_elasticsearch - configure_kibana - install_kibana_plugin - if $USE_OLD_LOG_API = true; then - configure_old_monasca_log_api - fi - configure_monasca_log_api - configure_monasca_log_transformer - configure_monasca_log_metrics - configure_monasca_log_persister - configure_monasca_log_agent - - -} - -function init_monasca_log { - enable_log_management -} - -function init_monasca_grafana_dashboards { - if is_service_enabled horizon; then - echo_summary "Init Grafana dashboards" - - sudo python "${PLUGIN_FILES}"/grafana/grafana.py
"${PLUGIN_FILES}"/grafana/dashboards.d - fi -} - -function install_old_log_api { - - if python3_enabled; then - enable_python3_package monasca-log-api - fi - - echo_summary "Installing monasca-log-api" - - git_clone $MONASCA_LOG_API_REPO $MONASCA_LOG_API_DIR $MONASCA_LOG_API_BRANCH - setup_develop $MONASCA_LOG_API_DIR - - install_keystonemiddleware - install_monasca_statsd - - if [ "$MONASCA_LOG_API_DEPLOY" == "mod_wsgi" ]; then - install_apache_wsgi - elif [ "$MONASCA_LOG_API_DEPLOY" == "uwsgi" ]; then - pip_install uwsgi - else - pip_install gunicorn - fi - - if [ "$MONASCA_LOG_API_DEPLOY" != "gunicorn" ]; then - if is_ssl_enabled_service "monasca-log-api"; then - enable_mod_ssl - fi - fi - -} - - -function configure_old_monasca_log_api { - MONASCA_LOG_API_BIN_DIR=$(get_python_exec_prefix) - MONASCA_LOG_API_WSGI=$MONASCA_LOG_API_BIN_DIR/monasca-log-api-wsgi - - if is_service_enabled monasca-log-api; then - echo_summary "Configuring monasca-log-api" - rm -rf $MONASCA_LOG_API_UWSGI_CONF - install -m 600 $MONASCA_LOG_API_DIR/etc/monasca/log-api-uwsgi.ini $MONASCA_LOG_API_UWSGI_CONF - - write_uwsgi_config "$MONASCA_LOG_API_UWSGI_CONF" "$MONASCA_LOG_API_WSGI" "/logs" - - fi -} - -function configure_old_monasca_log_api_core { - # Put config files in ``$MONASCA_LOG_API_CONF_DIR`` for everyone to find - sudo install -d -o $STACK_USER $MONASCA_LOG_API_CONF_DIR - sudo install -m 700 -d -o $STACK_USER $MONASCA_LOG_API_CACHE_DIR - sudo install -d -o $STACK_USER $MONASCA_LOG_API_LOG_DIR - - # ensure fresh installation of configuration files - rm -rf $MONASCA_LOG_API_CONF $MONASCA_LOG_API_PASTE $MONASCA_LOG_API_LOGGING_CONF - - $MONASCA_LOG_API_BIN_DIR/oslo-config-generator \ - --config-file $MONASCA_LOG_API_DIR/config-generator/monasca-log-api.conf \ - --output-file /tmp/monasca-log-api.conf - - install -m 600 /tmp/monasca-log-api.conf $MONASCA_LOG_API_CONF && rm -rf /tmp/monasca-log-api.conf - install -m 600 $MONASCA_LOG_API_DIR/etc/monasca/log-api-paste.ini $MONASCA_LOG_API_PASTE - install -m 600 $MONASCA_LOG_API_DIR/etc/monasca/log-api-logging.conf $MONASCA_LOG_API_LOGGING_CONF - - # configure monasca-log-api.conf - iniset "$MONASCA_LOG_API_CONF" DEFAULT log_config_append $MONASCA_LOG_API_LOGGING_CONF - iniset "$MONASCA_LOG_API_CONF" service region $REGION_NAME - - iniset "$MONASCA_LOG_API_CONF" log_publisher kafka_url $KAFKA_SERVICE_HOST:$KAFKA_SERVICE_PORT - iniset "$MONASCA_LOG_API_CONF" log_publisher topics log - - iniset "$MONASCA_LOG_API_CONF" kafka_healthcheck kafka_url $KAFKA_SERVICE_HOST:$KAFKA_SERVICE_PORT - iniset "$MONASCA_LOG_API_CONF" kafka_healthcheck kafka_topics log - - iniset "$MONASCA_LOG_API_CONF" roles_middleware path "/v2.0/log" - iniset "$MONASCA_LOG_API_CONF" roles_middleware default_roles monasca-user - iniset "$MONASCA_LOG_API_CONF" roles_middleware agent_roles monasca-agent - iniset "$MONASCA_LOG_API_CONF" roles_middleware delegate_roles admin - - # configure keystone middleware - configure_auth_token_middleware "$MONASCA_LOG_API_CONF" "admin" $MONASCA_LOG_API_CACHE_DIR - iniset "$MONASCA_LOG_API_CONF" keystone_authtoken region_name $REGION_NAME - iniset "$MONASCA_LOG_API_CONF" keystone_authtoken project_name "admin" - iniset "$MONASCA_LOG_API_CONF" keystone_authtoken password $ADMIN_PASSWORD - - # insecure - if is_service_enabled tls-proxy; then - iniset "$MONASCA_LOG_API_CONF" keystone_authtoken insecure False - fi - - # configure log-api-paste.ini - iniset "$MONASCA_LOG_API_PASTE" server:main bind $MONASCA_LOG_API_SERVICE_HOST:$MONASCA_LOG_API_SERVICE_PORT - iniset 
"$MONASCA_LOG_API_PASTE" server:main chdir $MONASCA_LOG_API_DIR - iniset "$MONASCA_LOG_API_PASTE" server:main workers $API_WORKERS -} - -function init_agent { - echo_summary "Init Monasca agent" - - sudo cp -f "${PLUGIN_FILES}"/monasca-agent/http_check.yaml /etc/monasca/agent/conf.d/http_check.yaml - sudo cp -f "${PLUGIN_FILES}"/monasca-agent/process.yaml /etc/monasca/agent/conf.d/process.yaml - sudo cp -f "${PLUGIN_FILES}"/monasca-agent/elastic.yaml /etc/monasca/agent/conf.d/elastic.yaml - - sudo sed -i "s/{{IP}}/$(ip -o -4 addr list eth1 | awk '{print $4}' | cut -d/ -f1 | head -1)/" /etc/monasca/agent/conf.d/*.yaml - sudo sed -i "s/127\.0\.0\.1/$(hostname)/" /etc/monasca/agent/conf.d/*.yaml - sudo systemctl restart monasca-collector -} - -function stop_monasca_log { - stop_process "monasca-log-agent" || true - stop_monasca_log_api - stop_process "monasca-log-metrics" || true - stop_process "monasca-log-persister" || true - stop_process "monasca-log-transformer" || true - stop_process "kibana" || true - stop_process "elasticsearch" || true -} - -function start_monasca_log { - start_elasticsearch - start_kibana - start_monasca_log_transformer - start_monasca_log_metrics - start_monasca_log_persister - if $USE_OLD_LOG_API = true; then - start_monasca_log_api - fi - start_monasca_log_agent -} - -function clean_monasca_log { - clean_monasca_log_agent - clean_monasca_log_api - clean_monasca_log_persister - clean_monasca_log_transformer - clean_kibana - clean_elasticsearch - clean_logstash - clean_nodejs - clean_nvm - clean_yarn - clean_gate_config_holder -} -############################################################################### - -function configure_monasca_log_api { - if is_service_enabled monasca-log; then - echo_summary "Configuring monasca-api" - iniset "$MONASCA_API_CONF" DEFAULT enable_logs_api "true" - iniset "$MONASCA_API_CONF" kafka logs_topics "log" - - create_log_management_accounts - fi -} - -function install_logstash { - if is_logstash_required; then - echo_summary "Installing Logstash ${LOGSTASH_VERSION}" - - local logstash_tarball=logstash-oss-${LOGSTASH_VERSION}.tar.gz - local logstash_url=https://artifacts.elastic.co/downloads/logstash/${logstash_tarball} - - local logstash_dest - logstash_dest=`get_extra_file ${logstash_url}` - - tar xzf ${logstash_dest} -C $DEST - - sudo chown -R $STACK_USER $DEST/logstash-${LOGSTASH_VERSION} - sudo ln -sf $DEST/logstash-${LOGSTASH_VERSION} $LOGSTASH_DIR - - sudo mkdir -p $LOGSTASH_DATA_DIR - sudo chown $STACK_USER:monasca $LOGSTASH_DATA_DIR - fi -} - -function clean_logstash { - if is_logstash_required; then - echo_summary "Cleaning Logstash ${LOGSTASH_VERSION}" - - sudo rm -rf $LOGSTASH_DIR || true - sudo rm -rf $FILES/logstash-${LOGSTASH_VERSION}.tar.gz || true - sudo rm -rf $DEST/logstash-${LOGSTASH_VERSION} || true - fi -} - -function install_elasticsearch { - if is_service_enabled elasticsearch; then - echo_summary "Installing ElasticSearch ${ELASTICSEARCH_VERSION}" - - local es_tarball=elasticsearch-oss-${ELASTICSEARCH_VERSION}-linux-x86_64.tar.gz - local es_url=https://artifacts.elastic.co/downloads/elasticsearch/${es_tarball} - - local es_dest - es_dest=`get_extra_file ${es_url}` - - tar xzf ${es_dest} -C $DEST - - sudo chown -R $STACK_USER $DEST/elasticsearch-${ELASTICSEARCH_VERSION} - sudo ln -sf $DEST/elasticsearch-${ELASTICSEARCH_VERSION} $ELASTICSEARCH_DIR - fi -} - -function configure_elasticsearch { - if is_service_enabled elasticsearch; then - echo_summary "Configuring ElasticSearch ${ELASTICSEARCH_VERSION}" - - 
local templateDir=$ELASTICSEARCH_CFG_DIR/templates - - for dir in $ELASTICSEARCH_LOG_DIR $templateDir $ELASTICSEARCH_DATA_DIR; do - sudo install -m 755 -d -o $STACK_USER $dir - done - - sudo cp -f "${PLUGIN_FILES}"/elasticsearch/elasticsearch.yml $ELASTICSEARCH_CFG_DIR/elasticsearch.yml - sudo chown -R $STACK_USER $ELASTICSEARCH_CFG_DIR/elasticsearch.yml - sudo chmod 0644 $ELASTICSEARCH_CFG_DIR/elasticsearch.yml - - sudo sed -e " - s|%ES_SERVICE_BIND_HOST%|$ES_SERVICE_BIND_HOST|g; - s|%ES_SERVICE_BIND_PORT%|$ES_SERVICE_BIND_PORT|g; - s|%ES_DATA_DIR%|$ELASTICSEARCH_DATA_DIR|g; - s|%ES_LOG_DIR%|$ELASTICSEARCH_LOG_DIR|g; - " -i $ELASTICSEARCH_CFG_DIR/elasticsearch.yml - - sudo ln -sf $ELASTICSEARCH_CFG_DIR/elasticsearch.yml $GATE_CONFIGURATION_DIR/elasticsearch.yml - - echo "[Service]" | sudo tee --append /etc/systemd/system/devstack\@elasticsearch.service > /dev/null - echo "LimitNOFILE=$LIMIT_NOFILE" | sudo tee --append /etc/systemd/system/devstack\@elasticsearch.service > /dev/null - - echo "vm.max_map_count=$VM_MAX_MAP_COUNT" | sudo tee --append /etc/sysctl.conf > /dev/null - sudo sysctl -w vm.max_map_count=$VM_MAX_MAP_COUNT - fi -} - -function clean_elasticsearch { - if is_service_enabled elasticsearch; then - echo_summary "Cleaning Elasticsearch ${ELASTICSEARCH_VERSION}" - - sudo rm -rf $ELASTICSEARCH_DIR || true - sudo rm -rf $ELASTICSEARCH_CFG_DIR || true - sudo rm -rf $ELASTICSEARCH_LOG_DIR || true - sudo rm -rf $ELASTICSEARCH_DATA_DIR || true - sudo rm -rf $FILES/elasticsearch-${ELASTICSEARCH_VERSION}.tar.gz || true - sudo rm -rf $DEST/elasticsearch-${ELASTICSEARCH_VERSION} || true - fi -} - -function start_elasticsearch { - if is_service_enabled elasticsearch; then - echo_summary "Starting ElasticSearch ${ELASTICSEARCH_VERSION}" - # 5 extra seconds to ensure that ES started properly - local esSleepTime=${ELASTICSEARCH_SLEEP_TIME:-5} - run_process_sleep "elasticsearch" "$ELASTICSEARCH_DIR/bin/elasticsearch -E logger.org.elasticsearch=DEBUG" $esSleepTime - fi -} - -function _get_kibana_version_name { - echo "kibana-${KIBANA_VERSION}-linux-x86_64" -} - -function _get_kibana_oss_version_name { - echo "kibana-oss-${KIBANA_VERSION}-linux-x86_64" -} - -function install_kibana { - if is_service_enabled kibana; then - echo_summary "Installing Kibana ${KIBANA_VERSION}" - - local kibana_oss_version_name - kibana_oss_version_name=`_get_kibana_oss_version_name` - local kibana_tarball=${kibana_oss_version_name}.tar.gz - local kibana_tarball_url=https://artifacts.elastic.co/downloads/kibana/${kibana_tarball} - local kibana_tarball_dest - kibana_tarball_dest=`get_extra_file ${kibana_tarball_url}` - - tar xzf ${kibana_tarball_dest} -C $DEST - - local kibana_version_name - kibana_version_name=`_get_kibana_version_name` - sudo chown -R $STACK_USER $DEST/${kibana_version_name} - sudo ln -sf $DEST/${kibana_version_name} $KIBANA_DIR - fi -} - -function configure_kibana { - if is_service_enabled kibana; then - echo_summary "Configuring Kibana ${KIBANA_VERSION}" - - sudo install -m 755 -d -o $STACK_USER $KIBANA_CFG_DIR - - sudo cp -f "${PLUGIN_FILES}"/kibana/kibana.yml $KIBANA_CFG_DIR/kibana.yml - sudo chown -R $STACK_USER $KIBANA_CFG_DIR/kibana.yml - sudo chmod 0644 $KIBANA_CFG_DIR/kibana.yml - - sudo sed -e " - s|%KIBANA_SERVICE_HOST%|$KIBANA_SERVICE_HOST|g; - s|%KIBANA_SERVICE_PORT%|$KIBANA_SERVICE_PORT|g; - s|%ES_SERVICE_BIND_HOST%|$ES_SERVICE_BIND_HOST|g; - s|%ES_SERVICE_BIND_PORT%|$ES_SERVICE_BIND_PORT|g; - s|%KIBANA_SERVER_BASE_PATH%|$KIBANA_SERVER_BASE_PATH|g; -
s|%KEYSTONE_AUTH_URI%|$KEYSTONE_AUTH_URI|g; - " -i $KIBANA_CFG_DIR/kibana.yml - - sudo ln -sf $KIBANA_CFG_DIR/kibana.yml $GATE_CONFIGURATION_DIR/kibana.yml - fi -} - -function clean_kibana { - if is_service_enabled kibana; then - echo_summary "Cleaning Kibana ${KIBANA_VERSION}" - - local kibana_tarball - kibana_tarball=`_get_kibana_oss_version_name`.tar.gz - sudo rm -rf $KIBANA_DIR || true - sudo rm -rf $FILES/${kibana_tarball} || true - sudo rm -rf $KIBANA_CFG_DIR || true - fi -} - -function start_kibana { - if is_service_enabled kibana; then - echo_summary "Starting Kibana ${KIBANA_VERSION}" - local kibanaSleepTime=${KIBANA_SLEEP_TIME:-120} # kibana takes some time to load up - local kibanaCFG="$KIBANA_CFG_DIR/kibana.yml" - run_process_sleep "kibana" "$KIBANA_DIR/bin/kibana --config $kibanaCFG" $kibanaSleepTime - fi -} - -function configure_nvm { - if is_service_enabled kibana; then - echo_summary "Configuring NVM" - curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.34.0/install.sh | bash - source ~/.nvm/nvm.sh - nvm install $KIBANA_DEV_NODE_JS_VERSION - nvm use $KIBANA_DEV_NODE_JS_VERSION - fi -} - -function configure_yarn { - if is_service_enabled kibana; then - echo_summary "Configuring Yarn" - REPOS_UPDATED=False - curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | sudo apt-key add - - echo "deb https://dl.yarnpkg.com/debian/ stable main" | \ - sudo tee /etc/apt/sources.list.d/yarn.list - apt_get_update - apt_get install yarn - fi -} - -function clean_nvm { - if is_service_enabled kibana; then - echo_summary "Cleaning NVM" - rm -rf ~/.nvm - rm -rf ~/.bower - fi -} - -function clean_yarn { - if is_service_enabled kibana; then - echo_summary "Cleaning Yarn" - apt_get purge yarn - fi -} - -function build_kibana_plugin { - if is_service_enabled kibana; then - echo "Building Kibana plugin" - - echo_summary "Cloning and initializing Kibana development environment" - - git clone $KIBANA_DEV_REPO $KIBANA_DEV_DIR --branch $KIBANA_DEV_BRANCH --depth 1 - - git_clone $MONASCA_KIBANA_PLUGIN_REPO $MONASCA_KIBANA_PLUGIN_DIR $MONASCA_KIBANA_PLUGIN_BRANCH - cd $MONASCA_KIBANA_PLUGIN_DIR - git_update_branch $MONASCA_KIBANA_PLUGIN_BRANCH - cp -r $MONASCA_KIBANA_PLUGIN_DIR "$KIBANA_DEV_DIR/plugins" - local plugin_dir="$KIBANA_DEV_DIR/plugins/monasca-kibana-plugin" - - yarn --cwd $KIBANA_DEV_DIR kbn bootstrap - yarn --cwd $plugin_dir build - - local get_version_script="import json; obj = json.load(open('$plugin_dir/package.json')); print(obj['version'])" - local monasca_kibana_plugin_version - monasca_kibana_plugin_version=$(python -c "$get_version_script") - local pkg="$plugin_dir/build/monasca-kibana-plugin-$monasca_kibana_plugin_version.zip" - local easyPkg=$DEST/monasca-kibana-plugin.zip - ln $pkg $easyPkg - rm -rf $KIBANA_DEV_DIR - fi -} - -function install_kibana_plugin { - if is_service_enabled kibana; then - echo_summary "Install Kibana plugin" - # note(trebskit) that needs to happen after kibana received - # its configuration otherwise the plugin fails to be installed - local pkg=file://$DEST/monasca-kibana-plugin.zip - $KIBANA_DIR/bin/kibana-plugin install $pkg - fi -} - -function configure_monasca_log_persister { - if is_service_enabled monasca-log-persister; then - echo_summary "Configuring monasca-log-persister" - - sudo install -m 755 -d -o $STACK_USER $LOG_PERSISTER_DIR - - sudo cp -f "${PLUGIN_FILES}"/monasca-log-persister/persister.conf $LOG_PERSISTER_DIR/persister.conf - sudo chown $STACK_USER $LOG_PERSISTER_DIR/persister.conf - sudo chmod 0640 $LOG_PERSISTER_DIR/persister.conf 
- - sudo sed -e " - s|%ES_SERVICE_BIND_HOST%|$ES_SERVICE_BIND_HOST|g; - s|%KAFKA_SERVICE_HOST%|$KAFKA_SERVICE_HOST|g; - s|%KAFKA_SERVICE_PORT%|$KAFKA_SERVICE_PORT|g; - " -i $LOG_PERSISTER_DIR/persister.conf - - ln -sf $LOG_PERSISTER_DIR/persister.conf $GATE_CONFIGURATION_DIR/log-persister.conf - fi -} - -function clean_monasca_log_persister { - if is_service_enabled monasca-log-persister; then - echo_summary "Cleaning monasca-log-persister" - sudo rm -rf $LOG_PERSISTER_DIR || true - fi -} - -function start_monasca_log_persister { - if is_service_enabled monasca-log-persister; then - echo_summary "Starting monasca-log-persister" - local logstash="$LOGSTASH_DIR/bin/logstash" - run_process "monasca-log-persister" "$logstash -f $LOG_PERSISTER_DIR/persister.conf --path.data $LOGSTASH_DATA_DIR/monasca-log-persister" - fi -} - -function configure_monasca_log_transformer { - if is_service_enabled monasca-log-transformer; then - echo_summary "Configuring monasca-log-transformer" - - sudo install -m 755 -d -o $STACK_USER $LOG_TRANSFORMER_DIR - - sudo cp -f "${PLUGIN_FILES}"/monasca-log-transformer/transformer.conf $LOG_TRANSFORMER_DIR/transformer.conf - sudo chown $STACK_USER $LOG_TRANSFORMER_DIR/transformer.conf - sudo chmod 0640 $LOG_TRANSFORMER_DIR/transformer.conf - - sudo sed -e " - s|%KAFKA_SERVICE_HOST%|$KAFKA_SERVICE_HOST|g; - s|%KAFKA_SERVICE_PORT%|$KAFKA_SERVICE_PORT|g; - " -i $LOG_TRANSFORMER_DIR/transformer.conf - - ln -sf $LOG_TRANSFORMER_DIR/transformer.conf $GATE_CONFIGURATION_DIR/log-transformer.conf - fi -} - -function clean_monasca_log_transformer { - if is_service_enabled monasca-log-transformer; then - echo_summary "Cleaning monasca-log-transformer" - sudo rm -rf $LOG_TRANSFORMER_DIR || true - fi -} - -function start_monasca_log_transformer { - if is_service_enabled monasca-log-transformer; then - echo_summary "Starting monasca-log-transformer" - local logstash="$LOGSTASH_DIR/bin/logstash" - run_process "monasca-log-transformer" "$logstash -f $LOG_TRANSFORMER_DIR/transformer.conf --path.data $LOGSTASH_DATA_DIR/monasca-log-transformer" - fi -} - -function configure_monasca_log_metrics { - if is_service_enabled monasca-log-metrics; then - echo_summary "Configuring monasca-log-metrics" - - sudo install -m 755 -d -o $STACK_USER $LOG_METRICS_DIR - - sudo cp -f "${PLUGIN_FILES}"/monasca-log-metrics/log-metrics.conf $LOG_METRICS_DIR/log-metrics.conf - sudo chown $STACK_USER $LOG_METRICS_DIR/log-metrics.conf - sudo chmod 0640 $LOG_METRICS_DIR/log-metrics.conf - - sudo sed -e " - s|%KAFKA_SERVICE_HOST%|$KAFKA_SERVICE_HOST|g; - s|%KAFKA_SERVICE_PORT%|$KAFKA_SERVICE_PORT|g; - " -i $LOG_METRICS_DIR/log-metrics.conf - - ln -sf $LOG_METRICS_DIR/log-metrics.conf $GATE_CONFIGURATION_DIR/log-metrics.conf - fi -} - -function clean_monasca_log_metrics { - if is_service_enabled monasca-log-metrics; then - echo_summary "Cleaning monasca-log-metrics" - sudo rm -rf $LOG_METRICS_DIR || true - fi -} - -function start_monasca_log_metrics { - if is_service_enabled monasca-log-metrics; then - echo_summary "Starting monasca-log-metrics" - local logstash="$LOGSTASH_DIR/bin/logstash" - run_process "monasca-log-metrics" "$logstash -f $LOG_METRICS_DIR/log-metrics.conf --path.data $LOGSTASH_DATA_DIR/monasca-log-metrics" - fi -} - -function install_log_agent { - if is_service_enabled monasca-log-agent; then - echo_summary "Installing monasca-log-agent [logstash-output-monasca-plugin]" - - $LOGSTASH_DIR/bin/logstash-plugin install --version \ - "${LOGSTASH_OUTPUT_MONASCA_VERSION}" logstash-output-monasca_log_api - 
fi -} - -function configure_monasca_log_agent { - if is_service_enabled monasca-log-agent; then - echo_summary "Configuring monasca-log-agent" - - sudo install -m 755 -d -o $STACK_USER $LOG_AGENT_DIR - - sudo cp -f "${PLUGIN_FILES}"/monasca-log-agent/agent.conf $LOG_AGENT_DIR/agent.conf - sudo chown $STACK_USER $LOG_AGENT_DIR/agent.conf - sudo chmod 0640 $LOG_AGENT_DIR/agent.conf - - sudo sed -e " - s|%MONASCA_API_URI_V2%|$MONASCA_API_URI_V2|g; - s|%KEYSTONE_AUTH_URI%|$KEYSTONE_AUTH_URI_V3|g; - " -i $LOG_AGENT_DIR/agent.conf - - ln -sf $LOG_AGENT_DIR/agent.conf $GATE_CONFIGURATION_DIR/log-agent.conf - - fi -} - -function clean_monasca_log_agent { - if is_service_enabled monasca-log-agent; then - echo_summary "Cleaning monasca-log-agent" - sudo rm -rf $LOG_AGENT_DIR || true - fi -} - - -function start_monasca_log_api { - if is_service_enabled monasca-log-api; then - echo_summary "Starting monasca-log-api" - - local service_port=$MONASCA_LOG_API_SERVICE_PORT - local service_protocol=$MONASCA_LOG_API_SERVICE_PROTOCOL - if is_service_enabled tls-proxy; then - service_port=$MONASCA_LOG_API_SERVICE_PORT_INT - service_protocol="http" - fi - local service_uri - - if [ "$MONASCA_LOG_API_DEPLOY" == "mod_wsgi" ]; then - local enabled_site_file - enabled_site_file=$(apache_site_config_for monasca-log-api) - service_uri=$service_protocol://$MONASCA_LOG_API_SERVICE_HOST/logs/v3.0 - if [ -f ${enabled_site_file} ]; then - enable_apache_site monasca-log-api - restart_apache_server - tail_log monasca-log-api /var/log/$APACHE_NAME/monasca-log-api.log - fi - elif [ "$MONASCA_LOG_API_DEPLOY" == "uwsgi" ]; then - service_uri=$service_protocol://$MONASCA_LOG_API_SERVICE_HOST/logs/v3.0 - run_process "monasca-log-api" "$MONASCA_LOG_API_BIN_DIR/uwsgi --ini $MONASCA_LOG_API_UWSGI_CONF" "" - else - service_uri=$service_protocol://$MONASCA_LOG_API_SERVICE_HOST:$service_port - run_process "monasca-log-api" "$MONASCA_LOG_API_BIN_DIR/gunicorn --paste $MONASCA_LOG_API_PASTE" "" - fi - - echo "Waiting for monasca-log-api to start..." - if ! 
wait_for_service $SERVICE_TIMEOUT $service_uri; then - die $LINENO "monasca-log-api did not start" - fi - - if is_service_enabled tls-proxy; then - start_tls_proxy monasca-log-api '*' $MONASCA_LOG_API_SERVICE_PORT $MONASCA_LOG_API_SERVICE_HOST $MONASCA_LOG_API_SERVICE_PORT_INT - fi - - restart_service memcached - fi -} - -function start_monasca_log_agent { - if is_service_enabled monasca-log-agent; then - echo_summary "Starting monasca-log-agent" - local logstash="$LOGSTASH_DIR/bin/logstash" - run_process "monasca-log-agent" "$logstash -f $LOG_AGENT_DIR/agent.conf --path.data $LOGSTASH_DATA_DIR/monasca-log-agent" "root" "root" - fi -} - -function clean_gate_config_holder { - sudo rm -rf $GATE_CONFIGURATION_DIR || true -} - -function configure_kafka { - echo_summary "Configuring Kafka topics" - for topic in ${KAFKA_SERVICE_LOG_TOPICS//,/ }; do - /opt/kafka/bin/kafka-topics.sh --create \ - --bootstrap-server $KAFKA_SERVICE_HOST:$KAFKA_SERVICE_PORT \ - --replication-factor 1 \ - --partitions 4 \ - --topic $topic - done -} - -function delete_kafka_topics { - echo_summary "Deleting Kafka topics" - # --replication-factor and --partitions are create-only options, so they - # are not passed to --delete - for topic in ${KAFKA_SERVICE_LOG_TOPICS//,/ }; do - /opt/kafka/bin/kafka-topics.sh --delete \ - --bootstrap-server $KAFKA_SERVICE_HOST:$KAFKA_SERVICE_PORT \ - --topic $topic || true - done -} - -function create_log_management_accounts { - if is_service_enabled monasca-log; then - echo_summary "Enable Log Management in Keystone" - - # note(trebskit) the following points to Kibana, which is bad, - # but we do not have a search-api in monasca-log-api now; - # this code will be removed in the future - local log_search_url="http://$KIBANA_SERVICE_HOST:$KIBANA_SERVICE_PORT/" - - get_or_create_service "logs" "logs" "Monasca Log service" - - if $USE_OLD_LOG_API = true; then - get_or_create_endpoint \ - "logs" \ - "$REGION_NAME" \ - "$MONASCA_LOG_API_BASE_URI" \ - "$MONASCA_LOG_API_BASE_URI" \ - "$MONASCA_LOG_API_BASE_URI" - else - get_or_create_endpoint \ - "logs" \ - "$REGION_NAME" \ - "$MONASCA_API_URI_V2" \ - "$MONASCA_API_URI_V2" \ - "$MONASCA_API_URI_V2" - - fi - - get_or_create_service "logs-search" "logs-search" "Monasca Log search service" - get_or_create_endpoint \ - "logs-search" \ - "$REGION_NAME" \ - "$log_search_url" \ - "$log_search_url" \ - "$log_search_url" - - fi -} - -# Restore errexit -${_ERREXIT_MON_LOG} - -# Restore xtrace -${_XTRACE_MON_LOG} diff --git a/devstack/lib/notification.sh b/devstack/lib/notification.sh deleted file mode 100644 index 8de615a97..000000000 --- a/devstack/lib/notification.sh +++ /dev/null @@ -1,155 +0,0 @@ -#!/bin/bash - -# Copyright 2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License.
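-
-# NOTE (illustrative, not part of the original script): the configure step
-# below leans on devstack's iniset helper; a call such as
-#
-#   iniset "$MONASCA_NOTIFICATION_CONF" kafka url "$SERVICE_HOST:9092"
-#
-# leaves the target file containing
-#
-#   [kafka]
-#   url = <value of SERVICE_HOST>:9092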
- -_XTRACE_MON_NOTIFICATION=$(set +o | grep xtrace) -set +o xtrace - -MONASCA_NOTIFICATION_CONF_DIR=${MONASCA_NOTIFICATION_CONF_DIR:-/etc/monasca} -MONASCA_NOTIFICATION_LOG_DIR=${MONASCA_NOTIFICATION_LOG_DIR:-/var/log/monasca/notification} -MONASCA_NOTIFICATION_CONF=${MONASCA_NOTIFICATION_CONF:-$MONASCA_NOTIFICATION_CONF_DIR/monasca-notification.conf} -MONASCA_NOTIFICATION_GATE_CFG_LINK=/etc/monasca-notification - -if [[ ${USE_VENV} = True ]]; then - PROJECT_VENV["monasca-notification"]=${MONASCA_NOTIFICATION_DIR}.venv - MONASCA_NOTIFICATION_BIN_DIR=${PROJECT_VENV["monasca-notification"]}/bin -else - MONASCA_NOTIFICATION_BIN_DIR=$(get_python_exec_prefix) -fi - -is_monasca_notification_enabled() { - is_service_enabled monasca-notification && return 0 - return 1 -} - -# NOTE(trebskit) ref: stack_install_service from devstack -install_monasca-notification() { - if ! is_monasca_notification_enabled; then - return - fi - echo_summary "Installing monasca-notification" - - git_clone ${MONASCA_NOTIFICATION_REPO} ${MONASCA_NOTIFICATION_DIR} \ - ${MONASCA_NOTIFICATION_BRANCH} - if python3_enabled; then - enable_python3_package monasca-notification - fi - setup_develop ${MONASCA_NOTIFICATION_DIR} jira_plugin - # see devstack/plugin.sh - install_monasca_common - install_monasca_statsd - # see devstack/plugin.sh - - if is_service_enabled postgresql; then - apt_get -y install libpq-dev - pip_install_gr psycopg2 - elif is_service_enabled mysql; then - apt_get -y install python-mysqldb libmysqlclient-dev - pip_install_gr PyMySQL - fi - - if [[ ${MONASCA_DATABASE_USE_ORM} == "True" ]]; then - pip_install_gr sqlalchemy - fi -} - -configure_monasca-notification() { - if ! is_monasca_notification_enabled; then - return - fi - - echo_summary "Configuring monasca-notification" - - sudo install -d -o $STACK_USER ${MONASCA_NOTIFICATION_CONF_DIR} - sudo install -d -o $STACK_USER ${MONASCA_NOTIFICATION_LOG_DIR} - - local dbDriver - local dbEngine - local dbPort - if is_service_enabled postgresql; then - dbDriver="monasca_notification.common.repositories.postgres.pgsql_repo:PostgresqlRepo" - dbEngine="postgres" - dbPort=5432 - else - dbDriver="monasca_notification.common.repositories.mysql.mysql_repo:MysqlRepo" - dbEngine="mysql" - dbPort=3306 - fi - if [[ ${MONASCA_DATABASE_USE_ORM} == "True" ]]; then - dbDriver="monasca_notification.common.repositories.orm.orm_repo:OrmRepo" - fi - - iniset "${MONASCA_NOTIFICATION_CONF}" kafka url ${SERVICE_HOST}:9092 - iniset "${MONASCA_NOTIFICATION_CONF}" database repo_driver ${dbDriver} - iniset "${MONASCA_NOTIFICATION_CONF}" email_notifier grafana_url ${SERVICE_HOST}:3000 - iniset "${MONASCA_NOTIFICATION_CONF}" keystone auth_url ${SERVICE_HOST}/identity/v3 - if is_service_enabled postgresql; then - iniset "${MONASCA_NOTIFICATION_CONF}" postgresql host ${DATABASE_HOST} - iniset "${MONASCA_NOTIFICATION_CONF}" postgresql port ${dbPort} - iniset "${MONASCA_NOTIFICATION_CONF}" postgresql user ${DATABASE_USER} - iniset "${MONASCA_NOTIFICATION_CONF}" postgresql passwd ${DATABASE_PASSWORD} - iniset "${MONASCA_NOTIFICATION_CONF}" postgresql db mon - else - iniset "${MONASCA_NOTIFICATION_CONF}" mysql host ${DATABASE_HOST} - iniset "${MONASCA_NOTIFICATION_CONF}" mysql user ${DATABASE_USER} - iniset "${MONASCA_NOTIFICATION_CONF}" mysql passwd ${DATABASE_PASSWORD} - iniset "${MONASCA_NOTIFICATION_CONF}" mysql db mon - fi - if [[ ${MONASCA_DATABASE_USE_ORM} == "True" ]]; then - iniset "${MONASCA_NOTIFICATION_CONF}" orm url 
${dbEngine}://${DATABASE_USER}:${DATABASE_PASSWORD}@${DATABASE_HOST}:${dbPort}/mon - fi - - sudo install -d -o ${STACK_USER} ${MONASCA_NOTIFICATION_GATE_CFG_LINK} - ln -sf ${MONASCA_NOTIFICATION_CONF} ${MONASCA_NOTIFICATION_GATE_CFG_LINK} - - echo "postfix postfix/mailname string localhost" | sudo debconf-set-selections -v - echo "postfix postfix/main_mailer_type string 'Local only'" | sudo debconf-set-selections -v - -} - -start_monasca-notification(){ - if is_monasca_notification_enabled; then - echo_summary "Starting monasca-notification" - run_process "monasca-notification" "$MONASCA_NOTIFICATION_BIN_DIR/monasca-notification" - fi -} - -stop_monasca-notification(){ - if is_monasca_notification_enabled; then - echo_summary "Stopping monasca-notification" - stop_process "monasca-notification" || true - fi -} - -clean_monasca-notification() { - if ! is_monasca_notification_enabled; then - return - fi - - echo_summary "Cleaning monasca-notification" - - sudo rm -rf ${MONASCA_NOTIFICATION_CONF} ${MONASCA_NOTIFICATION_CONF_DIR} \ - ${MONASCA_NOTIFICATION_LOG_DIR} \ - ${MONASCA_NOTIFICATION_GATE_CFG_LINK} - - if is_service_enabled postgresql; then - apt_get -y purge libpq-dev - elif is_service_enabled mysql; then - apt_get -y purge libmysqlclient-dev - apt_get -y purge python-mysqldb - fi -} - -${_XTRACE_MON_NOTIFICATION} diff --git a/devstack/lib/persister.sh b/devstack/lib/persister.sh deleted file mode 100644 index 2233b7afc..000000000 --- a/devstack/lib/persister.sh +++ /dev/null @@ -1,273 +0,0 @@ -#!/bin/bash - -# Copyright 2017 FUJITSU LIMITED -# (C) Copyright 2017 SUSE LLC - -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License.
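-
-# NOTE (illustrative, not part of the original script): the database
-# selection below compares "${MONASCA_METRICS_DB,,}" against 'influxdb'
-# and 'cassandra'; the ,, expansion is bash 4 case folding, so the setting
-# matches case-insensitively:
-#
-#   MONASCA_METRICS_DB=InfluxDB
-#   echo "${MONASCA_METRICS_DB,,}"   # prints: influxdb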
- -_XTRACE_MON_PERSISTER=$(set +o | grep xtrace) -set +o xtrace - -MONASCA_PERSISTER_IMPLEMENTATION_LANG=${MONASCA_PERSISTER_IMPLEMENTATION_LANG:-python} - -MONASCA_PERSISTER_CONF_DIR=${MONASCA_PERSISTER_CONF_DIR:-/etc/monasca} -MONASCA_PERSISTER_LOG_DIR=${MONASCA_PERSISTER_LOG_DIR:-/var/log/monasca/persister} -MONASCA_PERSISTER_GATE_CONFIG=/etc/monasca-persister - -if [ "$MONASCA_PERSISTER_IMPLEMENTATION_LANG" == "python" ]; then - if [[ ${USE_VENV} = True ]]; then - PROJECT_VENV["monasca-persister"]=${MONASCA_PERSISTER_DIR}.venv - MONASCA_PERSISTER_BIN_DIR=${PROJECT_VENV["monasca-persister"]}/bin - else - MONASCA_PERSISTER_BIN_DIR=$(get_python_exec_prefix) - fi - MONASCA_PERSISTER_CONF=${MONASCA_PERSISTER_CONF:-$MONASCA_PERSISTER_CONF_DIR/monasca-persister.conf} - MONASCA_PERSISTER_LOGGING_CONF=${MONASCA_PERSISTER_LOGGING_CONF:-$MONASCA_PERSISTER_CONF_DIR/persister-logging.conf} - - M_REPO_DRIVER_BASE=monasca_persister.repositories.${MONASCA_METRICS_DB}.metrics_repository - M_REPO_DRIVER_INFLUX=$M_REPO_DRIVER_BASE:MetricInfluxdbRepository - M_REPO_DRIVER_CASSANDRA=$M_REPO_DRIVER_BASE:MetricCassandraRepository - - AH_REPO_DRIVER_BASE=monasca_persister.repositories.${MONASCA_METRICS_DB}.alarm_state_history_repository - AH_REPO_DRIVER_INFLUX=$AH_REPO_DRIVER_BASE:AlarmStateHistInfluxdbRepository - AH_REPO_DRIVER_CASSANDRA=$AH_REPO_DRIVER_BASE:AlarmStateHistCassandraRepository - - MONASCA_PERSISTER_CMD="$MONASCA_PERSISTER_BIN_DIR/monasca-persister --config-file=$MONASCA_PERSISTER_CONF" -else - MONASCA_PERSISTER_APP_PORT=${MONASCA_PERSISTER_APP_PORT:-8090} - MONASCA_PERSISTER_ADMIN_PORT=${MONASCA_PERSISTER_ADMIN_PORT:-8091} - MONASCA_PERSISTER_BIND_HOST=${MONASCA_PERSISTER_BIND_HOST:-$SERVICE_HOST} - - MONASCA_PERSISTER_CONF=${MONASCA_PERSISTER_CONF:-$MONASCA_PERSISTER_CONF_DIR/persister.yml} - MONASCA_PERSISTER_JAVA_OPTS="-Dfile.encoding=UTF-8 -Xmx128m" - MONASCA_PERSISTER_JAR="/opt/monasca/monasca-persister.jar" - MONASCA_PERSISTER_CMD="/usr/bin/java ${MONASCA_PERSISTER_JAVA_OPTS} -cp ${MONASCA_PERSISTER_JAR} monasca.persister.PersisterApplication server ${MONASCA_PERSISTER_CONF}" -fi - -if [[ "${MONASCA_METRICS_DB,,}" == 'cassandra' ]]; then - MONASCA_PERSISTER_BATCH_SIZE=100 - MONASCA_PERSISTER_MAX_BATCH_TIME=10 - MONASCA_PERSISTER_METRIC_THREADS=2 - MONASCA_PERSISTER_COMMIT_BATCH_TIME=10000 -else - MONASCA_PERSISTER_BATCH_SIZE=100 - MONASCA_PERSISTER_MAX_BATCH_TIME=15 - MONASCA_PERSISTER_METRIC_THREADS=10 - MONASCA_PERSISTER_COMMIT_BATCH_TIME=0 -fi - -is_monasca_persister_enabled() { - is_service_enabled monasca-persister && return 0 - return 1 -} - -# common -pre_monasca-persister() { - if ! is_monasca_persister_enabled; then - return - fi - sudo install -d -o ${STACK_USER} ${MONASCA_PERSISTER_GATE_CONFIG} -} - -install_monasca-persister() { - echo_summary "Installing monasca-persister" - - git_clone ${MONASCA_PERSISTER_REPO} ${MONASCA_PERSISTER_DIR} \ - ${MONASCA_PERSISTER_BRANCH} - - install_monasca_persister_$MONASCA_PERSISTER_IMPLEMENTATION_LANG -} -configure_monasca-persister() { - if ! is_monasca_persister_enabled; then - return - fi - - echo_summary "Configuring monasca-persister" - - sudo install -d -o $STACK_USER ${MONASCA_PERSISTER_CONF_DIR} - sudo install -d -o $STACK_USER ${MONASCA_PERSISTER_LOG_DIR} - - configure_monasca_persister_$MONASCA_PERSISTER_IMPLEMENTATION_LANG -} -start_monasca-persister() { - if ! 
is_monasca_persister_enabled; then - return - fi - - local cmd=${MONASCA_PERSISTER_CMD} - local systemd_service="devstack@monasca-persister.service" - local unitfile="$SYSTEMD_DIR/$systemd_service" - - # sanity check the command - _common_systemd_pitfalls "$cmd" - - # Restart monasca-persister when exited with error code - iniset -sudo $unitfile "Service" "Restart" "on-failure" - write_user_unit_file $systemd_service "$cmd" "" "$STACK_USER" - - echo_summary "Starting monasca-persister" - $SYSTEMCTL enable $systemd_service - $SYSTEMCTL start $systemd_service -} -stop_monasca-persister() { - if ! is_monasca_persister_enabled; then - return - fi - echo_summary "Stopping monasca-persister" - stop_process "monasca-persister" -} -clean_monasca-persister() { - if ! is_monasca_persister_enabled; then - return - fi - echo_summary "Cleaning monasca-persister" - clean_monasca_persister_$MONASCA_PERSISTER_IMPLEMENTATION_LANG - rm -rf ${MONASCA_PERSISTER_GATE_CONFIG} -} -# common - -# python -install_monasca_persister_python() { - echo_summary "Installing monasca-persister" - if python3_enabled; then - enable_python3_package monasca-persister - fi - setup_develop ${MONASCA_PERSISTER_DIR} - - install_monasca_common - if [[ "${MONASCA_METRICS_DB,,}" == 'influxdb' ]]; then - pip_install_gr influxdb - elif [[ "${MONASCA_METRICS_DB,,}" == 'cassandra' ]]; then - pip_install_gr cassandra-driver - fi -} - -configure_monasca_persister_python() { - # ensure fresh installation of configuration files - rm -rf ${MONASCA_PERSISTER_CONF} ${MONASCA_PERSISTER_LOGGING_CONF} - - $MONASCA_PERSISTER_BIN_DIR/oslo-config-generator \ - --config-file $MONASCA_PERSISTER_DIR/config-generator/persister.conf \ - --output-file /tmp/monasca-persister.conf - - install -m 600 /tmp/monasca-persister.conf ${MONASCA_PERSISTER_CONF} && rm -rf /tmp/monasca-persister.conf - - # Set up logging - iniset $MONASCA_PERSISTER_CONF DEFAULT use_syslog $SYSLOG - - # Format logging - setup_logging $MONASCA_PERSISTER_CONF - iniset $MONASCA_PERSISTER_CONF DEFAULT default_log_levels \ - "monasca_common.kafka_lib.client=INFO" - iniset $MONASCA_PERSISTER_CONF DEFAULT debug $ENABLE_DEBUG_LOG_LEVEL - - iniset "$MONASCA_PERSISTER_CONF" kafka num_processors 1 - - iniset "$MONASCA_PERSISTER_CONF" kafka_metrics uri $SERVICE_HOST:9092 - iniset "$MONASCA_PERSISTER_CONF" kafka_metrics group_id 1_metrics - iniset "$MONASCA_PERSISTER_CONF" kafka_metrics topic metrics - iniset "$MONASCA_PERSISTER_CONF" kafka_metrics batch_size 30 - - iniset "$MONASCA_PERSISTER_CONF" kafka_alarm_history uri $SERVICE_HOST:9092 - iniset "$MONASCA_PERSISTER_CONF" kafka_alarm_history group_id 1_alarm-state-transitions - iniset "$MONASCA_PERSISTER_CONF" kafka_alarm_history topic alarm-state-transitions - iniset "$MONASCA_PERSISTER_CONF" kafka_alarm_history batch_size 1 - - iniset "$MONASCA_PERSISTER_CONF" zookeeper uri $SERVICE_HOST:2181 - - if [[ "${MONASCA_METRICS_DB,,}" == 'influxdb' ]]; then - iniset "$MONASCA_PERSISTER_CONF" influxdb database_name mon - iniset "$MONASCA_PERSISTER_CONF" influxdb ip_address ${SERVICE_HOST} - iniset "$MONASCA_PERSISTER_CONF" influxdb port 8086 - iniset "$MONASCA_PERSISTER_CONF" influxdb password password - iniset "$MONASCA_PERSISTER_CONF" repositories metrics_driver ${M_REPO_DRIVER_INFLUX} - iniset "$MONASCA_PERSISTER_CONF" repositories alarm_state_history_driver ${AH_REPO_DRIVER_INFLUX} - elif [[ "${MONASCA_METRICS_DB,,}" == 'cassandra' ]]; then - iniset "$MONASCA_PERSISTER_CONF" cassandra contact_points ${SERVICE_HOST} - iniset 
"$MONASCA_PERSISTER_CONF" cassandra port 9042 - # iniset "$MONASCA_PERSISTER_CONF" cassandra user monasca - # iniset "$MONASCA_PERSISTER_CONF" cassandra password password - iniset "$MONASCA_PERSISTER_CONF" cassandra keyspace monasca - iniset "$MONASCA_PERSISTER_CONF" cassandra local_data_center datacenter1 - iniset "$MONASCA_PERSISTER_CONF" cassandra connection_timeout 5 - iniset "$MONASCA_PERSISTER_CONF" cassandra read_timeout 60 - iniset "$MONASCA_PERSISTER_CONF" cassandra max_write_retries 5 - iniset "$MONASCA_PERSISTER_CONF" cassandra max_batches 250 - iniset "$MONASCA_PERSISTER_CONF" cassandra max_definition_cache_size 1000000 - # consistency level names: - # ANY(0), - # ONE(1), - # TWO(2), - # THREE(3), - # QUORUM(4), - # ALL(5), - # LOCAL_QUORUM(6), - # EACH_QUORUM(7), - # SERIAL(8), - # LOCAL_SERIAL(9), - # LOCAL_ONE(10); - iniset "$MONASCA_PERSISTER_CONF" cassandra consistency_level ONE - iniset "$MONASCA_PERSISTER_CONF" cassandra retention_policy 45 - iniset "$MONASCA_PERSISTER_CONF" repositories metrics_driver ${M_REPO_DRIVER_CASSANDRA} - iniset "$MONASCA_PERSISTER_CONF" repositories alarm_state_history_driver ${AH_REPO_DRIVER_CASSANDRA} - fi - - ln -sf ${MONASCA_PERSISTER_CONF} ${MONASCA_PERSISTER_GATE_CONFIG} -} - -clean_monasca_persister_python() { - rm -rf ${MONASCA_PERSISTER_CONF} ${MONASCA_PERSISTER_LOGGING_CONF} -} -# python - -# java -install_monasca_persister_java() { - (cd "${MONASCA_PERSISTER_DIR}"/java ; sudo mvn clean package -DskipTests) - - local version="" - version="$(get_version_from_pom "${MONASCA_PERSISTER_DIR}"/java)" - sudo cp -f "${MONASCA_PERSISTER_DIR}"/java/target/monasca-persister-${version}-shaded.jar \ - ${MONASCA_PERSISTER_JAR} -} - -configure_monasca_persister_java() { - # ensure fresh installation of configuration file - rm -rf $MONASCA_PERSISTER_CONF - - install -m 600 "${MONASCA_API_DIR}"/devstack/files/monasca-persister/persister.yml ${MONASCA_PERSISTER_CONF} - sudo sed -e " - s|%ZOOKEEPER_HOST%|${SERVICE_HOST}|g; - s|%VERTICA_HOST%|${SERVICE_HOST}|g; - s|%INFLUXDB_HOST%|${SERVICE_HOST}|g; - s|%CASSANDRADB_HOST%|${SERVICE_HOST}|g; - s|%MONASCA_PERSISTER_DB_TYPE%|${MONASCA_METRICS_DB}|g; - s|%MONASCA_PERSISTER_BIND_HOST%|${MONASCA_PERSISTER_BIND_HOST}|g; - s|%MONASCA_PERSISTER_APP_PORT%|${MONASCA_PERSISTER_APP_PORT}|g; - s|%MONASCA_PERSISTER_ADMIN_PORT%|${MONASCA_PERSISTER_ADMIN_PORT}|g; - s|%MONASCA_PERSISTER_LOG_DIR%|${MONASCA_PERSISTER_LOG_DIR}|g; - s|%MONASCA_PERSISTER_BATCH_SIZE%|${MONASCA_PERSISTER_BATCH_SIZE}|g; - s|%MONASCA_PERSISTER_MAX_BATCH_TIME%|${MONASCA_PERSISTER_MAX_BATCH_TIME}|g; - s|%MONASCA_PERSISTER_COMMIT_BATCH_TIME%|${MONASCA_PERSISTER_COMMIT_BATCH_TIME}|g; - s|%MONASCA_PERSISTER_METRIC_THREADS%|${MONASCA_PERSISTER_METRIC_THREADS}|g; - " -i ${MONASCA_PERSISTER_CONF} - - ln -sf ${MONASCA_PERSISTER_CONF} ${MONASCA_PERSISTER_GATE_CONFIG} -} - -clean_monasca_persister_java() { - rm -rf ${MONASCA_PERSISTER_CONF} ${MONASCA_PERSISTER_LOGGING_CONF} \ - ${MONASCA_PERSISTER_JAR} -} -# java - -${_XTRACE_MON_PERSISTER} diff --git a/devstack/lib/profile.sh b/devstack/lib/profile.sh deleted file mode 100644 index 884c2a3a7..000000000 --- a/devstack/lib/profile.sh +++ /dev/null @@ -1,54 +0,0 @@ -#!/bin/bash - -# Copyright 2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -_XTRACE_MON_PROFILE=$(set +o | grep xtrace) -set +o xtrace - -function install_monasca_profile { - - echo_summary "Install Monasca Bash Profile" - - touch /tmp/monasca_cli.sh - cat > /tmp/monasca_cli.sh << EOF -# signal that the monasca environment is active in this shell -export PS1='[\u@\h \W(monasca)]\$ ' -# set monasca client bash_completion -source ${MONASCA_COMPLETION_FILE} -# set OS_* variables -source $TOP_DIR/openrc mini-mon mini-mon -# override password for mini-mon (mini-mon does not use SERVICE_PASSWORD) -export OS_PASSWORD=password -EOF - - if [[ "${MONASCA_METRICS_DB,,}" == 'cassandra' ]]; then - cat >> /tmp/monasca_cli.sh << EOF -# allow use of the cassandra cli -export CQLSH_NO_BUNDLED=true -export CQLSH_HOST=${SERVICE_HOST} -EOF - fi - - sudo install -D -m 0644 -o ${STACK_USER} \ - /tmp/monasca_cli.sh ${MONASCA_PROFILE_FILE} - rm /tmp/monasca_cli.sh -} - -function clean_monasca_profile { - echo_summary "Clean Monasca CLI Creds" - sudo rm -f ${MONASCA_PROFILE_FILE} -} - -${_XTRACE_MON_PROFILE} diff --git a/devstack/lib/storm.sh b/devstack/lib/storm.sh deleted file mode 100644 index 1c899692d..000000000 --- a/devstack/lib/storm.sh +++ /dev/null @@ -1,166 +0,0 @@ -#!/bin/bash - -# Copyright 2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License.
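(For illustration, install_monasca_profile above ends up installing a profile along these lines; the completion-file and openrc paths are hypothetical stand-ins for MONASCA_COMPLETION_FILE and TOP_DIR:

    export PS1='[\u@\h \W(monasca)]$ '
    source /etc/bash_completion.d/monasca.bash_completion
    source /opt/stack/devstack/openrc mini-mon mini-mon
    export OS_PASSWORD=password

Sourcing the installed file leaves the shell authenticated as mini-mon with monasca CLI completion enabled.)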
- -# call_order: -# - is_storm_enabled -# - install_storm -# - configure_storm -# - clean_storm - -_XTRACE_STORM=$(set +o | grep xtrace) -set +o xtrace - -STORM_USER="storm" -STORM_GROUP="storm" - -STORM_DIR="/opt/storm" -STORM_CURRENT_DIR="${STORM_DIR}/current" -STORM_BIN="${STORM_CURRENT_DIR}/bin/storm" -STORM_WORK_DIR="/var/storm" -STORM_LOG_DIR="/var/log/storm" - -STORM_TARBALL="apache-storm-${STORM_VERSION}.tar.gz" -STORM_TARBALL_DEST="${FILES}/${STORM_TARBALL}" - -STORM_NIMBUS_CMD="${STORM_BIN} nimbus" -STORM_SUPERVISOR_CMD="${STORM_BIN} supervisor" -STORM_UI_CMD="${STORM_BIN} ui" -STORM_LOGVIEWER_CMD="${STORM_BIN} logviewer" - -function is_storm_enabled { - [[ ,${ENABLED_SERVICES} =~ ,"monasca-storm" ]] && return 0 - return 1 -} - -function start_storm { - if is_storm_enabled; then - echo_summary "Starting storm" - - run_process "monasca-storm-nimbus" "${STORM_NIMBUS_CMD}" "${STORM_GROUP}" "${STORM_USER}" - run_process "monasca-storm-supervisor" "${STORM_SUPERVISOR_CMD}" "${STORM_GROUP}" "${STORM_USER}" - run_process "monasca-storm-ui" "${STORM_UI_CMD}" "${STORM_GROUP}" "${STORM_USER}" - run_process "monasca-storm-logviewer" "${STORM_LOGVIEWER_CMD}" "${STORM_GROUP}" "${STORM_USER}" - fi -} - -function stop_storm { - if is_storm_enabled; then - echo_summary "Stopping storm" - - stop_process "monasca-storm-nimbus" - stop_process "monasca-storm-supervisor" - stop_process "monasca-storm-ui" - stop_process "monasca-storm-logviewer" - fi -} - -function clean_storm { - if is_storm_enabled; then - echo_summary "Cleaning storm" - - sudo unlink "${DEST}/logs/storm-workers" || true - sudo unlink "${STORM_CURRENT_DIR}/logs" || true - sudo unlink "${STORM_CURRENT_DIR}" || true - - sudo rm -rf "${DEST}/logs/storm-workers" || true - sudo rm -rf "${STORM_CURRENT_DIR}" || true - sudo rm -rf "${STORM_DIR}" || true - sudo rm -rf "${STORM_WORK_DIR}" || true - sudo rm -rf "${STORM_LOG_DIR}" || true - - sudo userdel "${STORM_USER}" || true - sudo groupdel "${STORM_GROUP}" || true - fi -} - -function configure_storm { - if is_storm_enabled; then - echo_summary "Configuring storm" - sudo cp -f "${MONASCA_API_DIR}"/devstack/files/storm.yaml "${STORM_CURRENT_DIR}/conf/storm.yaml" - sudo chown "${STORM_USER}":"${STORM_GROUP}" "${STORM_CURRENT_DIR}/conf/storm.yaml" - sudo chmod 0644 "${STORM_CURRENT_DIR}/conf/storm.yaml" - - sudo sed -e " - s|%STORM_UI_HOST%|${STORM_UI_HOST}|g; - s|%STORM_UI_PORT%|${STORM_UI_PORT}|g; - s|%STORM_LOGVIEWER_PORT%|${STORM_LOGVIEWER_PORT}|g; - " -i "${STORM_CURRENT_DIR}/conf/storm.yaml" - - fi -} - -function install_storm { - if is_storm_enabled; then - echo_summary "Installing storm" - _download_storm - _setup_user_group - _create_directories - _install_storm - fi -} - -function post_storm { - if is_storm_enabled; then - echo "Post configuring storm" - # if inside the gate, make the worker logs visible there too - if [ -n "${LOGDIR}" ]; then - sudo ln -sfd "${STORM_LOG_DIR}/workers-artifacts" "${LOGDIR}/storm-workers" - fi - fi -} - -# helpers - -function _download_storm { - local storm_tarball_url="${APACHE_ARCHIVES}storm/apache-storm-${STORM_VERSION}/${STORM_TARBALL}" - local storm_dest - - storm_dest=`get_extra_file ${storm_tarball_url}` - - if [ "${storm_dest}" != "${STORM_TARBALL_DEST}" ]; then - mv -f "${storm_dest}" "${STORM_TARBALL_DEST}" - fi -} - -function _setup_user_group { - sudo groupadd --system "${STORM_GROUP}" || true - sudo useradd --system -g "${STORM_GROUP}" "${STORM_USER}" || true -} - -function _install_storm { - # unpack (i.e.
install) the downloaded tarball - sudo tar -xzf ${STORM_TARBALL_DEST} -C "${STORM_DIR}" - - # link the versioned folder to a more suitable one - sudo ln -sfd "${STORM_DIR}/apache-storm-${STORM_VERSION}" "${STORM_CURRENT_DIR}" - - # make them visible in the standard location - sudo ln -sfd "${STORM_LOG_DIR}" "${STORM_CURRENT_DIR}/logs" -} - -function _create_directories { - for dir in "${STORM_DIR}" "${STORM_WORK_DIR}" "${STORM_LOG_DIR}"; do - if [ ! -d "${dir}" ]; then - sudo mkdir -p "${dir}" || true - fi - sudo chown "${STORM_USER}":"${STORM_GROUP}" "${dir}" - sudo chmod 0775 "${dir}" - done -} - -# helpers - -$_XTRACE_STORM diff --git a/devstack/lib/ui.sh b/devstack/lib/ui.sh deleted file mode 100644 index a48de381a..000000000 --- a/devstack/lib/ui.sh +++ /dev/null @@ -1,89 +0,0 @@ -#!/bin/bash - -# Copyright 2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -_XTRACE_DASHBOARD=$(set +o | grep xtrace) -set +o xtrace - -function is_ui_enabled { - is_service_enabled horizon && return 0 - return 1 -} - -function clean_ui { - if is_ui_enabled; then - rm -rf "${HORIZON_DIR}/monitoring" \ - "${HORIZON_DIR}/openstack_dashboard/local/enabled/_50_admin_add_monitoring_panel.py" \ - "${HORIZON_DIR}/openstack_dashboard/conf/monitoring_policy.json" - fi -} - -function configure_ui { - if is_ui_enabled; then - _link_ui_files - - cp $MONASCA_UI_DIR/monitoring/config/local_settings.py \ - $HORIZON_DIR/openstack_dashboard/local/local_settings.d/_50_monasca_ui_settings.py - - local localSettings=$HORIZON_DIR/openstack_dashboard/local/local_settings.d/_50_monasca_ui_settings.py - - sed -e " - s#getattr(settings, 'GRAFANA_URL', None)#{'RegionOne': \"http:\/\/${SERVICE_HOST}:3000\", }#g; - " -i ${localSettings} - - if is_service_enabled horizon && is_service_enabled kibana && is_service_enabled monasca-log; then - echo_summary "Configure Horizon with Kibana access" - sudo sed -e " - s|KIBANA_HOST = getattr(settings, 'KIBANA_HOST', 'http://192.168.10.6:5601/')|KIBANA_HOST = getattr(settings, 'KIBANA_HOST', 'http://${KIBANA_SERVICE_HOST}:${KIBANA_SERVICE_PORT}/')|g; - " -i ${localSettings} - - sudo sed -e " - s|'ENABLE_LOG_MANAGEMENT_BUTTON', False|'ENABLE_LOG_MANAGEMENT_BUTTON', True|g; - " -i ${localSettings} - fi - if python3_enabled; then - DJANGO_SETTINGS_MODULE=openstack_dashboard.settings python3 "${MONASCA_BASE}"/horizon/manage.py collectstatic --noinput - DJANGO_SETTINGS_MODULE=openstack_dashboard.settings python3 "${MONASCA_BASE}"/horizon/manage.py compress --force - else - DJANGO_SETTINGS_MODULE=openstack_dashboard.settings python "${MONASCA_BASE}"/horizon/manage.py collectstatic --noinput - DJANGO_SETTINGS_MODULE=openstack_dashboard.settings python "${MONASCA_BASE}"/horizon/manage.py compress --force - fi - restart_service apache2 || true - fi -} - -function install_ui { - if is_ui_enabled; then - git_clone $MONASCA_UI_REPO $MONASCA_UI_DIR $MONASCA_UI_BRANCH - git_clone $MONASCA_CLIENT_REPO $MONASCA_CLIENT_DIR $MONASCA_CLIENT_BRANCH - if python3_enabled; then - enable_python3_package
monasca-ui - fi - - setup_develop $MONASCA_UI_DIR - setup_dev_lib "python-monascaclient" - - fi -} - -function _link_ui_files { - ln -f "${MONASCA_UI_DIR}/monitoring/enabled/_50_admin_add_monitoring_panel.py" \ - "${HORIZON_DIR}/openstack_dashboard/local/enabled/_50_admin_add_monitoring_panel.py" - ln -f "${MONASCA_UI_DIR}/monitoring/conf/monitoring_policy.json" \ - "${HORIZON_DIR}/openstack_dashboard/conf/monitoring_policy.json" - ln -sfF "${MONASCA_UI_DIR}"/monitoring "${HORIZON_DIR}/monitoring" -} - -$_XTRACE_DASHBOARD diff --git a/devstack/lib/zookeeper.sh b/devstack/lib/zookeeper.sh deleted file mode 100644 index 2b16da010..000000000 --- a/devstack/lib/zookeeper.sh +++ /dev/null @@ -1,73 +0,0 @@ -#!/bin/bash - -# Copyright 2020 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -_XTRACE_ZOOKEEPER=$(set +o | grep xtrace) -set +o xtrace - -function is_zookeeper_enabled { - is_service_enabled monasca-zookeeper && return 0 - return 1 -} - -function clean_zookeeper { - - if is_zookeeper_enabled; then - echo_summary "Cleaning Monasca Zookeeper" - - sudo systemctl disable zookeeper - sudo systemctl stop zookeeper - sudo rm -rf /var/log/zookeeper - sudo rm -rf /var/lib/zookeeper - sudo rm -rf /opt/zookeeper - sudo rm -rf /opt/apache-zookeeper-${ZOOKEEPER_VERSION}-bin - sudo rm -rf /etc/systemd/system/zookeeper.service - sudo systemctl daemon-reload - fi -} - -function install_zookeeper { - - if is_zookeeper_enabled; then - echo_summary "Install Monasca Zookeeper" - - local zookeeper_tarball=apache-zookeeper-${ZOOKEEPER_VERSION}-bin.tar.gz - local zookeeper_tarball_url=${APACHE_ARCHIVES}zookeeper/zookeeper-${ZOOKEEPER_VERSION}/${zookeeper_tarball} - local zookeeper_tarball_dest - zookeeper_tarball_dest=`get_extra_file ${zookeeper_tarball_url}` - - sudo groupadd --system zookeeper || true - sudo useradd --system -g zookeeper zookeeper || true - sudo tar -xzf ${zookeeper_tarball_dest} -C /opt - sudo ln -sf /opt/apache-zookeeper-${ZOOKEEPER_VERSION}-bin /opt/zookeeper - sudo cp $PLUGIN_FILES/zookeeper/* /opt/zookeeper/conf - sudo chown -R zookeeper:zookeeper /opt/zookeeper/ - - sudo mkdir /var/log/zookeeper - sudo chown -R zookeeper:zookeeper /var/log/zookeeper - - sudo mkdir /var/lib/zookeeper - sudo chown -R zookeeper:zookeeper /var/lib/zookeeper - - sudo cp -f "${MONASCA_API_DIR}"/devstack/files/zookeeper/zookeeper.service /etc/systemd/system/zookeeper.service - sudo chmod 644 /etc/systemd/system/zookeeper.service - - sudo systemctl daemon-reload - sudo systemctl enable zookeeper - sudo systemctl start zookeeper || sudo systemctl restart zookeeper - fi -} - -$_XTRACE_ZOOKEEPER diff --git a/devstack/override-defaults b/devstack/override-defaults deleted file mode 100644 index 5e7a0f6fa..000000000 --- a/devstack/override-defaults +++ /dev/null @@ -1,3 +0,0 @@ -if [[ "${MONASCA_API_IMPLEMENTATION_LANG,,}" == 'java' ]]; then - WSGI_MODE=mod_wsgi -fi diff --git a/devstack/plugin.sh b/devstack/plugin.sh deleted file mode 100755 index c6c92e9c5..000000000 --- a/devstack/plugin.sh +++ 
/dev/null @@ -1,1555 +0,0 @@ -# -# (C) Copyright 2015-2017 Hewlett Packard Enterprise Development LP -# Copyright 2017 FUJITSU LIMITED -# (C) Copyright 2017 SUSE LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -#     http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Monasca DevStack plugin -# -# Install and start Monasca service in devstack -# -# To enable Monasca in devstack add an entry to local.conf that -# looks like -# -# [[local|localrc]] -# enable_plugin monasca https://git.openstack.org/openstack/monasca-api -# -# By default all Monasca services are started (see -# devstack/settings). To disable a specific service use the -# disable_service function. For example to turn off notification: -# -# disable_service monasca-notification -# -# Several variables set in the localrc section adjust common behaviors -# of Monasca (see within for additional settings): -# -# EXAMPLE VARS HERE - -# Save trace setting -XTRACE=$(set +o | grep xtrace) -set -o xtrace - -ERREXIT=$(set +o | grep errexit) -set -o errexit - -# source lib/* -source ${MONASCA_API_DIR}/devstack/lib/constants.sh -source ${MONASCA_API_DIR}/devstack/lib/zookeeper.sh -source ${MONASCA_API_DIR}/devstack/lib/ui.sh -source ${MONASCA_API_DIR}/devstack/lib/notification.sh -source ${MONASCA_API_DIR}/devstack/lib/profile.sh -source ${MONASCA_API_DIR}/devstack/lib/client.sh -source ${MONASCA_API_DIR}/devstack/lib/persister.sh -source ${MONASCA_API_DIR}/devstack/lib/storm.sh -source ${MONASCA_API_DIR}/devstack/lib/monasca-log.sh -# source lib/* - -# Set default implementations to python -export MONASCA_API_IMPLEMENTATION_LANG=${MONASCA_API_IMPLEMENTATION_LANG:-python} - -# Set default persistent layer settings -export MONASCA_METRICS_DB=${MONASCA_METRICS_DB:-influxdb} - -# Make sure we use ORM mapping as default if postgresql is enabled -if is_service_enabled mysql; then - MONASCA_DATABASE_USE_ORM=${MONASCA_DATABASE_USE_ORM:-false} -elif is_service_enabled postgresql; then - MONASCA_DATABASE_USE_ORM=true -fi -MONASCA_DATABASE_USE_ORM=$(trueorfalse False MONASCA_DATABASE_USE_ORM) - -# Set INFLUXDB_VERSION -if [[ "${MONASCA_API_IMPLEMENTATION_LANG,,}" == 'java' ]]; then - - INFLUXDB_VERSION=${INFLUXDB_VERSION:-${INFLUXDB_JAVA_VERSION}} - -elif [[ "${MONASCA_API_IMPLEMENTATION_LANG,,}" == 'python' ]]; then - - INFLUXDB_VERSION=${INFLUXDB_VERSION:-${INFLUXDB_PYTHON_VERSION}} - -else - - echo "Found invalid value for variable MONASCA_API_IMPLEMENTATION_LANG: $MONASCA_API_IMPLEMENTATION_LANG" - echo "Valid values for MONASCA_API_IMPLEMENTATION_LANG are \"java\" and \"python\"" - die "Please set MONASCA_API_IMPLEMENTATION_LANG to either \"java\" or \"python\"" - -fi - -# monasca-api settings -if [[ ${USE_VENV} = True ]]; then - PROJECT_VENV["monasca-api"]=${MONASCA_API_DIR}.venv - MONASCA_API_BIN_DIR=${PROJECT_VENV["monasca-api"]}/bin -else - MONASCA_API_BIN_DIR=$(get_python_exec_prefix) -fi - -if [[ "${MONASCA_API_USE_MOD_WSGI}" == 'True' && "${MONASCA_API_IMPLEMENTATION_LANG}" == "python" ]]; then -
MONASCA_API_BASE_URI=${MONASCA_API_SERVICE_PROTOCOL}://${MONASCA_API_SERVICE_HOST}/metrics -else - MONASCA_API_BASE_URI=${MONASCA_API_SERVICE_PROTOCOL}://${MONASCA_API_SERVICE_HOST}:${MONASCA_API_SERVICE_PORT} -fi - - -MONASCA_API_URI_V2=${MONASCA_API_BASE_URI}/v2.0 - -# Files inside this directory will be visible in the gate logs -MON_API_GATE_CONFIGURATION_DIR=/etc/monasca-api - -function pre_install_monasca { - echo_summary "Pre-Installing Monasca Components" - find_nearest_apache_mirror - install_gate_config_holder - configure_system_encoding_format - install_zookeeper - install_kafka - install_storm - - install_monasca_virtual_env - install_monasca_$MONASCA_METRICS_DB - - pre_monasca-persister -} - -function install_monasca { - - echo_summary "Installing Monasca" - - install_monasca_common_java - if is_service_enabled monasca-persister; then - stack_install_service monasca-persister - fi - if is_service_enabled monasca-notification; then - stack_install_service monasca-notification - fi - - if is_service_enabled monasca-thresh; then - if ! is_storm_enabled; then - die "monasca-thresh requires monasca-storm service to be enabled" - fi - install_monasca_thresh - fi - - if is_service_enabled monasca-api; then - if [ "$MONASCA_API_IMPLEMENTATION_LANG" == "python" ]; then - stack_install_service monasca-api - else - install_monasca_api_java - sudo systemctl enable monasca-api - fi - fi - - install_ui -} - -function configure_monasca { - echo_summary "Configuring Monasca" - - configure_storm - configure_ui - configure_monasca_api - configure_monasca-notification - configure_monasca-persister - install_schema -} - -function configure_system_encoding_format { - # This is needed to build monasca-common - export LANGUAGE=en_US.UTF-8 - export LC_ALL=en_US.UTF-8 - export LANG=en_US.UTF-8 - export LC_CTYPE=en_US.UTF-8 -} - -function extra_monasca { - echo_summary "Installing additional monasca components" - - create_accounts - install_monasca_agent - install_monascaclient - install_monasca_profile - - if is_service_enabled horizon; then - install_nodejs - install_go - install_monasca_grafana - fi - - start_monasca_services - init_collector_service - post_storm - - if is_service_enabled horizon; then - init_monasca_grafana - fi -} - -function start_monasca_services { - start_storm - if is_service_enabled monasca-api; then - start_monasca_api - fi - start_monasca-notification - start_monasca-persister - if is_service_enabled monasca-thresh; then - start_service monasca-thresh || restart_service monasca-thresh - fi - if is_service_enabled horizon; then - start_service grafana-server || restart_service grafana-server - fi - if is_service_enabled monasca-agent; then - sudo /usr/local/bin/monasca-reconfigure - if is_service_enabled nova && [ "$VIRT_DRIVER" = "libvirt" ]; then - sudo /opt/monasca-agent/bin/monasca-setup -d libvirt - fi - fi -} - -function delete_kafka_topics { - - for topic in ${KAFKA_SERVICE_TOPICS//,/ }; do - /opt/kafka/bin/kafka-topics.sh --delete \ - --bootstrap-server $KAFKA_SERVICE_HOST:$KAFKA_SERVICE_PORT \ - --topic $topic || true - done -} - -function unstack_monasca { - stop_service grafana-server || true - - [[ -f /etc/systemd/system/monasca-agent.target ]] && stop_service monasca-agent.target || true - - stop_service monasca-thresh || true - - stop_storm - stop_monasca-notification - stop_monasca-persister - stop_monasca_api - - delete_kafka_topics - stop_service kafka || true - - stop_service influxdb || true - - stop_service verticad
|| true - - stop_service vertica_agent || true - - stop_service cassandra || true -} - -function clean_monasca { - - set +o errexit - - unstack_monasca - clean_ui - - if is_service_enabled horizon; then - clean_nodejs - clean_monasca_grafana - clean_go - fi - - if is_service_enabled monasca-agent; then - clean_monasca_agent - fi - if is_service_enabled monasca-thresh; then - clean_monasca_thresh - fi - clean_storm - if is_service_enabled monasca-api; then - clean_monasca_api_$MONASCA_API_IMPLEMENTATION_LANG - fi - - clean_monasca-persister - clean_monasca-notification - clean_monasca_common_java - - clean_schema - - clean_monasca_profile - clean_monascaclient - - clean_monasca_$MONASCA_METRICS_DB - - clean_kafka - - clean_zookeeper - - clean_monasca_virtual_env - - #Restore errexit - set -o errexit -} - -function install_monasca_virtual_env { - - echo_summary "Install Monasca Virtual Environment" - - sudo groupadd --system monasca || true - - sudo mkdir -p /opt/monasca || true - - sudo chown $STACK_USER:monasca /opt/monasca - - (cd /opt/monasca ; virtualenv .) -} - -function clean_monasca_virtual_env { - - echo_summary "Clean Monasca Virtual Environment" - - sudo rm -rf /opt/monasca - - sudo groupdel monasca - -} - -function install_kafka { - - echo_summary "Install Monasca Kafka" - - local kafka_tarball=kafka_${KAFKA_VERSION}.tgz - local kafka_tarball_url=${APACHE_ARCHIVES}kafka/${BASE_KAFKA_VERSION}/${kafka_tarball} - - local kafka_tarball_dest - kafka_tarball_dest=`get_extra_file ${kafka_tarball_url}` - - sudo groupadd --system kafka || true - - sudo useradd --system -g kafka kafka || true - - sudo tar -xzf ${kafka_tarball_dest} -C /opt - - sudo ln -sf /opt/kafka_${KAFKA_VERSION} /opt/kafka - - sudo cp -f "${MONASCA_API_DIR}"/devstack/files/kafka/kafka-server-start.sh /opt/kafka_${KAFKA_VERSION}/bin/kafka-server-start.sh - - sudo cp -f "${MONASCA_API_DIR}"/devstack/files/kafka/kafka.service /etc/systemd/system/kafka.service - - sudo chown root:root /etc/systemd/system/kafka.service - - sudo chmod 644 /etc/systemd/system/kafka.service - - sudo mkdir -p /var/kafka || true - - sudo chown kafka:kafka /var/kafka - - sudo chmod 755 /var/kafka - - sudo rm -rf /var/kafka/lost+found - - sudo mkdir -p /var/log/kafka || true - - sudo chown kafka:kafka /var/log/kafka - - sudo chmod 755 /var/log/kafka - - sudo ln -sf /opt/kafka/config /etc/kafka - - sudo cp -f "${MONASCA_API_DIR}"/devstack/files/kafka/server.properties /etc/kafka/server.properties - - sudo chown kafka:kafka /etc/kafka/server.properties - - sudo chmod 644 /etc/kafka/server.properties - - # set kafka listeners address. 
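# (illustrative effect of the sed below, assuming SERVICE_HOST=192.168.10.6: the
# stock template line is kept and a concrete entry is appended, so
# server.properties ends up with
#   listeners = PLAINTEXT://your.host.name:9092
#   listeners=PLAINTEXT://192.168.10.6:9092
# and the broker binds to the devstack host address)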
- sudo sed -i "s/listeners = PLAINTEXT:\/\/your.host.name:9092/listeners = PLAINTEXT:\/\/your.host.name:9092\nlisteners=PLAINTEXT:\/\/${SERVICE_HOST}:9092/"\ - /etc/kafka/server.properties - - sudo systemctl enable kafka - - sudo systemctl start kafka || sudo systemctl restart kafka - -} - -function clean_kafka { - - echo_summary "Clean Monasca Kafka" - - sudo rm -rf /var/kafka - - sudo rm -rf /var/log/kafka - - sudo rm -rf /etc/kafka - - sudo rm -rf /opt/kafka - - sudo systemctl disable kafka - - sudo rm -rf /etc/systemd/system/kafka.service - - sudo userdel kafka - - sudo groupdel kafka - - sudo rm -rf /opt/kafka_${KAFKA_VERSION} - - sudo rm -rf ${FILES}/kafka_${KAFKA_VERSION}.tgz - -} - -function install_monasca_influxdb { - - if is_service_enabled monasca-persister; then - echo_summary "Install Monasca Influxdb" - - local influxdb_deb=influxdb_${INFLUXDB_VERSION}_amd64.deb - local influxdb_deb_url=${INFLUXDB_DEB_URL}${influxdb_deb} - echo "influxdb deb url: ${influxdb_deb_url}" - - local influxdb_deb_dest - influxdb_deb_dest=`get_extra_file ${influxdb_deb_url}` - - sudo dpkg --skip-same-version -i ${influxdb_deb_dest} - - # Validate INFLUXDB_VERSION - validate_version ${INFLUXDB_VERSION} - - if [[ $? -ne 0 ]]; then - echo "Found invalid value for variable INFLUXDB_VERSION: $INFLUXDB_VERSION" - echo "Valid values for INFLUXDB_VERSION must be in the form of 1.0.0" - die "Please set INFLUXDB_VERSION to a correct value" - fi - - sudo cp -f "${MONASCA_API_DIR}"/devstack/files/influxdb/influxdb.conf /etc/influxdb/influxdb.conf - - sudo cp -f "${MONASCA_API_DIR}"/devstack/files/influxdb/influxdb /etc/default/influxdb - - sudo systemctl start influxdb || sudo systemctl restart influxdb - fi - -} - -function install_monasca_vertica { - - echo_summary "Install Monasca Vertica" - - apt_get install dialog - - sudo dpkg --skip-same-version -i /vagrant_home/vertica_${VERTICA_VERSION}_amd64.deb - - # Download Vertica JDBC driver - # local vertica_jar=vertica-jdbc-${VERTICA_VERSION}.jar - # local vertica_jar_url=https://my.vertica.com/client_drivers/7.2.x/${VERTICA_VERSION}/${vertica_jar} - - # local vertica_jar_dest - # vertica_jar_dest=`get_extra_file ${vertica_jar_url}` - - # Current version of Vertica 8.0.0 doesn't support Ubuntu Xenial, so fake a version - sudo cp -p /etc/debian_version /etc/debian_version.org - sudo sh -c "echo 'jessie/sid' > /etc/debian_version" - - sudo /opt/vertica/sbin/install_vertica --hosts "127.0.0.1" --deb /vagrant_home/vertica_${VERTICA_VERSION}_amd64.deb --dba-user-password password --license CE --accept-eula --failure-threshold NONE - - sudo su dbadmin -c '/opt/vertica/bin/admintools -t create_db -s "127.0.0.1" -d mon -p password' - - # Bring back Ubuntu version - sudo mv /etc/debian_version.org /etc/debian_version - - # Copy Vertica JDBC driver to /opt/monasca - # sudo cp ${FILES}/vertica-jdbc-${VERTICA_VERSION}.jar /opt/monasca/vertica-jdbc-${VERTICA_VERSION}.jar - sudo cp /vagrant_home/vertica-jdbc-${VERTICA_VERSION}.jar /opt/monasca/vertica-jdbc-${VERTICA_VERSION}.jar - -} - -function install_monasca_cassandra { - - if is_service_enabled monasca-persister; then - echo_summary "Install Monasca Cassandra" - - if [[ "$OFFLINE" != "True" ]]; then - sudo sh -c "echo 'deb [signed-by=/etc/apt/keyrings/apache-cassandra.asc] https://debian.cassandra.apache.org ${CASSANDRA_VERSION} main' > /etc/apt/sources.list.d/cassandra.sources.list" - REPOS_UPDATED=False - mkdir -p /etc/apt/keyrings - curl -o /etc/apt/keyrings/apache-cassandra.asc 
https://downloads.apache.org/cassandra/KEYS - PUBLIC_KEY=`sudo apt_get update 2>&1 | awk '/NO_PUBKEY/ {print $NF}'` - if [ -n "${PUBLIC_KEY}" ]; then - sudo apt-key adv --keyserver pool.sks-keyservers.net --recv-key ${PUBLIC_KEY} - fi - fi - - REPOS_UPDATED=False - apt_get_update - apt_get install cassandra - - if [[ ${SERVICE_HOST} ]]; then - - # set cassandra server listening ip address - sudo sed -i "s/^rpc_address: localhost/rpc_address: ${SERVICE_HOST}/g" /etc/cassandra/cassandra.yaml - - fi - - # set batch size larger - sudo sed -i "s/^batch_size_warn_threshold_in_kb: 5/batch_size_warn_threshold_in_kb: 50/g" /etc/cassandra/cassandra.yaml - - sudo sed -i "s/^batch_size_fail_threshold_in_kb: 50/batch_size_fail_threshold_in_kb: 500/g" /etc/cassandra/cassandra.yaml - - sudo service cassandra restart - - echo "Sleeping for 15 seconds to wait for Cassandra to start up" - sleep 15s - - export CQLSH_NO_BUNDLED=true - - # always needed for Monasca api - pip_install_gr cassandra-driver - fi -} - -function clean_monasca_influxdb { - - echo_summary "Clean Monasca Influxdb" - - sudo rm -f /etc/default/influxdb - - sudo rm -f /etc/influxdb/influxdb.conf - - sudo dpkg --purge influxdb - - sudo rm -rf /var/log/influxdb - - sudo rm -rf /tmp/influxdb - - sudo rm -rf /var/lib/influxdb - - sudo rm -rf /etc/init.d/influxdb - - sudo rm -rf /opt/staging/influxdb/influxdb-package - - sudo rm -rf /etc/influxdb - - sudo rm -rf /tmp/bootstrap* - - sudo rm -rf /run/influxdb - - sudo rm -f ${FILES}/influxdb_${INFLUXDB_VERSION}_amd64.deb - - sudo rm -f /etc/init.d/influxdb -} - -function clean_monasca_vertica { - - echo_summary "Clean Monasca Vertica" - - sudo rm -rf /opt/vertica - - sudo dpkg --purge vertica - - sudo userdel dbadmin - - sudo groupdel verticadba - - sudo rm -rf /home/dbadmin - - apt_get purge dialog -} - -function clean_monasca_cassandra { - - echo_summary "Clean Monasca Cassandra" - - apt_get purge cassandra - - apt_get autoremove - - sudo rm -rf /var/lib/cassandra - - sudo rm -rf /var/log/cassandra - - sudo rm -rf /etc/cassandra - - sudo rm -f /etc/apt/sources.list.d/cassandra.sources.list - - sudo rm -f /etc/apt/keyrings/apache-cassandra.asc -} - -function install_schema { - echo_summary "Install Monasca Schema" - - sudo mkdir -p $MONASCA_SCHEMA_DIR || true - sudo chmod 0755 $MONASCA_SCHEMA_DIR - - install_schema_metric_database_$MONASCA_METRICS_DB - install_schema_alarm_database - install_schema_kafka_topics -} - -function install_schema_metric_database_influxdb { - # sudo cp -f "${MONASCA_API_DIR}"/devstack/files/schema/influxdb_setup.py $MONASCA_SCHEMA_DIR/influxdb_setup.py - # sudo chmod 0750 $MONASCA_SCHEMA_DIR/influxdb_setup.py - # sudo chown root:root $MONASCA_SCHEMA_DIR/influxdb_setup.py - # if python3_enabled; then - # sudo python3 $MONASCA_SCHEMA_DIR/influxdb_setup.py - # else - # sudo python $MONASCA_SCHEMA_DIR/influxdb_setup.py - # fi - curl --user root:root \ - -XPOST 'http://127.0.0.1:8086/query' \ - --data-urlencode 'q=CREATE DATABASE "mon"' - curl --user root:root \ - -XPOST 'http://127.0.0.1:8086/query' \ - --data-urlencode 'db=mon' \ - --data-urlencode 'q=CREATE RETENTION POLICY "persister_all" ON mon DURATION 90d REPLICATION 1 DEFAULT' - curl --user root:root \ - -XPOST 'http://127.0.0.1:8086/query' \ - --data-urlencode 'db=mon' \ - --data-urlencode "q=CREATE USER mon_api WITH PASSWORD 'password'" - curl --user root:root \ - -XPOST 'http://127.0.0.1:8086/query' \ - --data-urlencode 'db=mon' \ - --data-urlencode "q=CREATE USER mon_persister WITH PASSWORD 'password'" -} - -function
install_schema_metric_database_vertica { - /opt/vertica/bin/vsql -U dbadmin -w password < "${MONASCA_API_DIR}"/devstack/files/vertica/mon_metrics.sql - /opt/vertica/bin/vsql -U dbadmin -w password < "${MONASCA_API_DIR}"/devstack/files/vertica/mon_alarms.sql - /opt/vertica/bin/vsql -U dbadmin -w password < "${MONASCA_API_DIR}"/devstack/files/vertica/roles.sql - /opt/vertica/bin/vsql -U dbadmin -w password < "${MONASCA_API_DIR}"/devstack/files/vertica/users.sql -} - -function install_schema_metric_database_cassandra { - if is_service_enabled monasca-persister; then - local CASSANDRA_CONNECT_TIMEOUT=300 - local CASSANDRA_REQUEST_TIMEOUT=300 - sudo cp -f "${MONASCA_API_DIR}"/devstack/files/cassandra/*.cql $MONASCA_SCHEMA_DIR - /usr/bin/cqlsh ${SERVICE_HOST} -f $MONASCA_SCHEMA_DIR/monasca_schema.cql \ - --connect-timeout="${CASSANDRA_CONNECT_TIMEOUT}" \ - --request-timeout="${CASSANDRA_REQUEST_TIMEOUT}" - fi -} - -function install_schema_kafka_topics { - sudo mkdir -p /opt/kafka/logs || true - sudo chown kafka:kafka /opt/kafka/logs - sudo chmod 0766 /opt/kafka/logs - # The right number of partitions is crucial for performance optimization; - # in a high-load (real-world) deployment this number should be increased. - for topic in ${KAFKA_SERVICE_TOPICS//,/ }; do - /opt/kafka/bin/kafka-topics.sh --create \ - --bootstrap-server $KAFKA_SERVICE_HOST:$KAFKA_SERVICE_PORT \ - --replication-factor 1 \ - --partitions 3 \ - --topic $topic - done -} - -function install_schema_alarm_database { - local databaseName="mon" - - if is_service_enabled mysql postgresql; then - recreate_database $databaseName - $MONASCA_API_BIN_DIR/monasca_db upgrade - fi -} - -function clean_schema { - - echo_summary "Clean Monasca Schema" - - if is_service_enabled mysql; then - sudo echo "drop database mon;" | mysql -u$DATABASE_USER -p$DATABASE_PASSWORD - elif is_service_enabled postgresql; then - sudo -u postgres psql -c "DROP DATABASE mon;" - fi - - sudo rm -rf $MONASCA_SCHEMA_DIR - -} - -function install_monasca_common_java { - echo_summary "Install monasca_common Java" - - git_clone $MONASCA_COMMON_REPO $MONASCA_COMMON_DIR $MONASCA_COMMON_BRANCH - (cd "${MONASCA_COMMON_DIR}"/java ; sudo mvn clean install -DskipTests) -} - -function clean_monasca_common_java { - echo_summary "Clean Monasca monasca_common" - - (cd "${MONASCA_COMMON_DIR}" ; sudo mvn clean) -} - -function install_monasca_api_java { - - echo_summary "Install Monasca monasca_api_java" - - (cd "${MONASCA_API_DIR}"/java ; sudo mvn clean package -DskipTests) - - local version="" - version="$(get_version_from_pom "${MONASCA_API_DIR}"/java)" - - sudo cp -f "${MONASCA_API_DIR}"/java/target/monasca-api-${version}-shaded.jar \ - /opt/monasca/monasca-api.jar - - sudo useradd --system -g monasca mon-api || true - - sudo cp -f "${MONASCA_API_DIR}"/devstack/files/monasca-api/monasca-api.service /etc/systemd/system/monasca-api.service - - if [[ "${MONASCA_METRICS_DB,,}" == 'vertica' ]]; then - - # Add the Vertica JDBC to the class path.
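# (illustrative effect of the two seds below: the unit file's java invocation gains
#   -cp /opt/monasca/monasca-api.jar:/opt/monasca/vertica-jdbc-${VERTICA_VERSION}.jar
# and its service dependency switches from influxdb.service to vertica.service)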
- sudo sed -i "s/-cp \/opt\/monasca\/monasca-api.jar/-cp \/opt\/monasca\/monasca-api.jar:\/opt\/monasca\/vertica-jdbc-${VERTICA_VERSION}.jar/g" /etc/systemd/system/monasca-api.service - - sudo sed -i "s/influxdb.service/vertica.service/g" /etc/systemd/system/monasca-api.service - - fi - - sudo chown root:root /etc/systemd/system/monasca-api.service - - sudo chmod 0644 /etc/systemd/system/monasca-api.service - - sudo mkdir -p /var/log/monasca || true - - sudo chown root:monasca /var/log/monasca - - sudo chmod 0755 /var/log/monasca - - sudo mkdir -p /var/log/monasca/api || true - - sudo chown root:monasca /var/log/monasca/api - - sudo chmod 0775 /var/log/monasca/api - - sudo mkdir -p /etc/monasca || true - - sudo chown root:monasca /etc/monasca - - sudo chmod 0775 /etc/monasca - - local dbEngine="com.mysql.jdbc.jdbc2.optional.MysqlDataSource" - local dbPort=3306 - - if [[ ${MONASCA_DATABASE_USE_ORM} == "True" ]]; then - if is_service_enabled postgresql; then - dbEngine="org.postgresql.ds.PGPoolingDataSource" - dbPort=5432 - fi - fi - - sudo cp -f "${MONASCA_API_DIR}"/devstack/files/monasca-api/api-config.yml /etc/monasca/api-config.yml - sudo chown mon-api:root /etc/monasca/api-config.yml - sudo chmod 0640 /etc/monasca/api-config.yml - - sudo sed -e " - s|%KAFKA_HOST%|$SERVICE_HOST|g; - s|%MONASCA_DATABASE_USE_ORM%|$MONASCA_DATABASE_USE_ORM|g; - s|%MONASCA_API_DATABASE_ENGINE%|$dbEngine|g; - s|%MONASCA_API_SERVICE_HOST%|$MONASCA_API_SERVICE_HOST|g; - s|%MONASCA_API_SERVICE_PORT%|$MONASCA_API_SERVICE_PORT|g; - s|%MONASCA_API_ADMIN_PORT%|$MONASCA_API_ADMIN_PORT|g; - s|%DATABASE_USER%|$DATABASE_USER|g; - s|%DATABASE_HOST%|$DATABASE_HOST|g; - s|%DATABASE_PORT%|$dbPort|g; - s|%MYSQL_HOST%|$MYSQL_HOST|g; - s|%MYSQL_PORT%|$dbPort|g; - s|%DATABASE_PASSWORD%|$DATABASE_PASSWORD|g; - s|%MONASCA_METRICS_DB%|$MONASCA_METRICS_DB|g; - s|%INFLUXDB_HOST%|$SERVICE_HOST|g; - s|%INFLUXDB_PORT%|8086|g; - s|%VERTICA_HOST%|$SERVICE_HOST|g; - s|%ADMIN_PASSWORD%|$ADMIN_PASSWORD|g; - s|%KEYSTONE_SERVICE_PORT%|$KEYSTONE_SERVICE_PORT|g; - s|%KEYSTONE_SERVICE_HOST%|$KEYSTONE_SERVICE_HOST|g; - " -i /etc/monasca/api-config.yml - -} -function install_monasca-api { - echo_summary "Install Monasca monasca_api " - - git_clone $MONASCA_API_REPO $MONASCA_API_DIR $MONASCA_API_BRANCH - - if python3_enabled; then - enable_python3_package monasca-api - fi - setup_develop $MONASCA_API_DIR - - install_monasca_common - - if [[ "${MONASCA_API_USE_MOD_WSGI}" == 'True' ]]; then - pip_install uwsgi - else - pip_install_gr gunicorn - fi - - if [[ "${MONASCA_METRICS_DB,,}" == 'influxdb' ]]; then - pip_install_gr influxdb - fi - if [[ "${MONASCA_METRICS_DB,,}" == 'cassandra' ]]; then - pip_install_gr cassandra-driver - fi - if is_service_enabled postgresql; then - apt_get install libpq-dev - pip_install_gr psycopg2 - elif is_service_enabled mysql; then - apt_get install libmysqlclient-dev - pip_install_gr PyMySQL - fi - -} - -function configure_monasca_api_python { - if is_service_enabled monasca-api; then - echo_summary "Configuring monasca-api python" - sudo install -d -o $STACK_USER $MONASCA_API_CONF_DIR - - sudo mkdir -p /var/log/monasca || true - - sudo chown $STACK_USER:monasca /var/log/monasca - - sudo chmod 0755 /var/log/monasca - - sudo mkdir -p /var/log/monasca/api || true - - sudo chown $STACK_USER:monasca /var/log/monasca/api - - sudo chmod 0775 /var/log/monasca/api - - # create configuration files in target locations - rm -rf $MONASCA_API_CONF $MONASCA_API_PASTE_INI $MONASCA_API_LOGGING_CONF - 
$MONASCA_API_BIN_DIR/oslo-config-generator \ - --config-file $MONASCA_API_DIR/config-generator/monasca-api.conf \ - --output-file /tmp/monasca-api.conf - - install -m 600 /tmp/monasca-api.conf $MONASCA_API_CONF && rm -rf /tmp/monasca-api.conf - install -m 600 $MONASCA_API_DIR/etc/api-logging.conf $MONASCA_API_LOGGING_CONF - install -m 600 $MONASCA_API_DIR/etc/api-config.ini $MONASCA_API_PASTE_INI - # create configuration files in target locations - - local dbAlarmUrl - local dbMetricDriver - if [[ "${MONASCA_METRICS_DB,,}" == 'cassandra' ]]; then - dbMetricDriver="monasca_api.common.repositories.cassandra.metrics_repository:MetricsRepository" - else - dbMetricDriver="monasca_api.common.repositories.influxdb.metrics_repository:MetricsRepository" - fi - dbAlarmUrl=`database_connection_url mon` - - # default settings - iniset "$MONASCA_API_CONF" DEFAULT region $REGION_NAME - iniset "$MONASCA_API_CONF" DEFAULT log_config_append $MONASCA_API_LOGGING_CONF - if [[ "$USE_OLD_LOG_API" == "true" ]]; then - iniset "$MONASCA_API_CONF" DEFAULT enable_logs_api false - else - if is_service_enabled monasca-log; then - iniset "$MONASCA_API_CONF" DEFAULT enable_logs_api true - else - iniset "$MONASCA_API_CONF" DEFAULT enable_logs_api false - fi - fi - - # logging - iniset "$MONASCA_API_LOGGING_CONF" handler_file args "('$MONASCA_API_LOG_DIR/monasca-api.log', 'a', 104857600, 5)" - - # messaging - iniset "$MONASCA_API_CONF" messaging driver "monasca_api.common.messaging.kafka_publisher:KafkaPublisher" - iniset "$MONASCA_API_CONF" kafka uri "$SERVICE_HOST:9092" - - # databases - iniset "$MONASCA_API_CONF" database connection $dbAlarmUrl - iniset "$MONASCA_API_CONF" repositories metrics_driver $dbMetricDriver - iniset "$MONASCA_API_CONF" cassandra contact_points $(ipv6_unquote $SERVICE_HOST) - iniset "$MONASCA_API_CONF" influxdb ip_address $(ipv6_unquote $SERVICE_HOST) - iniset "$MONASCA_API_CONF" influxdb port 8086 - - # keystone & security - configure_auth_token_middleware $MONASCA_API_CONF "admin" - iniset "$MONASCA_API_CONF" keystone_authtoken region_name $REGION_NAME - iniset "$MONASCA_API_CONF" keystone_authtoken project_name "admin" - iniset "$MONASCA_API_CONF" keystone_authtoken password $ADMIN_PASSWORD - - iniset "$MONASCA_API_CONF" security default_authorized_roles "monasca-user" - iniset "$MONASCA_API_CONF" security agent_authorized_roles "monasca-agent" - iniset "$MONASCA_API_CONF" security read_only_authorized_roles "monasca-read-only-user" - iniset "$MONASCA_API_CONF" security delegate_authorized_roles "monasca-agent" - - # server setup - iniset "$MONASCA_API_PASTE_INI" server:main host $MONASCA_API_SERVICE_HOST - iniset "$MONASCA_API_PASTE_INI" server:main port $MONASCA_API_SERVICE_PORT - iniset "$MONASCA_API_PASTE_INI" server:main workers $API_WORKERS - - # link configuration for the gate - ln -sf $MONASCA_API_CONF $MON_API_GATE_CONFIGURATION_DIR - ln -sf $MONASCA_API_PASTE_INI $MON_API_GATE_CONFIGURATION_DIR - ln -sf $MONASCA_API_LOGGING_CONF $MON_API_GATE_CONFIGURATION_DIR - - if [ "${MONASCA_API_USE_MOD_WSGI}" == 'True' ]; then - configure_monasca_api_python_uwsgi - fi - - fi -} - -function configure_monasca_api_python_uwsgi { - rm -rf $MONASCA_API_UWSGI_CONF - - install -m 600 $MONASCA_API_DIR/etc/api-uwsgi.ini $MONASCA_API_UWSGI_CONF - write_uwsgi_config "$MONASCA_API_UWSGI_CONF" "$MONASCA_API_BIN_DIR/monasca-api-wsgi" "/metrics" -} - -function start_monasca_api_python { - if is_service_enabled monasca-api; then - echo_summary "Starting monasca-api" - - local service_port=$MONASCA_API_SERVICE_PORT -
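# (the branch below either serves the API with uwsgi behind Apache, mounted under
# /metrics, or runs gunicorn directly on the configured port; a rough smoke test
# once it is up, built only from the variables above:
#   curl $MONASCA_API_SERVICE_PROTOCOL://$MONASCA_API_SERVICE_HOST:$MONASCA_API_SERVICE_PORT/
# which should list the available API versions, depending on the auth setup)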
local service_protocol=$MONASCA_API_SERVICE_PROTOCOL - local gunicorn="$MONASCA_API_BIN_DIR/gunicorn" - - restart_service memcached - if [ "${MONASCA_API_USE_MOD_WSGI}" == 'True' ]; then - service_uri=$service_protocol://$MONASCA_API_SERVICE_HOST/api/v2.0 - run_process "monasca-api" "$MONASCA_API_BIN_DIR/uwsgi --ini $MONASCA_API_UWSGI_CONF" "" - else - service_uri=$service_protocol://$MONASCA_API_SERVICE_HOST:$service_port - run_process "monasca-api" "$gunicorn --paste $MONASCA_API_PASTE_INI" - fi - - echo "Waiting for monasca-api to start..." - if ! wait_for_service $SERVICE_TIMEOUT $service_uri; then - die $LINENO "monasca-api did not start" - fi - fi -} - -function stop_monasca_api_python { - if is_service_enabled monasca-api; then - stop_process "monasca-api" || true - fi -} - -function clean_monasca_api_java { - - echo_summary "Clean Monasca monasca_api_java" - - (cd "${MONASCA_API_DIR}" ; sudo mvn clean) - - sudo rm /etc/monasca/api-config.yml - - sudo rm -rf /var/log/monasca/api - - sudo systemctl disable monasca-api - - sudo rm /etc/systemd/system/monasca-api.service - - sudo rm /opt/monasca/monasca-api.jar - - sudo rm /var/log/upstart/monasca-api.log* - - sudo userdel mon-api -} - -function clean_monasca_api_python { - - echo_summary "Clean Monasca monasca_api_python" - - sudo rm -rf /etc/monasca/monasca-api.conf - sudo rm -rf /etc/monasca/api-logging.conf - sudo rm -rf /etc/monasca/api-config.ini - sudo rm -rf $MON_API_GATE_CONFIGURATION_DIR - sudo rm -rf $MONASCA_API_LOG_DIR - - if is_service_enabled postgresql; then - apt_get purge libpq-dev - elif is_service_enabled mysql; then - apt_get purge libmysqlclient-dev - fi - - if [ "$MONASCA_API_USE_MOD_WSGI" == "True" ]; then - clean_monasca_api_uwsgi - fi - -} - -function clean_monasca_api_uwsgi { - sudo rm -rf $MONASCA_API_UWSGI_CONF -} - -function start_monasca_api { - if [[ "${MONASCA_API_IMPLEMENTATION_LANG,,}" == 'java' ]]; then - start_service monasca-api || restart_service monasca-api - elif [[ "${MONASCA_API_IMPLEMENTATION_LANG,,}" == 'python' ]]; then - start_monasca_api_python - fi -} - -function stop_monasca_api { - if [[ "${MONASCA_API_IMPLEMENTATION_LANG,,}" == 'java' ]]; then - stop_service monasca-api || true - elif [[ "${MONASCA_API_IMPLEMENTATION_LANG,,}" == 'python' ]]; then - stop_monasca_api_python - fi -} - -function configure_monasca_api { - if [[ "${MONASCA_API_IMPLEMENTATION_LANG,,}" == 'python' ]]; then - configure_monasca_api_python - fi - #NOTE(basiaka) Refactor of monasca-api in Java version will be handled in another change -} - -function install_monasca_thresh { - - echo_summary "Install Monasca monasca_thresh" - - git_clone $MONASCA_THRESH_REPO $MONASCA_THRESH_DIR $MONASCA_THRESH_BRANCH - (cd "${MONASCA_THRESH_DIR}"/thresh ; sudo mvn clean package -DskipTests) - - local version="" - version="$(get_version_from_pom "${MONASCA_THRESH_DIR}"/thresh)" - - sudo cp -f "${MONASCA_THRESH_DIR}"/thresh/target/monasca-thresh-${version}-shaded.jar \ - /opt/monasca/monasca-thresh.jar - - sudo useradd --system -g monasca mon-thresh || true - - sudo mkdir -p /etc/monasca || true - - sudo chown root:monasca /etc/monasca - - sudo chmod 0775 /etc/monasca - - sudo cp -f "${MONASCA_API_DIR}"/devstack/files/monasca-thresh/thresh-config.yml /etc/monasca/thresh-config.yml - - sudo chown root:monasca /etc/monasca/thresh-config.yml - - sudo chmod 0640 /etc/monasca/thresh-config.yml - - local dbEngine="org.mariadb.jdbc.Driver" - local dbPort=3306 - - if [[ ${MONASCA_DATABASE_USE_ORM} == "True" ]]; then - if is_service_enabled 
postgresql; then - dbEngine="org.postgresql.ds.PGPoolingDataSource" - dbPort=5432 - fi - fi - - sudo sed -e " - s|%KAFKA_HOST%|$SERVICE_HOST|g; - s|%MONASCA_THRESH_DATABASE_ENGINE%|$dbEngine|g; - s|%DATABASE_USER%|$DATABASE_USER|g; - s|%MONASCA_DATABASE_USE_ORM%|$MONASCA_DATABASE_USE_ORM|g; - s|%DATABASE_TYPE%|$DATABASE_TYPE|g; - s|%DATABASE_HOST%|$DATABASE_HOST|g; - s|%DATABASE_PASSWORD%|$DATABASE_PASSWORD|g; - s|%DATABASE_PORT%|$dbPort|g; - s|%MONASCA_STATSD_PORT%|$MONASCA_STATSD_PORT|g; - " -i /etc/monasca/thresh-config.yml - - sudo cp -f "${MONASCA_API_DIR}"/devstack/files/monasca-thresh/monasca-thresh /etc/init.d/monasca-thresh - - sudo chown root:root /etc/init.d/monasca-thresh - - sudo chmod 0744 /etc/init.d/monasca-thresh - - sudo systemctl enable monasca-thresh - -} - -function clean_monasca_thresh { - - echo_summary "Clean Monasca monasca_thresh" - - (cd "${MONASCA_THRESH_DIR}"/thresh ; sudo mvn clean) - - sudo systemctl disable monasca-thresh - - sudo rm /etc/init.d/monasca-thresh - - sudo rm /etc/monasca/thresh-config.yml - - sudo userdel mon-thresh || true - - sudo rm /opt/monasca/monasca-thresh.jar - -} - -function create_accounts { - - local projects=("mini-mon" "admin" "demo") - declare -A users=( - ["mini-mon"]="password" - ["monasca-agent"]="password" - ["admin"]="${ADMIN_PASSWORD}" - ["demo"]="${ADMIN_PASSWORD}" - ["monasca-read-only-user"]="password" - ) - local roles=("monasca-user" "monasca-agent" "admin" "monasca-read-only-user") - - for project in "${projects[@]}"; do - get_or_create_project "${project}" - done - for user in "${!users[@]}"; do - local password - password="${users[$user]}" - get_or_create_user "${user}" "${password}" - done - for role in "${roles[@]}"; do - get_or_create_role "${role}" - done - - # create assignments - # args=> <role> <user> <project> - get_or_add_user_project_role "monasca-user" "mini-mon" "mini-mon" - get_or_add_user_project_role "monasca-user" "admin" "admin" - get_or_add_user_project_role "monasca-user" "demo" "demo" - - get_or_add_user_project_role "admin" "mini-mon" "mini-mon" - - get_or_add_user_project_role "monasca-agent" "monasca-agent" "mini-mon" - - get_or_add_user_project_role "monasca-read-only-user" "monasca-read-only-user" "mini-mon" - - # create service - get_or_create_service "monasca" "${MONASCA_SERVICE_TYPE}" "Monasca Monitoring Service" - - # create endpoint - get_or_create_endpoint \ - "monasca" \ - "${REGION_NAME}" \ - "${MONASCA_API_URI_V2}" \ - "${MONASCA_API_URI_V2}" \ - "${MONASCA_API_URI_V2}" - - if is_service_enabled monasca-log; then - local log_search_url="http://$KIBANA_SERVICE_HOST:$KIBANA_SERVICE_PORT/" - - get_or_create_service "logs" "logs" "Monasca Log service" - get_or_create_endpoint \ - "logs" \ - "$REGION_NAME" \ - "$MONASCA_API_URI_V2" \ - "$MONASCA_API_URI_V2" \ - "$MONASCA_API_URI_V2" - - get_or_create_service "logs-search" "logs-search" "Monasca Log search service" - get_or_create_endpoint \ - "logs-search" \ - "$REGION_NAME" \ - "$log_search_url" \ - "$log_search_url" \ - "$log_search_url" - - fi -} - -function install_keystone_client { - PIP_VIRTUAL_ENV=/opt/monasca - - install_keystoneclient - install_keystoneauth - - unset PIP_VIRTUAL_ENV -} - -function install_monasca_agent { - - if is_service_enabled monasca-agent; then - echo_summary "Install Monasca monasca_agent" - - apt_get install python3-yaml libxml2-dev libxslt1-dev - - MONASCA_AGENT_EXTRAS="kafka_plugin" - if is_service_enabled nova && [ "$VIRT_DRIVER" = "libvirt" ]; then - apt_get install libvirt-dev -
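# (illustrative note: the libvirt extra appended next is what pulls the
# libvirt-python binding into the agent's virtualenv, and that binding builds
# against the libvirt-dev headers installed just above)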
MONASCA_AGENT_EXTRAS=${MONASCA_AGENT_EXTRAS},libvirt - fi - - git_clone $MONASCA_CLIENT_REPO $MONASCA_CLIENT_DIR $MONASCA_CLIENT_BRANCH - git_clone $MONASCA_AGENT_REPO $MONASCA_AGENT_DIR $MONASCA_AGENT_BRANCH - - sudo mkdir -p /opt/monasca-agent || true - sudo chown $STACK_USER:monasca /opt/monasca-agent - - # TODO: remove the trailing pip version when a proper fix - # arrives for handling this bug https://github.com/pypa/pip/issues/8210 - # Similar issue: https://bugs.launchpad.net/devstack/+bug/1906322 - if python3_enabled; then - (cd /opt/monasca-agent ; - virtualenv -p python3 . ; - bin/python3 -m pip install --upgrade pip==20.2.3) - sudo rm -rf /opt/stack/monasca-common/.eggs/ - else - (cd /opt/monasca-agent ; virtualenv .) - fi - - PIP_VIRTUAL_ENV=/opt/monasca-agent - - setup_install $MONASCA_AGENT_DIR $MONASCA_AGENT_EXTRAS - setup_dev_lib "python-monascaclient" - - unset PIP_VIRTUAL_ENV - - sudo mkdir -p /etc/monasca/agent/conf.d || true - - sudo chown root:root /etc/monasca/agent/conf.d - - sudo chmod 0755 /etc/monasca/agent/conf.d - - sudo mkdir -p /usr/lib/monasca/agent/custom_checks.d || true - - sudo chown root:root /usr/lib/monasca/agent/custom_checks.d - - sudo chmod 0755 /usr/lib/monasca/agent/custom_checks.d - - sudo mkdir -p /usr/lib/monasca/agent/custom_detect.d || true - - sudo chown root:root /usr/lib/monasca/agent/custom_detect.d - - sudo chmod 0755 /usr/lib/monasca/agent/custom_detect.d - - sudo cp -f "${MONASCA_API_DIR}"/devstack/files/monasca-agent/host_alive.yaml /etc/monasca/agent/conf.d/host_alive.yaml - sudo cp -f "${MONASCA_API_DIR}"/devstack/files/monasca-agent/http_check.yaml /etc/monasca/agent/conf.d/http_check.yaml - sudo cp -f "${MONASCA_API_DIR}"/devstack/files/monasca-agent/kafka_consumer.yaml /etc/monasca/agent/conf.d/kafka_consumer.yaml - sudo cp -f "${MONASCA_API_DIR}"/devstack/files/monasca-agent/mysql.yaml /etc/monasca/agent/conf.d/mysql.yaml - sudo cp -f "${MONASCA_API_DIR}"/devstack/files/monasca-agent/process.yaml /etc/monasca/agent/conf.d/process.yaml - sudo cp -f "${MONASCA_API_DIR}"/devstack/files/monasca-agent/zk.yaml /etc/monasca/agent/conf.d/zk.yaml - - sudo sed -i "s/127\.0\.0\.1/$(hostname)/" /etc/monasca/agent/conf.d/*.yaml - - sudo cp -f "${MONASCA_API_DIR}"/devstack/files/monasca-agent/monasca-reconfigure /usr/local/bin/monasca-reconfigure - - sudo chown root:root /usr/local/bin/monasca-reconfigure - - sudo chmod 0750 /usr/local/bin/monasca-reconfigure - - sudo sed -e " - s|%MONASCA_STATSD_PORT%|$MONASCA_STATSD_PORT|g; - s|%MONASCA_SERVICE_TYPE%|$MONASCA_SERVICE_TYPE|g; - s|%KEYSTONE_AUTH_URI%|$KEYSTONE_AUTH_URI|g; - s|%SERVICE_DOMAIN_NAME%|$SERVICE_DOMAIN_NAME|g; - s|%REGION_NAME%|$REGION_NAME|g; - " -i /usr/local/bin/monasca-reconfigure - fi -} - -function clean_monasca_agent { - - if is_service_enabled monasca-agent; then - echo_summary "Clean Monasca monasca_agent" - - sudo rm /etc/init.d/monasca-agent - - sudo rm /usr/local/bin/monasca-reconfigure - - sudo rm /etc/monasca/agent/conf.d/host_alive.yaml - - sudo rm -rf /usr/lib/monasca/agent/custom_detect.d - - sudo rm -rf /usr/lib/monasca/agent/custom_checks.d - - sudo rm -rf /etc/monasca/agent/conf.d - - sudo rm -rf /etc/monasca/agent - - sudo rm -rf /opt/monasca-agent - - [[ -f /etc/systemd/system/monasca-agent.target ]] && sudo rm /etc/systemd/system/monasca-agent.target - [[ -f /etc/systemd/system/monasca-collector.service ]] && sudo rm
/etc/systemd/system/monasca-collector.service - [[ -f /etc/systemd/system/monasca-forwarder.service ]] && sudo rm /etc/systemd/system/monasca-forwarder.service - [[ -f /etc/systemd/system/monasca-statsd.service ]] && sudo rm /etc/systemd/system/monasca-statsd.service - - apt_get purge libxslt1-dev - apt_get purge libxml2-dev - apt_get purge python3-yaml - fi -} - -# install nodejs and npm packages, works behind corporate proxy - # and does not result in gnutls_handshake error -function install_nodejs { - - echo_summary "Install Node.js" - curl -sL https://deb.nodesource.com/setup_18.x | sudo bash - - - apt_get install nodejs - npm config set registry "http://registry.npmjs.org/"; \ - npm config set proxy "${HTTP_PROXY}"; \ - npm set strict-ssl false; -} - -function init_monasca_grafana { - echo_summary "Init Grafana" - - sudo cp -f -r "${MONASCA_API_DIR}"/devstack/files/grafana/dashboards.d "${DASHBOARDS_DIR}" - sudo chown -R root:root "${DASHBOARDS_DIR}" - sudo chmod -R 0644 "${DASHBOARDS_DIR}" - - - if python3_enabled; then - sudo python3 "${MONASCA_API_DIR}"/devstack/files/grafana/grafana-init.py - else - sudo python "${MONASCA_API_DIR}"/devstack/files/grafana/grafana-init.py - fi - - sudo rm -rf "${DASHBOARDS_DIR}" -} - -function install_monasca_grafana { - - echo_summary "Install Grafana" - - if [ ! -d "${GRAFANA_DIR}" ]; then - git_timed clone $GRAFANA_REPO $GRAFANA_DIR --branch $GRAFANA_BRANCH --depth 1 - fi - - npm config set python /usr/bin/python3 - - cd "${MONASCA_BASE}" - - mkdir grafana-build || true - cd grafana-build - export GOPATH=`pwd` - mkdir -p $GOPATH/src/github.com/grafana - cd $GOPATH/src/github.com/grafana - cp -rf "${GRAFANA_DIR}" . - - cd grafana - cp "${MONASCA_UI_DIR}"/grafana-dashboards/* ./public/dashboards/ - - go run build.go build - - npm config set unsafe-perm true - npm install - sudo npm install -g grunt-cli - grunt --force - - cd "${MONASCA_BASE}" - sudo rm -r grafana - - sudo useradd grafana || true - sudo mkdir /etc/grafana || true - sudo mkdir /var/lib/grafana || true - sudo mkdir /var/lib/grafana/plugins || true - sudo mkdir /var/log/grafana || true - - git_clone $MONASCA_GRAFANA_DATASOURCE_REPO $MONASCA_GRAFANA_DATASOURCE_DIR $MONASCA_GRAFANA_DATASOURCE_BRANCH - sudo ln -sfF "${MONASCA_GRAFANA_DATASOURCE_DIR}" /var/lib/grafana/plugins/monasca-grafana-datasource - - sudo chown -R grafana:grafana /var/lib/grafana /var/log/grafana - - sudo cp -f "${MONASCA_API_DIR}"/devstack/files/grafana/grafana.ini /etc/grafana/grafana.ini - sudo sed -e " - s|%KEYSTONE_AUTH_URI%|$KEYSTONE_AUTH_URI|g; - " -i /etc/grafana/grafana.ini - - sudo cp -f "${MONASCA_API_DIR}"/devstack/files/grafana/grafana-server /etc/init.d/grafana-server - sudo sed -i "s#/usr/sbin#"${MONASCA_BASE}"/grafana-build/src/github.com/grafana/grafana/bin#g" /etc/init.d/grafana-server - sudo sed -i "s#/usr/share#"${MONASCA_BASE}"/grafana-build/src/github.com/grafana#g" /etc/init.d/grafana-server - - sudo systemctl enable grafana-server -} - -function clean_nodejs { - apt_get purge nodejs npm -} - -function clean_monasca_grafana { - - sudo rm -rf "${MONASCA_BASE}"/grafana-build - - sudo systemctl disable grafana-server - - sudo rm /etc/init.d/grafana-server - - sudo rm -r /etc/grafana - - sudo rm -r /var/lib/grafana - - sudo rm -r /var/log/grafana - -} - -function install_go { - echo_summary "Install Go ${GO_VERSION}" - - local go_tarball=go${GO_VERSION}.linux-amd64.tar.gz - local go_tarball_url=https://storage.googleapis.com/golang/${go_tarball} - - local go_tarball_dest -
go_tarball_dest=`get_extra_file ${go_tarball_url}` - - sudo tar -C /usr/local -xzf ${go_tarball_dest} - export PATH=$PATH:/usr/local/go/bin -} - -function clean_go { - echo_summary "Clean Go ${GO_VERSION}" - - sudo rm -f ${FILES}/go${GO_VERSION}* - sudo rm -rf /usr/local/go* - export PATH=$(echo $PATH | sed -e 's|:/usr/local/go/bin||') -} - -###### extra functions - -# Validate that a program version string is of the form 1, 1.0 or 1.0.0. - # Return 0 if it is a valid program version string, otherwise 1. -function validate_version { - version_regex="^([0-9]+\.)?([0-9]+\.)?([0-9]+)$" - - if [[ $1 =~ $version_regex ]]; then - return 0 - else - return 1 - fi -} - -# Prints the version specified in the pom.xml file in the directory given by - # the argument -function get_version_from_pom { - python -c "import xml.etree.ElementTree as ET; \ - print(ET.parse(open('$1/pom.xml')).getroot().find( \ - '{http://maven.apache.org/POM/4.0.0}version').text)" -} - -function install_monasca_common { - git_clone $MONASCA_COMMON_REPO $MONASCA_COMMON_DIR $MONASCA_COMMON_BRANCH - setup_dev_lib "monasca-common" -} - -function install_monasca_statsd { - git_clone $MONASCA_STATSD_REPO $MONASCA_STATSD_DIR $MONASCA_STATSD_BRANCH - setup_dev_lib "monasca-statsd" -} - -function install_gate_config_holder { - sudo install -d -o $STACK_USER $MON_API_GATE_CONFIGURATION_DIR -} - -function find_nearest_apache_mirror { - if [ -z "$APACHE_MIRROR" ]; then - local mirror; - mirror=`curl -s 'https://www.apache.org/dyn/closer.cgi?as_json=1' | jq --raw-output '.preferred'` - APACHE_MIRROR=$mirror - fi -} - -# This fixes a privilege problem that prevents the agent from gathering - # metrics from services started as the root user. -function init_collector_service { - if is_service_enabled monasca-agent; then - echo_summary "Init Monasca collector service" - sudo systemctl stop monasca-collector - sudo sed -i "s/User=mon-agent/User=root/g" /etc/systemd/system/monasca-collector.service - sudo sed -i "s/Group=mon-agent/Group=root/g" /etc/systemd/system/monasca-collector.service - sudo systemctl daemon-reload - sudo systemctl restart monasca-collector - fi -} - -function configure_tempest_for_monasca { - iniset $TEMPEST_CONFIG monitoring kibana_version $KIBANA_VERSION -} - -# check for service enabled -if is_service_enabled monasca; then - - if [[ "$1" == "stack" && "$2" == "pre-install" ]]; then - # Set up system services - echo_summary "Configuring Monasca system services" - pre_install_monasca - - elif [[ "$1" == "stack" && "$2" == "install" ]]; then - # Perform installation of service source - echo_summary "Installing Monasca" - install_monasca - - elif [[ "$1" == "stack" && "$2" == "test-config" ]]; then - if is_service_enabled tempest; then - echo_summary "Configuring Tempest for Monasca" - configure_tempest_for_monasca - fi - - elif [[ "$1" == "stack" && "$2" == "post-config" ]]; then - # Configure after the other layer 1 and 2 services have been configured - echo_summary "Configuring Monasca" - configure_monasca - - elif [[ "$1" == "stack" && "$2" == "extra" ]]; then - # Initialize and start the Monasca service - echo_summary "Initializing Monasca" - extra_monasca - fi - - if [[ "$1" == "unstack" ]]; then - # Shut down Monasca services - echo_summary "Unstacking Monasca" - unstack_monasca - fi - - if [[ "$1" == "clean" ]]; then - # Remove state and transient data - # Remember clean.sh first calls unstack.sh - echo_summary "Cleaning Monasca" - clean_monasca - fi -fi - -# check for service enabled -if is_service_enabled monasca-log; then - - if [[ "$1" == "stack"
&& "$2" == "pre-install" ]]; then - # Set up system services - echo_summary "Configuring Monasca Log Management system services" - pre_install_logs_services - - elif [[ "$1" == "stack" && "$2" == "install" ]]; then - # Perform installation of service source - echo_summary "Installing Monasca Log Management" - install_monasca_log - - elif [[ "$1" == "stack" && "$2" == "post-config" ]]; then - # Configure after the other layer 1 and 2 services have been configured - echo_summary "Configuring Monasca Log Management" - configure_monasca_log - - elif [[ "$1" == "stack" && "$2" == "extra" ]]; then - # Initialize and start the Monasca service - echo_summary "Initializing Monasca Log Management" - init_monasca_log - init_monasca_grafana_dashboards - if is_service_enabled monasca-agent; then - init_agent - fi - start_monasca_log - fi - - if [[ "$1" == "unstack" ]]; then - # Shut down Monasca services - echo_summary "Unstacking Monasca Log Management" - stop_monasca_log - delete_kafka_topics - fi - - if [[ "$1" == "clean" ]]; then - # Remove state and transient data - # Remember clean.sh first calls unstack.sh - echo_summary "Cleaning Monasca Log Management" - clean_monasca_log - fi -fi - -#Restore errexit -$ERREXIT - -# Restore xtrace -$XTRACE diff --git a/devstack/settings b/devstack/settings deleted file mode 100644 index 0ed2398bf..000000000 --- a/devstack/settings +++ /dev/null @@ -1,259 +0,0 @@ -# -# (C) Copyright 2015 Hewlett Packard Enterprise Development LP -# Copyright 2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -#    http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# The following two variables allow switching between java and python for the implementations -# of the Monasca API and the Monasca Persister. These variables can be set here -# or in the DevStack local.conf file. - -# MONASCA_API_IMPLEMENTATION_LANG=${MONASCA_API_IMPLEMENTATION_LANG:-java} -MONASCA_API_IMPLEMENTATION_LANG=${MONASCA_API_IMPLEMENTATION_LANG:-python} - -# MONASCA_PERSISTER_IMPLEMENTATION_LANG=${MONASCA_PERSISTER_IMPLEMENTATION_LANG:-java} -MONASCA_PERSISTER_IMPLEMENTATION_LANG=${MONASCA_PERSISTER_IMPLEMENTATION_LANG:-python} - -# MONASCA_METRICS_DB=${MONASCA_METRICS_DB:-vertica} -# MONASCA_METRICS_DB=${MONASCA_METRICS_DB:-cassandra} -MONASCA_METRICS_DB=${MONASCA_METRICS_DB:-influxdb} - -# Turn on all the Monasca services by default. Currently enabling specific services -# has no effect. All services are enabled by default. There is currently no mechanism -# implemented to turn off specific Monasca services. 
- -# -# Monasca top level service that enables all other services -# -enable_service monasca - -# -# Monasca infrastructure services -# - -# devstack zookeeper -enable_service monasca-zookeeper - -# Monasca databases -# MySQL is already enabled in devstack -enable_service monasca-influxdb - -# Apache Storm -enable_service monasca-storm -enable_service monasca-storm-nimbus -enable_service monasca-storm-supervisor -disable_service monasca-storm-ui -disable_service monasca-storm-logviewer - -# monasca-kafka depends on monasca-zookeeper -enable_service monasca-kafka - -# -# Monasca core services -# - -# monasca-api depends on monasca-influxdb, MySQL, monasca-kafka -enable_service monasca-api - -# monasca-persister depends on monasca-influxdb, monasca-kafka -enable_service monasca-persister - -# monasca-notification depends on MySQL, monasca-kafka -enable_service monasca-notification - -# monasca-thresh depends on MySQL, monasca-kafka, monasca-storm -enable_service monasca-thresh - -# monasca-agent depends on monasca-api -enable_service monasca-agent - -# monasca-cli depends on monasca-api -enable_service monasca-cli - -# -# Monasca logs services -# - -enable_service kibana -enable_service elasticsearch -enable_service monasca-log -enable_service monasca-log-persister -enable_service monasca-log-transformer -enable_service monasca-log-metrics -enable_service monasca-log-agent - -# -# Dependent Software Versions -# - -# Set the InfluxDB version to use for the Java and Python API -# InfluxDB has modified the result sets for SHOW SERIES. The -# Python API has been modified to support those changes, but the -# Java API hasn't yet. These two environment variables allow you -# to deploy either the Java or Python API without having to -# also set the INFLUXDB_VERSION when switching between the two. -INFLUXDB_JAVA_VERSION=${INFLUXDB_JAVA_VERSION:-0.9.5} -INFLUXDB_PYTHON_VERSION=${INFLUXDB_PYTHON_VERSION:-1.7.6} - -# To set the same version of InfluxDB for both languages use the -# following variable. This will override both the Java and Python -# specific variables above. 
-# INFLUXDB_VERSION=${INFLUXDB_VERSION:-0.9.5} -INFLUXDB_DEB_URL=${INFLUXDB_DEB_URL:-https://dl.influxdata.com/influxdb/releases/} - -VERTICA_VERSION=${VERTICA_VERSION:-8.0.0-0} -CASSANDRA_VERSION=${CASSANDRA_VERSION:-311x} -ZOOKEEPER_VERSION=${ZOOKEEPER_VERSION:-3.8.4} # 3.4.13 default in Focal; 3.4.10 default in Bionic -# Kafka deb consists of the version of scala plus the version of kafka -BASE_KAFKA_VERSION=${BASE_KAFKA_VERSION:-3.7.2} -SCALA_VERSION=${SCALA_VERSION:-2.13} -KAFKA_VERSION=${KAFKA_VERSION:-${SCALA_VERSION}-${BASE_KAFKA_VERSION}} -STORM_VERSION=${STORM_VERSION:-1.2.2} -GO_VERSION=${GO_VERSION:-"1.7.1"} -NODE_JS_VERSION=${NODE_JS_VERSION:-"4.0.0"} -NVM_VERSION=${NVM_VERSION:-"0.32.1"} -KIBANA_VERSION=${KIBANA_VERSION:-7.3.0} -LOGSTASH_VERSION=${LOGSTASH_VERSION:-7.3.0} -ELASTICSEARCH_VERSION=${ELASTICSEARCH_VERSION:-7.3.0} -LOGSTASH_OUTPUT_MONASCA_VERSION=${LOGSTASH_OUTPUT_MONASCA_VERSION:-2.0.0} - -# Needed for plugin build environment initialization -KIBANA_DEV_BRANCH=${KIBANA_DEV_BRANCH:-7.3} -KIBANA_DEV_REPO="https://github.com/elastic/kibana" - -MONASCA_KIBANA_PLUGIN_REPO=${MONASCA_KIBANA_PLUGIN_REPO:-${GIT_BASE}/openstack/monasca-kibana-plugin.git} -MONASCA_KIBANA_PLUGIN_BRANCH=${MONASCA_KIBANA_PLUGIN_BRANCH:-master} -MONASCA_KIBANA_PLUGIN_DIR=${DEST}/monasca-kibana-plugin - -# Path settings -MONASCA_BASE=${DEST} -MONASCA_SCHEMA_DIR=${DEST}/monasca/schema - -# Repository settings -MONASCA_API_REPO=${MONASCA_API_REPO:-${GIT_BASE}/openstack/monasca-api.git} -MONASCA_API_BRANCH=${MONASCA_API_BRANCH:-master} -MONASCA_API_DIR=${MONASCA_BASE}/monasca-api - -MONASCA_PERSISTER_REPO=${MONASCA_PERSISTER_REPO:-${GIT_BASE}/openstack/monasca-persister.git} -MONASCA_PERSISTER_BRANCH=${MONASCA_PERSISTER_BRANCH:-master} -MONASCA_PERSISTER_DIR=${MONASCA_BASE}/monasca-persister - -MONASCA_NOTIFICATION_REPO=${MONASCA_NOTIFICATION_REPO:-${GIT_BASE}/openstack/monasca-notification.git} -MONASCA_NOTIFICATION_BRANCH=${MONASCA_NOTIFICATION_BRANCH:-master} -MONASCA_NOTIFICATION_DIR=${MONASCA_BASE}/monasca-notification - -MONASCA_THRESH_REPO=${MONASCA_THRESH_REPO:-${GIT_BASE}/openstack/monasca-thresh.git} -MONASCA_THRESH_BRANCH=${MONASCA_THRESH_BRANCH:-master} -MONASCA_THRESH_DIR=${MONASCA_BASE}/monasca-thresh - -MONASCA_CLIENT_REPO=${MONASCA_CLIENT_REPO:-${GIT_BASE}/openstack/python-monascaclient.git} -MONASCA_CLIENT_BRANCH=${MONASCA_CLIENT_BRANCH:-master} -MONASCA_CLIENT_DIR=${MONASCA_BASE}/python-monascaclient - -MONASCA_AGENT_REPO=${MONASCA_AGENT_REPO:-${GIT_BASE}/openstack/monasca-agent.git} -MONASCA_AGENT_BRANCH=${MONASCA_AGENT_BRANCH:-master} -MONASCA_AGENT_DIR=${MONASCA_BASE}/monasca-agent - -MONASCA_UI_REPO=${MONASCA_UI_REPO:-${GIT_BASE}/openstack/monasca-ui.git} -MONASCA_UI_BRANCH=${MONASCA_UI_BRANCH:-master} -MONASCA_UI_DIR=${MONASCA_BASE}/monasca-ui - -MONASCA_COMMON_REPO=${MONASCA_COMMON_REPO:-${GIT_BASE}/openstack/monasca-common.git} -MONASCA_COMMON_BRANCH=${MONASCA_COMMON_BRANCH:-master} -MONASCA_COMMON_DIR=${MONASCA_BASE}/monasca-common - -MONASCA_STATSD_REPO=${MONASCA_STATSD_REPO:-${GIT_BASE}/openstack/monasca-statsd.git} -MONASCA_STATSD_BRANCH=${MONASCA_STATSD_BRANCH:-master} -MONASCA_STATSD_DIR=${MONASCA_BASE}/monasca-statsd - -MONASCA_GRAFANA_DATASOURCE_REPO=${MONASCA_GRAFANA_DATASOURCE_REPO:-${GIT_BASE}/openstack/monasca-grafana-datasource.git} -MONASCA_GRAFANA_DATASOURCE_BRANCH=${MONASCA_GRAFANA_DATASOURCE_BRANCH:-master} -MONASCA_GRAFANA_DATASOURCE_DIR=${MONASCA_BASE}/monasca-grafana-datasource - -GRAFANA_REPO=${GRAFANA_REPO:-"https://github.com/monasca/grafana.git"} 
-GRAFANA_BRANCH=${GRAFANA_BRANCH:-"grafana4"} -GRAFANA_DIR=${MONASCA_BASE}/grafana -GRAFANA_INIT_LOG_LEVEL=DEBUG -GRAFANA_URL=http://localhost:3000 -GRAFANA_USERNAME=mini-mon -GRAFANA_PASSWORD=password -DATASOURCE_TYPE=monasca -DATASOURCE_URL=http://localhost/metrics -DATASOURCE_ACCESS_MODE=proxy -DATASOURCE_AUTH=Keystone -DASHBOARDS_DIR=/dashboards.d - -GITDIR["python-monascaclient"]=${MONASCA_CLIENT_DIR} -GITDIR["monasca-common"]=${MONASCA_COMMON_DIR} -GITDIR["monasca-statsd"]=${MONASCA_STATSD_DIR} - -# Database settings -MONASCA_DATABASE_USE_ORM=false - -# Other settings -DOWNLOAD_FILE_TIMEOUT=${DOWNLOAD_FILE_TIMEOUT:-30} -PLUGIN_FILES=$MONASCA_API_DIR/devstack/files -# APACHE_MIRROR= # force specific APACHE_MIRROR if the one that got picked fails -# Apache Kafka 0.9.0.1 is only available in Apache Archives -APACHE_ARCHIVES=${APACHE_ARCHIVES:-"https://archive.apache.org/dist/"} - -# Public configuration -## monasca-statsd -MONASCA_STATSD_PORT=${MONASCA_STATSD_PORT:-8125} - -## monasca-api -MONASCA_API_SERVICE_HOST=${MONASCA_API_SERVICE_HOST:-${SERVICE_HOST}} -MONASCA_API_SERVICE_PORT=${MONASCA_API_SERVICE_PORT:-8070} -MONASCA_API_ADMIN_PORT=${MONASCA_API_ADMIN_PORT:-8081} # for java -MONASCA_API_SERVICE_PROTOCOL=${MONASCA_API_SERVICE_PROTOCOL:-${SERVICE_PROTOCOL}} -MONASCA_API_SERVICE_TIMEOUT=${MONASCA_API_SERVICE_TIMEOUT:-${SERVICE_TIMEOUT}} - -MONASCA_API_CONF_DIR=${MONASCA_API_CONF_DIR:-/etc/monasca} -MONASCA_API_CONF=${MONASCA_API_CONF:-$MONASCA_API_CONF_DIR/monasca-api.conf} -MONASCA_API_PASTE_INI=${MONASCA_API_PASTE_INI:-$MONASCA_API_CONF_DIR/api-config.ini} -MONASCA_API_LOGGING_CONF=${MONASCA_API_LOGGING_CONF:-$MONASCA_API_CONF_DIR/api-logging.conf} -MONASCA_API_LOG_DIR=${MONASCA_API_LOG_DIR:-/var/log/monasca/api} -MONASCA_API_USE_MOD_WSGI=${MONASCA_API_USE_MOD_WSGI:-$ENABLE_HTTPD_MOD_WSGI_SERVICES} -MONASCA_API_UWSGI_CONF=${MONASCA_API_UWSGI_CONF:-$MONASCA_API_CONF_DIR/api-uwsgi.ini} - -# OLD LOG-API CONFIGURATION -MONASCA_LOG_API_SERVICE_HOST=${MONASCA_LOG_API_SERVICE_HOST:-${SERVICE_HOST}} -MONASCA_LOG_API_SERVICE_PORT=${MONASCA_LOG_API_SERVICE_PORT:-5607} -MONASCA_LOG_API_REPO=${MONASCA_LOG_API_REPO:-${GIT_BASE}/openstack/monasca-log-api.git} -MONASCA_LOG_API_BRANCH=${MONASCA_LOG_API_BRANCH:-master} -MONASCA_LOG_API_DIR=${DEST}/monasca-log-api -MONASCA_LOG_API_DEPLOY=uwsgi -MONASCA_LOG_API_CONF_DIR=${MONASCA_LOG_API_CONF_DIR:-/etc/monasca} -MONASCA_LOG_API_LOG_DIR=${MONASCA_LOG_API_LOG_DIR:-/var/log/monasca} -MONASCA_LOG_API_CACHE_DIR=${MONASCA_LOG_API_CACHE_DIR:-/var/cache/monasca-log-api} -MONASCA_LOG_API_WSGI_DIR=${MONASCA_LOG_API_WSGI_DIR:-/var/www/monasca-log-api} - -MONASCA_LOG_API_CONF=${MONASCA_LOG_API_CONF:-$MONASCA_LOG_API_CONF_DIR/monasca-log-api.conf} -MONASCA_LOG_API_PASTE=${MONASCA_LOG_API_PASTE:-$MONASCA_LOG_API_CONF_DIR/log-api-paste.ini} -MONASCA_LOG_API_LOGGING_CONF=${MONASCA_LOG_API_LOGGING_CONF:-$MONASCA_LOG_API_CONF_DIR/log-api-logging.conf} -MONASCA_LOG_API_UWSGI_CONF=${MONASCA_LOG_API_UWSGI_CONF:-$MONASCA_LOG_API_CONF_DIR/log-api-uwsgi.ini} - -USE_PYTHON3=${USE_PYTHON3:-true} -USE_OLD_LOG_API=${USE_OLD_LOG_API:-false} - -## storm settings -STORM_UI_HOST=${STORM_UI_HOST:-${SERVICE_HOST}} -STORM_UI_PORT=${STORM_UI_PORT:-8089} -STORM_LOGVIEWER_PORT=${STORM_LOGVIEWER_PORT:-8090} - -KAFKA_SERVICE_HOST=${KAFKA_SERVICE_HOST:-${SERVICE_HOST}} -KAFKA_SERVICE_PORT=${KAFKA_SERVICE_PORT:-9092} -KAFKA_SERVICE_TOPICS=${KAFKA_SERVICE_TOPICS:-metrics,events,alarm-state-transitions,alarm-notifications,retry-notifications,60-seconds-notifications} 
-KAFKA_SERVICE_LOG_TOPICS=${KAFKA_SERVICE_LOG_TOPICS:-log,transformed-log} diff --git a/doc/api-samples/.gitkeep b/doc/api-samples/.gitkeep deleted file mode 100644 index e69de29bb..000000000 diff --git a/doc/api-samples/empty.json b/doc/api-samples/empty.json deleted file mode 100644 index c267aa9bd..000000000 --- a/doc/api-samples/empty.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "test": {} -} diff --git a/doc/requirements.txt b/doc/requirements.txt deleted file mode 100644 index c35efa85d..000000000 --- a/doc/requirements.txt +++ /dev/null @@ -1,7 +0,0 @@ -# documentation -sphinx>=2.0.0,!=2.1.0 # BSD -os-api-ref>=1.4.0 # Apache-2.0 -reno>=3.1.0 # Apache-2.0 -openstackdocstheme>=2.2.1 # Apache-2.0 -SQLAlchemy>=1.3.0 # MIT -oslo.config>=6.8.0 # Apache-2.0 diff --git a/doc/source/.gitkeep b/doc/source/.gitkeep deleted file mode 100644 index e69de29bb..000000000 diff --git a/doc/source/_static/images/architecture.png b/doc/source/_static/images/architecture.png deleted file mode 100644 index b5dd78a06dd95f7bb15e1dd705d2f587db58a916..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 88600 [88600 bytes of encoded binary data for the deleted architecture.png omitted]
zS&+~GJsS$Y4R+IVgVeon`1{h=*ydYnk<{vc3?e}-ema~?Gg;@ju!joczpy#j91$(> zH-Pfb16Ee#%<|aMU9e^Zq2HC+|5^w%!v3?;&3@xvkUp+4K5`mkDahOxQ7khQIJ@+Z z&If&ojAuL6f4Z)bWs@r0=*aBHrzl^FY}w3FZ5r#QX2CX=#}ME%utysE9bw*3$R09v zxk^oe!22>B_pdzjxfv464v;7-vg}H=G2yYD+|g3lSOPg@CEtM)t|5o|&nlbquacR^ ze{#GQ3-yG~mXtw&ZX@`?|76YNGWCk6q!K%?92$!&M}!EK^I381?9#$R;OZ^He>=*| z1|9}^$Jdh_%(&liGLyP+>R;+aM~5G)QMZR}kv)X`x$27T-2KlqTDH1* zZ`_>UGLn8@$laU?`zg3{@%`m~*=0yHCsLN2MnTSwdx~q3^1X*^YyC?h$$kDef{&DX zq;8w)rFeW7yh_uwmV`~n6{_nTr!VHQjJ4&z1k(Z-#tNq07j(A(H==!s5z2KykjEZ# zL%{35z$yIGcR5$z^o(<}{ARMDzHhypDl^aU{mw|Nh9MxVqs}v z%d*|gdj?p*AoWl7EsX)60N%0Rqlo;*bs&3>sqQME?Zrb(zeGSFCGdEn!2IV8vdl}R z0+F$Uwp}+??${Lg#3XS{olvQc!LqSvnxiS7ip?^>WyrG2LoZqX00|%h5}+9*fCxx{ zi|_2q)@@K@Jrg|g?-@D}zR!HjFMluROiZ;G$t{R&T!O0ZFS^hM5fJKo7^&O$!>G-so1?_$bO6H0k-?9!tYSHTh@-Ply3*- zlRrGJ>i zl+W3rcDc1-Yj-weP=Onn6+16~GhTn=`uhMcKm7Nm{uccW-t?AFZT`P-Xav2h@!FYO zu^BFN@>}i7`Js^KBGKD{E%{eY*f^vBN<0NhPyi*s6fu$_ZojM#!lMIxuwvv1IetXt z%HbS5zU8lZ-g+%CZ$195JDE_VQEp$wOdB_xo#AbpG9pWR--A@29ZG!OXbD2egXtQ} z6r#0bffB)>guo^5deso=`nxod9DrSoH&`LS^`}AgTJR5T5Q>B=GciL0zSf4-ZTcJ( z7-m#&Ma=wRi1#Z)wDJymw>xd6RX!n5U^J&TY4aX2U6FFH-c71e47jHkZr$UyB5=o0 zk|e4O;h>Nj87*-Wr1^{>$m-gt=vb$0ORK?c6j}C8=%uMv)AkzmvF;A&)b2OUAljQV z9?3V02oUeQT|VxnZN1$_y%~5ihFB)^80w~8a9deDxw{sNN_JG!K=dud@A2s_y=M~GW!6+0P!H4 zCA?Xtu;R7~z^LI(LnQhhm1Wrv#U?@A;k(tq-W9{5 z%M77J-k&Qcp$Lfm_qAc-_84W#I-q4)Sd@sUwZJ{CjkEvpDb0NRlf4(vLy&65z z?MR@cTGy-{8F%<)@2ca<-Q;0E&u5tGLk$aR#3=kMewa_TxghqC(b65lj(vi zt~ouhv>1s$jdiSZVBubf^5LgNdEW*nci)5XCs?a<8Z~~A*4okr-LLn74|i-DZv0D) z=PnQ1ks?fY%9KN2;O9CCGc8tDFtnN`bOu3UzF@6xfgcj{h$@!Y8uII)Z0hr&+-~Z2 z6IsjC+@G1>BVS4%b`j#pU-rLVZMLuvTuXbh@U>o)C9kDa-(Jir-6zVFd8$3HD>E!j zhkbm$@tj<H#+48^k0%RdrfvS&az3H>- z6H;5@4bN|eK)*!XWUd^{QvWIRN+XohA;5MGW-;)Vny}#1UajElxugA9Q2cdnZNo3p zXD_fiY^8nEtuN$m1!cmAG$w)ZZZ|9ITv$uZxO}S}BVIr5giQ5NW*|(e+!k_z?)iUw zy>&p;Th|8q8yZ1yKw4@50cjKvkWdDt1QDc5LJ>^Y+wtYPOX1;l-E>0t4l9Dz_$4%8t#E>b$cQTx{Kw4sNlo+od=}nD1F8y zp~q=A*2E0PPisTf^AAccb8E>lcphGKElkNOn-_X?s63YGVQ5Bu50j|EI;fv+**9oE zOmz@FSe|>?X<4PF)YHX)U8Ly}=i&9mk)cspmnMZP*z zH@B9u_2B;}hlyN4SLS9xAy_YHrLUm)^y^?CFLHo9Ss1%`<7~nAiGE@2wt)$!<}Wy> zqoJ#7=6Ci@`CzX;C-Zlko>7nAVPuT0QtLFUjo0wQobMj8Vgc!#g>05e16|e7x+X|@ zGlfr}*&JR(VeN@o+nU4)xyp8YxfY6(x-~3 z7jE9@Oe}zPu>$4l9P*X~IV#U(PNcYs(?c&^RXa-kC*56t-vl=Ld+?>PQ1(!HZ0*{o z7{(hbPW8gtohx^fXK@UAdq4Vf`XGDB`1snnp$I zOLr9};%hQMQvFR{qQGtPMlm3zx9_oew1ff6Z&Qaauwvf*qcGd4m*~VgY@-VhjGs`1 z>dhC@%r&!+Bfq*Y4%e{Ei5o8%@t~HVNQfA_2Ws>bNo)i8{5~dc{^*LkGB;KzF_U7U zq7b0kwdq*1YzCF>L@8r9aPyv`Ih9pN!3~u2_;HzIz?%Cv^fPhp$8WQ|f17m+aGjQF zSs4!;|8Oq9rIq1dd%6+cY591Sp%BHo(r}_F*A@|+aZ+^9=Os75v$6e_uNiUnTENzO zOo(!+Q|(3fOZ50Hf@JoxS8Qr2JNF+c{p_ONyWk$rc?&<89!1l$>gjs9@fP$RHc;KQ zR}wBzO?pHv`VzEFY!jT55Hr^_Vl|}>B@N5l44i_$Ls!r39fw0PfU{K;U>`!?hE{zW zqR)>VAr6Ul@=YU-6r8GAD>|bO->mJ`ECi0`Sq{p_T*czkVpDRNiHmK&k^-&kKQNy-`>>2 z`<#08nx_L+r!ww*WS@RjA*pFVG?n169vq`=@&;`T*h(J?pXWJQE`hjuG~q zt@Z@m5p$W(G0EenMbV~CoV!ZET4Eurhpp@IaNUgxm`)`^1h2t=OipI0sHHNxYA7@6z z)3ycuJAm4W-GbI(C@<&@bd%JG3uO!Lx_F@f=<$Clk1uuUYtcLpr&{)=2T2n+3tLo) zzjvLhUIK(&{t|Q{m-h~-7;a`BNGgi{vW#|JH+IVudVgo6XRL9bn*!>RzV9}@!lUmr zc7ytSl?*$v=W&fh0Oq|lbV7-$-1$u6@*eZq2X;aUSOhpl48sm%NJs|qz_Jn$8pBkF ze`}Ogp-{1~h$hezuCXQuEk;qi6*EoP7)&$3f_%cn_0x0g()V9j^ac?KCJ3Mp~p6r@>Qm*hLbubMUP^JuHWb^ zs#_*EFC9U*f8Cfp`e{Dy-g-2C``H|Rw*<%PYa6j}%t@3^=P`B=dcB&pAc+he?zerf zJQ&&??)k~SyV=_hE*iOsZS}aLd+<%v-e`WTKuB6qtY`)YiWIhwrFCHv3&jI9K^~-& zs%9}WSVz<*(V&$@=S~wfa$}1y$_MCbmToA+k-9cE6u(CU;3Zl-ErqA-uJ-><2_Hso zo$YFFexbV~E{ z!HhUc|3}yAhYMx6(LT^~l<82wAs`Q#PVY7C~3d*r+`yJ0I89tTK358)|s0v6eg#C2`d{qW-#5@LFD-Ak!vl 
z(__da=km3O>a^NEF+)us!okn`oM(alwGDQ`&q#c}MAcArXNRi(bH4BE3fN^;XStbN zXP6T9IwL^Vgt|_sPW39V7WAo{qiyLTwILE{pQup}dJom+6y$k;()}O$r;eSjN$gKI z4;1X1j7aW zMW4((!_-(-CuHSbLzbWRTAMe4A1puc+^g6g0EMA}@wCB{gpt?NWJlCf zs{(dlI~Cx#AcU%H#44n#%_<=!b_KRx2?TMwYk#AP_$(4`zL)q=lqnEaL^zD)mSbTw z%4KtV#3oeuJu#H{vb<RsjGB zjtJN?-s>HP;ewEOx1U>3IVMlNQzO^GPJhRRz0#k`CsTvJuG@ZOsI>xn=eNsQSUXNR zHSS6@>~$bmb~3;LoX3Had=bpYSsF~tD}$nnj{-dNU1+bHwgAKknCA8dJD(r@;K8~mv4(el1<_+@r zLH8a!%QI|#*sOt8Bzn8>8p;O3!Tv!oc>zTM;lV!OC#P5UVLr(O=TPS8#gt_ZkpOA} zoGf8SymuoUUg49QNW@*oMtPwdEo|D5kaE>HpSCI9A`MIGM6(q1h|UBCK~S&snMIoJEIR2AvH|GPH=<_ zfaD!Z#V$L#)Q|y&yQKk>W`x|6zj7a_J`+NkYLLp*gzsvZS6ryUS&@OTlgFHED$kb? zoXc{CaE2N8I$Vih9x2rXh|(ik>AXcmd%=w|m9SUYynY!$!uO46)2ZN4(d@WA}vnC>X-oSlQwDs%j}BxU|*mY7 z;EfCt@6CZQtM_fXi7!|^@W$Z9PB02WFq$-F%&qSpfaxSzSRY7APV!d{)>IEWENRz4**JA< zNE3ELQ=cFr_y%4Pb)zb%HzYGLe!evB4{qy0%XwSCeqbjjE#7h9CYHtr#Y6l{(z1p| z)zh~)OfYHH!B>}N`s@LCj`4?$>5_7I0?@?29|3Crf@>oKu0}?tJIg@FBw_tmL@!v) zUWd;xk8^RH4Z>dFNprP0UYfZJhgP6G?4pZc?q<;%NaQNmY&?YUWOPU8uc0nNKF|&9 zKLbsZB>D?)S^EJ?=I=wfpgn48>X|&IgW6Q#hv-lxz>n4U9Fuvt5BN|ophB*EfAr{J zT8~CDDRvQVBQB%}+rOhbiY|Q*W`!QrQ~_7{)#g`UdQSsesbfyP#gs$DAEDEesqYR@ zH_xDs)k6hDq4$ShAmF3b3SsMbmvDguIf5ajYczimU0A}bNouX?MQK`8aSl;$ycBc` zgWxn8ycP1X=skZK-B)q_1s!8IFa!+;GFV+xmjqrF*au0zbV_PPLPJm<{w0EBPE_Zg zHA?lEC0yCf2W|0G@^0t{W-t~`*vj~5bOoS{*lGg|hF{L5@H}v5kiJn_#0nqNcZ8%M zYJ4NEveRN5X(@;iT7RHP3kRIe0?6&}`b(8sifqEPeDtr+^{Ee#7>4n&ZDtuCC z1%S7TLdTmUzYKxsD;-MN2{bpqF1ZVfBg_xKRagXW9{@kENJ`G%8_;rO76WQ<X#dFAgh5lCR+L_o;fRQ08?YP*~}}*f8ei- zS4n*Um~xmEFem!vkNpF0lF@u`ag8XMq>P;WdvL;7U9u?Ps#By)cjrY%u&p*Nsi!w;NEv0VQD! zy<9^ojcrae!3wJ+G2(%^%8pny)HP@@OA@aw0cF8la(Iz11$&;Tl0`(#-l^$%A93BH zOTe!Wa6f^XK7lGFL(eHx$^gP)9K-2Pc5b2=VIcwK(wm|XVULEZH>31p#|+GP@UbV{ zWi(r)xxB1NCj>^-fC5HaoJMQgJ(X;E?yi%UskDE81V+5d2%%BJCM?8yb@A;XCFB04 zq-8LBU9kU_7mSI8c5NHjO@idC#XFnY+^9m#bV+z`*R+2f)U(fGLftgq3rF<%c0wMo zE9;&xc+=ljBllBdoc0W!koX- z#0z2b@$%q^a%{4A7ir@@62+$Kv=ZYjdHIu*yu1nf+rq0gtZzy5-tYtzw3jyzP46|F z#pvHF>?%kbn!$W+6;RmmMn==L|7BN+?F5_l-W=}vpZ@T8NcNpRCa$bmP~5fwd380m z(RGBEB)}jqMi#6S zpa#K)gMBFs2{Mk-8ZLAj;suheq<+{A%@JM)OC~~5n^(|JS;?JaAny?C2hoa#Co~y; z>s0#by|ZbH#P8|;5^Wg)dlytDzCS%)Y!JiG(xboOICk**pusRq=@yfs$dy)5Y}C%vKX(%@PfA7Sx7`2E7*>y%qBZNAi}htEIwh;Lk=arfceWVGbT)xyv{LWTMt z5;s;#&PMY!pXQJ9b&0%>zJFBMUXXTXzw}(}g{;?@!$X9|a;$)Th29E7g40j-n=?LZ z=|V)+A@>(n0FYC2$5rf@mD!l6;&7}KTtLoy&FM*CL|GP8gZ6?bLMpCD(~<~gBQ7m;W~*z)qn)Im1`Dix_l*{ zE4Ol!G3;GqHqNxMW3o`EM$i(|RyG!>60so-tbJvG>}drXtoaK@lBg-@)UUkAmaekn zw;0AEq!>T(u#Qj`tUdG{_Ila%iP2t*++Hq^9W)5cUDCLcT^IAwHO=2cS7Pqt7qow4 z8H#5aGV4F>B5RlhH&mXT1g%Xwvb*`^4^Oyz9KI-2|(g`lyLqwtxG za6oXbxY8UzAVi3B;!hGmEKoYuz7(_o-NoLgm3IA`Jyh|MlBet6V?`F+H!R;v8m!@M z4{Ip6e()Oe)wbcD{_^G54qw?hstOlRq(zn4UU=S&nJn;tJ;z@M8mzr22Wy^mW&AN$ z<14nn(H3$@2P+Q8h`mhKni6ZCaFQOv{fmF3_5n`D?#>0dYDwnP@UFL{dJ**ACtV!v zv)G!ZO{w~^wOEm-JA?`g`ECvE&hHdnOZ`&!(N)I>wXTmh()AY|SUd(iegZwMNV&Cf zEYJ(Tf{N;Sa;OCBxzMQgH)E%Y&%+bGx3k|oXu<$x*ls=N`Pq`;U3_q7NRX0F$&oSO z18+CK4DWn5sSf*h3LtQT9B_+B8x?_sX?7))<_9>PLlG&GEev8q^*H=w(zIoRKQ(Xb zR-JG`wPWDMACT?7od^o%JM8rgP&7ZWzE!c>9bB@q-Vj1k`eaX*M%aF;iXr7$gA-ld z7nh|d+iNc+jhnJ@D3bNtF=duuv@vuG_S{ccv5^Qf7n?+Aoe0$dIl*q?8o^Ku(71-y z`BUIyzN;qvI_~&a4RwoV=e=|IrUk#TS?~0o)nZ2#Ix@zd1}~npvE7W3P7>%)_aTA8 zZof@Xp1$A+`{EDsv(~8AU_9D^xn|_GlD|{&k^C!1jO}n$-W}?SYhp}pOI7&+XB4d` z#-ZbG1^4!IM057i+*`A8R^h(NY#bMjL{a>J;h5Di74B{Gqeu22XyAX7BtmW9H922v zs5smEtGt+l)l!Q0E87X=RfgK{j?WiUGO?XqK8zRI`Kb|(^5G>Yth1oDdL&&Hqvtx|6y&@zn8N}NlVMBjw?_2>Mj>1&^b+KjD^ z)->z9MPGs8#I0||Br}yQEcQR5x?{b<1~EgtFf`73G0101OIGGo#Z=NYAL@wy8TZ*X zO+(7V*w+JHx+DhVtu7mc%JqvPGZmXTd)EVQ#x&lvZO+EI4;|xMRg0-AQ{WrH*69EQ 
zcpwFid~~}WgWvatd$(&&(9$*Z9dhOs?5@&(L@Y9T0#8|AvltM<;8*-YLX%Mkz|6ug zcJWTeoFdf-L#RX&6!_08vUZ-qSG(COT|JU5CnXp&r#PW}Na^T z9}tGajoC}rND`_~KP(0pDV>`3MDMgY>~vTwv++AQfN8@}xsy$(=<9=+(8e^aCP1Cl zzJ396!IxHnh+yyVipU#TAs*PRlt1+=qVFv)b9P%)c~TT$aDl66t0nCP+DJR~9WQ*8 z8sQf*dS$k_zxCVek$uYHS1U50hRe$YYcq9?bkY(^gQa5w&d7%DLiJBJy~nXjgiybrZ_&Y>)+2tyiD&9nTa!H zk;QR|#VK4OZp2H@Yoe7qQ;^nGvLi2RVdLqce9M|T%0)1#57j}RWsaJs)}X6c`!v$` z&0;Wag-7TvU5;~b{F)SNI(3>GLRjjyC%bW>=)`c(cA;VV=v!Mh&TL3(Hclb-J`qY& z(k6DatF1@!qbDI~co;s^C5d3Zg_F#T5&-v~?vqMiK>dOw@z2)f&QmCFk0?i#=Dg2) zsz-*;%?6%4UnW_gt=v`!Uw>r3X?5-l_M|K_V*DZ!=@^e;hN{?rlrftlew5)Sz?XJQ zG>{bpI;89%InQFxUzoh_X!ghE2v=OzlbYFX#gfMpk6J%#gGk4m4cplAuK72_rVq|- zSC+ib=0e%X*pt~tZZ>!Y-~t~;3_ZXI66@?ss6B9l$AK{2Abq3N+na2q;!wZAMdQFCU^lj2<3GccnL zG<3gn7^3*bEO)Q{jqlKBz|%Ps(G#6>9hixH@%MBUj!70P!^Od`nf%yJt#GGC=Wgf8E*VZ$KbH?E$cp8W$Gb%^vcsBYus=enLtA0#a<5u zLE*5XT)~`8`lrT-^p{h zC@;DgNnhB!K2U-w1|E4kyzkYt6OBp6F3aAwsWN3}bKao`%~BF*o)nbi(eFb*Dgapf zhrxx`eKiX9cvS{r!#JCTf}k}pR#e3 zzK>3+`#i){sa-o^@~ev!Pzqp-ces-*AU!N7%#OU~ZpyD@ntvT{{VVP&trlkTA^m@%IQ4$l29l117**-46h^|&N+)KtW7D^@0 ze>m^F$vd5jEVrIKo+tpNs@sQ!MtTO(;f-3~$TRt2FN1?ukM0^)ji~#u8=OAqeB@#3 z$*?2Sm$U}ufw~}_4|Po;m$}oM0(Jw>2)Br`(Gik|{9)$Mq~u>Bg)d-^%~lS!Vcu8h zU(D{bBXIO;HPBvj)G5Wixjt8r^KnJUy9ACv-EKKdCT|(u755(L<~334&N*!tOY1fj ziD5EVc<17BIVPzlq;pIi%MQN~G%B~Rfs`wyu1lg??*@9J&~<1T_r>3R_J@DfP{bgO%Gj+fSA|#?igd^le0`S5$AmMrWbr z-cLR!(u9)JANo>1SDxLEtm^RQIu9{ZCAJT0Yh132KV!{iZ0_lc$Dp<+F&*I3i$na~V^2E%)sS$SNbvaVuHBB$eo|~!Lu~#Lgu$oL&l*-6cJAyYa{at|?VyM_ z-aHD8n+y6HmL&^~Ld8}RT!0b7fE(xpJ$*NG4@2q&&4=+^=W@gPmfJ{Nug9R&4;iEp zs1u9?$pg*j*x!G;5@_^gaHb=&d?|7}saBaXPqg3|@VBsSZg>ZZa|!c&%moLu#p-uh z-e9<_TvTA#=*`Kw>GdFgfy$_h*e_E-hR8*P}w=pLZe1F ze8|&TlZ_KSVc-jNvk(M_Bp-lC>;k=CoWC*>9$^g*Ov{fs%D=G^V;O7KkO%MF3kmG! zxpCiX%tPa$^H^n%gnhQ(H@$0?r?rt*|KAiRlK4!@_}TRU@$oXVBh$X$5}k#s*AM0D zmDrMxAJC#iQ2EezaE>A}*gvuc^mg5$>#$|)7dX;4{$V)8SP}3;$yQ|gaxE*k9-@Wz zc~36#ZY zY!jJe4-5`*0PN>rICpCJ<)*`KC4ZHi$2TG@^K|>?*uh_^i=395B^u)t=ac{3DI+Lo z7rYrngT|o|9|r#*)8w38Af*%#`h~>vDLw9 zgmE5;3ep7{^o^G=SrA6`hILP!X1Jb?z>2czcyNN z`b3Kbl3YKWhCZs2b;`rAQgHj+l`p^|U;C*)WUspu8FeW~{TXO>5aL{FDLFdct2*WL zC#JcIP00UF-p*4hY3Wr!{PhN<*4oNh=OmWarH&)?2GKjOL!Quct%4p_P!$@mI%(nd zH@mp5sQ=XV-NL&KMg4$Uvx}5%6pv&a6hu?iI*W~u3iF?A_>(_SzbLQ}qh<7=Ibp2f ztyxRC;bh+TV_hVkj>mceonsi8!;0Q_Rjg|DQ?^#biwbJzy3N+efrwkYt%{53fg7_Q z@LO(%P9j>DI?vTVTovDxyIMt?)!P+Yk7emWQAzQj_fs*iP0brkY$YVPTsmj;Tsi(f zIcMVDKMb20@wO5=;G$KM-8;^^7M$w-b#+3Z!J(~?krSTNf?sI%vqROVAihgdL7Hlc zM!=x@*MxDcvVaY@JpFAxxT`+$7Bc+~(u*>}ro|;{pc^^OJEn2ora~fPqf;$DjJf3u za*w-^U7szybEkzjH~g{Kliyv=pIE#oa)cAh4yk1a`NgZQlty(GdyEw3UzHgl1wY}- zCkw1{)T&!@4!&F{hLu&+Nsq9r`H8=_%rkw3ZCEE+YOn?!9>UZIMthfjUocQKJK zd77{9pRR+fm7TVNSLrnTX==kGcsO(Xhk3HT#mD`ofAO~TGSNruIZtkr3Hd@%sA4_? z&GXZCyzN?!|A%QiBwMV>&vHZEP|d!CepzSS*^EaR&$nZDDFIRkl?1Wv2Vi3jP-bz0i2V2`0;q6eo!xEr(d36W#-|o zy>(j^8d3VrQ1jC#)ACz*;sAR2KfZFnKF-?1dq2CFbSxn3O6$9r1*;w*CE*craBUVu z!~ZbAKjCZE@cZS%=5=}P05GZ759;ZH8%`E{zBTI|a-Z(9e~w*i9r!`9eqTEP{1%l% zqCLeJE*C97m#xqnFPqx*&39!NzR2e`=DctCm{2jZj&t;67JZh(&@B*}ZP6cRn%q zl4qa$+ar0f`b3N#uaHtXO3pTw7VYBWq}!mtj|blIaA>(sr+&-&K#Pd#x)iVHf4Jr? 
z5IpiOL#yl;&f?qP)d65xeq$#+u&hMr{tFE%MzV-l0gQn4GGRy3OH1B&+L& zqzilDE0s+hKU1qaI$vKZI$3A#qz_(^mivBnpS92z{0l4JxO2z1d|+W@=cNdBf3}Ge zV+HfwtET4oN@rtlQgufh$iDp&%KBVsyX^(cYH~n>=v@FfrHF_^qz1K{9P$QKe;7ed ztHA@wB0eHE-=ft6(XnV^ctq~D?)gq19PWr0l#^o7-yM5{%`%Lo-b2LW;rH9BLi?F{ z%|4xVz)Rba4H}w?r=4k9MdpP+-RTVL=`t+C3^#`?^5;pECQ#dyY|i$*^zU`T<_GNZ zer)6pR^Bz23M`)Y3QEIy&p~eyoxD_e4|$B7E~2*KK1iwWO5vRkM+hn-DTCT2!RIg_ z;t{`D5h@Fn=bMHI#A5p8M>2!0NmdC1i`eMSGm=$-E&+jR2G{}}f5UUCP0cIC2bRv; z)A=`jD*N=;RYp*Oy*qJarB_s4Cs^_iQC|&pRfYWcIAr#H1OYSSdSx%YuE>Kq>VgGM zwLmL#-UFPh@%-!~;Hno`K{>YpXpDgW)*XJR`g~inhi6kn=pzL0Bk>lhX&0~Q82Fn; zppOxVC0SM-5kNWLUcF(WQisGzRjPzsOVr;sbM2 zW}n}_Gs8grhOsJdsW=P#AM>r>%=1rQ{03hHe(=@oqxE}YbGPreCx?OOaY;c|)%RSMOAsU4UrAmoI5+$YR* zZOxrkV^#KBA6|66c#1oXXECR)GuBTl)Jv`^S$O`PmR-`E^|95gjQ;&FDck8bqpe|C>h zEc_J2%JdVsHd`y5;%*NJnD`Ar*7Jr4(sU2ZG}XqtBv(D}3|`em8&T%Mm`YU6%ygow zl+W#T((6nZ)wqR|T5)hkts@(d(K#IJjGN;KWl( zJSgX}kN34E@~#IpGtq<8YxG)SAWv|Q>{{k-DCqQ;*m#7d;W$O}3Muq#oXCcVyg60{ z|M5_tn0dfHpotv69HHIxPhwETHx8HGt9n?zi(?lngvg(TW@A_GsE<1nbdO;rMLVve z!TkwyJOP%b^zWL4Fl2C4I}=A)L3W*7qgmXQ=eX}XOH&aAbNt&&33W6U-zfd&tY!Uv zi6dud$RF>M{HoH$z{`C1iJz(U9lyt1;P)|q_T*|_YguK?i=>Jx;~kd2E*E?fDj~r< z=iadhhNjm?dC;9lBwJlhEWjk zPp6v-J+ff+GTkvy!-I2DjB7>m=8wouuOS3=k>3N`3lSw+$Xl;s|JJyvBuKz=*8AE0 z86IQum%$}~&6_Ok1vZUM!@hF)k|^BZUFH;HzTXPY%$cU_vlMHMqp2&H#rS#9illnp z48Dugti={!6NhV76A{L^7aR3TrfV=_{vHg2dVB!CoG<3T>B#PQ=4u5mO?BVm%oU&< zGeaxzHutT07t85&kg)O$n`k>MjZG)fhmrlb_gqY&r`UrT;BNIYh!vYYN1*}jTbzg> zv?;p%w7=t~=dwKgknA5UI;e_S_I}m#04$_sOw&Zx^`P`1dhlKDJg-(S3WQDZ_L1Ro z!@Q*CO@$Y(!Paj>$$m71Q#;#E6pVEK z5sYYF0olO^#lP77i_yjNys}!xoEE}o&xL|=K&mB<=rH?mc1l%R`BghemQFdj{$r}{ z+SLilUmTw@4*Xl5q~Yu?=)mKdz*Gt@!|~w@1xrC6(yC=bhI+#&ryM_e!>1g}p9XGJ zk9h96%|N+K^p#5yo`{kM$kxDiMn}Cr`wsA4 zGmuO>hmuUGgE?~id;A{yT?Lx3zgweUr3A^-0G zN%CgBR`?^|X^w*L-y_`K%0LeUwv)i)uhkZ_zGwVbKn;*^SJPO6A(_FDfqX51#&z%8 z_pZ=ep#afb(72Imy~G);b$QIqM*HdVbzyq8%38c&xpyFkf$g?Jz1g9xuzxX+s3rhe z)L~5CzgW&a$ohZLqKPU7iGODOWBemRP!(B@ZodGhVFM;(I6|U2e#Y%%N}t3 z%K#+RyR6MxAewNx`EIWtlW`yJvSZea{8$a1sD+WadH%8@{h2=;q8pt~uOVCkznXqP z-!g6YLjm zcAXa7LH}7JJ)ONkITZRv&_-*d4By5<_LQ3r@|`TJBGT(pL>(fM7hxhhO?dL=Z{!TP z(gd$E(zGH#lIpxDr5&o>TL(f%i*n3TMKDAbMB$5P9-Ih8jJo7R*iEsC@i{7K2DgtO ze)Av(5t}vWJdDshKq9RNZO%{q01W5{q*`zoY_T+JrAx><|5rr*kF|N>D{EShKGwwW z@j>e8=EHkVv1pI02z%LSUHE}E@*4894AOuqV!S#Za~2W2J`kDoe$@hsfFQxpC;5Qw z!;kf-LF(Yuhkk5tQlD%A+$!|upOrhdo*MV7`s~tRLe3hKY3~KKQ^YP8@;P4S;ghj@ zfZRpPC7?s#5pDm50{kQJZIH%U`#ENB+p-Y^aouh?Ov=MxmMt2=opO2btyej1FebK> zio3xACR>e@M|i+RdBIa8Jr>TkBj1Znn8{i9{_6(^UI4KdJK@A8aM{62)S4lUmB!J9 zQqM5NaU)NSi#n< zI)DwNp9^3a2dkE^rVnDGcK@MYYiz2;RJR5Ov~_0h&y8#B(?mSvLyTZDz{}VfKE8z+ z6~u1@;^%+x>Ua%UH~E&F*d${2^4NdHRH$rkzH?OfkEdYujkaqs63`n_B8@btKK6E7 z5CkW0*U;#2^Z>fOLDuvm8i{Ui`|nXPLHbPFA+|vLnGkn%ydo`KC|NBZVhcGf4%fyw z%c*~bkMC#G7-Ex8;51Prs9go|kQZ?sIo*xuQ~akagh%893Os%UZs0%wcQIvB!vtD5 z^xgX`bjHPB^i}|<{$nQZ4981nb;?!nxmW0Rd3eX=h=A>5U&Yvg>xkdi5TXb>Iiypt zTn;*fGD2$pAIXa7gZ=TRueXxGu}P3>$!m%!U35 ztAA|$CZzCMyL4!g;}f#5%4NA@&UWPe-ebRDDm#lkEwAP~^ew;WbjYxquZsk1}n)5SUK+uzT$!SkZq}<` z`Dg5xwk-r9Tz3x+qy9628&qQ^S^iT&T-7p2Cd9n^%%IDD;2bZgSa}1U%9_@qr-Rxb z!+)~X5}$8WgWb8GabPXAZT~ujshk>C+`^&j+-JcZyh;J?J^{FV{DX>jzBes4dzuoy zz2Hu9xLrpp;kH}74*Ed@hr#WJ$j>TB3)yLJS<@Kw2gGIynz--ZwH(L*@%wJ(&q$x1 zTEjhNDKP=5VgE_CVq6!b!;n-9m#LHP_1|x0HWuWZJr<^M+d5KS#0ybHVMqPCUNPTv zk8s|2j0<6imdiswr;K2fO;<-|2DaxS`ego%BvsAK540t*j3(5rSn2!xvKGzE}Y6PULQoWF+WjDuIlJqtcJRp0GzI9ajR zq#TRo?5^9DMB=P@{WdR69kB$Hn!U_y112TFSM6O!+m-76QvirPphl{N4bBfnW5yEW z0a&Zr)~e~#b8xQteF05?Mb`apsyEBvZiV&Om!Q?%#H44spjFovCM?}$~KjTL^$edAl!bxW}A#W5=9(Ibv4SxDxz`VNQj 
z56g}&8(&Fn#~R=EmTKCJx^(HgoFyrP_ilgMjx%qra5-2}QIX0dwXWLo@({BSdzeFt zuwM}P9P?ZxW|^5&@qRa*;By=#PIrUAfJ2^Z=yk;A`f6KP<-zXJaM^cIW-Ap6^h)7| zOYR6B8Vr(>-r6O`A`@~!QcNb|pP|P+3!Calr3jOLDD+#F&v=>}#OfN+8hC8UY}f3% z>4~gAAwIH0Q1XffeaUEhF~OqpcI92eR(;#>_GwweJg?u-yF1hx3>68yst!*F7+bj3 zyexJzE5696+zO9g#1?ixQK8S2LH2-&9WTr@I2@*q08##a$P=-df&LuWPWj7B?i?yW zVFUwI)9l<|QLyhlnVqQjK~K(9NZr2cN$wN1#EjVAaz7J{`Dj>$he)d@RH@^~LHMjn zxcv@uXB1e$J2M@)VLBkfoN>rxT{R^+rMv+?srjs$?mqqRnxv4sV30mbI@_i9r0-Ub z4vi+iI>#|=wNBI{!9N%WSHy?mvJbueK3XzwE&08+wJph$9^vD4QfS$)L#4hdRCl-A z!1Ie!k>TK)4gf~i@()LBo4X0$dosZ>#v!4haiiX>utYIBU_7;ez`(ZG*wK2_ey32; z{WTT$)DN_^sYJgh^a%hNV`_enIf`ay0|6j-%!Md1Wb=C@%9deE$!6BRur6G?w}n6< zr@4raJ>i7RYUgmxYncH4DT9jj8qVE``r^hp8>frYfka)>9hvfqie*{>Z zyDIOk#4>sH2G3o`nCN=ZmTd$cQ%52);l+o<;j0z>Qg-^(P{Mv8vb(y4;+ zK$l6G=6ODgYiY3^Dk7 z&iFToe=DLkE84^8O%qxs#2QB2bHgLffwMB);Blz-ldJJ05h!}!&H*TRMkjoOn4?5hE$#-Dzl zWPbd}UF{pWbiYlPa_nncLbs6d!?*=4-R}uMQsh(iZB2fKhlQ8=Wf<}3=*MdTcZwdTSRW{G641MK)e^+u89ydM3x7& z+rl|1BRCP8C1|~rf4czaWl$Z<8TJhnlyO&|t=`<)_8)KoJ zm^|gljf1{9M81^Y$(*Yb8qjhe_453%ptVJt^=j^I*hjN`LW-vfR!NoD()peViq9q) zd8kY&ILFglc2(&XSh|@4CpIfP$AwFt1TDm6&!4szfBh5oPrmYg&inbQUE6Ea;_@Tk z?oq`47oIzxMA-8(n<>W(wmQ2#+#}1TYs14i4@p220$j{5y#KjjR`a4;#@i1)8Q$Gs zrs|MnbL6#^p?JF8SCeT1H0_VyYrQqyF_+*TH#`LU%J7sP+26-?zsFT=C9GhSlt={^ zfp<_vupmnGk(B6mIk>A<+n-a3Kvp?YQ|UUmf8iC3$sa4I#PW8T6)G*+RUDG`SZ_h1 z2@>5lta9hBEWwH3Q$ht<{&UP3nJ^OxV{YI&aH;49AG={`t{8I0%TmL^y^U8;;&ia# zxZk38tmJ`pmkwXqymeO$twyD^^3hC(g=LV=4{X1q(k0d4g8uZxh`= z!_F`dxF7ENti3H0l!AM`ymtw=hdgK^rQz9B5ohEPGYFte8PV<1CvX1lvmpm;K_ydG zo#~WOHGkSh&Bv;~dT(!4LaEK)OO)`L%HtF!Z&yqNr{~DX>1|^(c&}bm>wT{=4rK>o9|)nOXlXzNw?l>4d=_>#sf3$x2rZSw(D{js~wksc0$qNG3T#xG+iG` zFsO0gW1x08s>|fMPF#Irz{)FzDyvR&neK>HnzN9+w^pzDe)UL_|K!Tk3o}^##pQWI z^&ToRczY4+kQ_Ny6!Pvg5TQT-vW%|Cx4p;WQdNz0HtucwOHdr z{><+TRq3)7rJ7hsoRdStaP|D&?YP;c8HciSaZ}#*uQmj44O10L|4whEsY5xI?RZaM zx>+^zjlq_|p9C4_$9u1JoG*9iu=slk z5n^=c;Z6fXcf$d5rr%urwq=fYqn#h4tYuAV$Iy>-a%daj|V@C+OCBs$=N# zUF?>?{(X#(F^ZB}c$D78i+@zzK}71CL+C0Rt2AlN!}9$^>cDQ0{&U+eBK3H{KzKiZ zk)%_-y{_(14_Lsh;@8`cDwkfiVU+2ujZwOD$AOlGfql@VP!@q}uCf;B1&ocWVo6-@ zl<3^<@|dzQWHI_g_95PKBilD%VIhS94%z0l+i>u*n=+ z9#6}%-P>QFQ!oF=n%}pW5Qf2WwP+7^L>n?)?mzW0VhoT^^l%!c%R@2tYCS=Xpt*Aj zQ;1YJ2Um^DXG>rOWS4FPJUtDu#3TL;jA-->m-H=U_eW$O7PXkPeN6~IVmKhj$o8R& zwYc7HGZKF3g@W#Sx)p08fFA1rJ!*BOiho??pj4;-T$2>mvLiC)j~VRsW0s9y>ztHn z#OE2dteXfSD382=oc@4*P9A}T(=a1Q^^cPbF&lH3SG79mVtfZ;^$C*F=&mVDaPcjs z|K=h~)x=eO$sY&-&?zVw^d510+I{9B$$;>o8b6fV`&cFfdc8%fZtw#}I3Za?hVdv( zgA&q5cG?DE=lk{a!ar2eE32k{v13^EPs_J^{*KFCDui>3?RhO}y(zBdF(uS*rg#<4 zj45N6BQl3h$vNo6{PT^mp&yAhTF-?QUbOlm1oe<)$Z2=@GqQ*~@OW+npFx3C%uO-fOSD*0Y|y);f$G?NNi^dCuGpn4Mg73jDWp@P}diw+ai zD>sse|kYNEw{J+Nh zsFjzI;B1EoTR-Qy zy%IXK;&I-jIFQ}P>DfvpO@2!0&PPlDkNk)Y@ahsc&i~SiflsSBA0t=C90N6|lwn@` z#QbPvJVeX8-O^@1+4_EZLvF2G#rG!o&Aep?(y_dIQ?4W1K+I6s^xY>GZyPucc_#gO zvC5YPA&}Yl^W@WB(Siq{jmEGXKq)~AE|m58J$dzc^=*))8%^c1FFiDK`H^e^3)9{m1L=P_c~azMumzuFtVI%2er!SAzqeoy zY(ZDvyzJrF=s^b=o4>)T&&74^a7PjW{}6CY7`@m$><8{4WDp_(AFg;dZIxx1h`NS5 z5Hs}5&dC>YmpBwJvIbcCgnsmAa-WYBxGCp2cN|%?Z)m*!Bptf{C%?S6HhTj~8ORP& zm1G7Uq_1l>)Qqej%@&2^TGu^5cR@a0?z}@6=M-E!-(Lt387}} z=~E2`t3e^-2l4h|GU)p*Bup65XOb7_71a{0B>TFXG`rRZaHWP^a=HI zaA_(7;aX*lE~6wBZ?Va$iJq-gl_pD0jHo9HPNxc`_q`?8NLgH!*AP+F*c6g)Q_OSi zwwAQ$OuSd`ge~)iumGpO_Qh2 zgRpSvXJ}zdxvQihQpacAEz%~xyXgS4c9qU_6gw~;AYt>b^by5!3Z)k^Z;=kUn6u=@ z*MK);ZoGdp%rV*KN?d&M5Xi|aZnA9v(!!pO5>B*Hei*mle!n_LH21wkh0^D>=0_~U z#~pE>_0Okg*P5J4fXLK{N78xh)&UAO&%7Qm57%zIybrCzfb&)ewrV8zje}R5pAozz zPlj{aov;s?xV|iKpD}}ml+v4h*WAL<6|#gHfu*Ezl{mt){0;2t<<^5P^skD^fViqN 
zYidO!p~W8#g)$xrFvDiIsX6l36TtFRR*&-A(4a_X`F-EM{Ll>a8_3C>>e|uiPJW=Km`CnC~B+$k}Al;dW%=!%OsR`$5gyttUR8;g;N^? zwhlD>Gf50I%=j-214sLscM}snb}LsnrPuZdXoYQ_4nXE&SZcgZoo}Bm+Sl|P=Qig1 zG6${SEKZ+O(+njw7&y0DH-nt2XzWb2a**CZG1z$;BPUclkLv>*fdy#IkL+H-{Od^6 zWWP&qiOR0g5-1P0aX`|6sz6Pl*l<|2gE!%6G)O(kQ>`>CvV_%C8LZsO9fdg^!hEY} zC~C;GoF4oHIm$P|N@gB+vSiIg8d9WKKsqK$$F191|8mJT6UVstQ&M+|vd=6&ff8GgftmsJa2X#UQ(vDM`-#D67z?2!Zy1fAqMP)G+ z{s-ki%v)w*(27vV)I0=hlSWKGJjEKo!+6dttCEztNH*ojfrEk7lEA+K0d-jl^`qiT z;ZFo7wY@Q%@?*jkGY?0S*4vtGWdA`FH1892S_Fd<(5$@MGfWw_*~|mZ_$C-vMDgze z{~4}AJrY(3az+pukq2$KG{5^3{sB%XewjK!YLGI-2cqLOaa&zt{ZOyZf6=RDE!jpp}*PeI`R}=ma@Dw0P#%(|=n4fJqf%&Y3?|Jg>cI@38n|ku0GR3_Rgkwf&HhrO$e_|NtPAM#E1bNy@?`$*CHXCPvxIrGawTf{qJSir zde6c(IA5P=&d*Twz6z<4Yri!rbmhWlp+$y7Hf!}DD#vI)p~$wgEYtb9;nhB`iQRjl z;7?*-9$gr#l_Sxaq>~Nw!S!kru zW~#nBDPdQF(x%QW{3`)kH;6d{s35Kjte=0{P(#^##56Y+gf2ev42hd2MItM;+?Q9) z0`aN^+kkf|-ylxFb{3S#+bEOY-802$z+(V7Vk%;_Ja2pz?KUAgQj{aSL-mhQX2=l_ z^G#-n1E@}_1lQVziMa~t!8p-=jHnQU7azaf#uOPE@aP0(oBZIwYJfIk1eZ-#gg&c( zFMpDV$nEz&!{xJ`Cbp+6dHeSa9#xZ=!WW)1ygN^Idma5$<(@3)c4W((eu*KfeV1?y zpAo`!J55C_VW~DqS3X62cs^wF3^sKqbPxLC%`NwpKeX6Va~ze0^vx&Z)L+NDi*+G@ zhWWrF;js;lL7-3;Uix(?BV{=h6&(#}!P+{D^?J1oXmwNNqg>HJ<8a*S4@YvaC(sO`Lg4;}x%OME(;1ynP*>?xep`lcvoHld1T*^v#*2S6*Qb#0k$r zNp<#a$4|x=Gu^co0cOsPB>^WZqSW{hoHbj9(Q=6n_!6DJ!MVhr`S=2%_rPUCn+;b6c- zdPNivj@?z{_s$vJjFi>g^@Z6sO*TA!&Tu-FT*{)_*w$U=Fp*|W&9O^6mf#vQTBDRa7Hjb-fGgD*NM5Ckne8QS!|){O`_Hd*BUfJLCPg zBSe4suJtVfqX*EFepb0J?AbPs2v#F^poH9qCVc~~b+45N%He=@UoFu!w>gJ6yWY z&Um5x`jz>;KIX@#OceJBL4OZ*{srSWuZh>?$(^j=Kj8OSm!b!@8je!#7S+{K48r7n zM)#VRUn6i{=#NY?`w!60Ir!T($pPDH|5ZGgX5xxcnL_?>~w-ZVBY+J4Wt^ zm|AY66YwK(i15Nn>tIm*rRnx(hr=8fd8GT(xifc?7cY$NM9_l2=gvPX+^B&qrDDbW z8^m*_-jpWOPNwx94arz?D0s|ORddm>m*0EBG;&<1P)`S150{7AwGR?Tko_{^#U{s9Cln>sbsQZ&S)#k31?fl)~Vj$1tbCg ztopEMyS0|##sXGh&9BvbmP83PHtz%I+#DjeP_oCPZyT!{_bN)!2G9M5acHRi3r;v} z_l_M>wYA%AGt1Xgt~)VubO;!M_o4oF`v7KG&dTXQ|I7ODE^@Jx#_Mj^)9bQ>Qcai{ zy`6<>gelmig@wNHFhrcl->b=-VAKf4hgMd1*YUw)DhzgS4H#eLfr+Ja+E)h@k5yeR zBUXM;h#1~!5iO6jY6SX?@bQ3?7tx;-9|}lnQB(%L&eV$@hIm6U&OW#Nj9h2aK_Wl? 
zP@8PQM@be)hxDE}10Ad~a?>#rNkQN7J9$o*lk-`Cy-h?F;NNAXqMeBg*D#i#72@fl zMe~a9QJdO^AYQvfJispMFND9@VzNwM^|zX>;3tdRGJ?O|Vj4E@t&%WT3gt(fW0%fv zFEg>Sh_(_{@UX1CTtLpO6yI^=V`9sR(!=6-NXA&6-*$D73-4y}A?d5r+woJ(MF&MB zi79g}aXIViv0FI6xP}7H1inBcRyKjMRaz7Si^aKfWH*)MZr&&V_;K*G?at^Ql?lgd ztpg;hs1Z10{vQ!OxU@5gpzD838j2s#Kx~`X0Dj142ikOBRefO;{FvZt4Z9te zwD6AWynEVmT>-9v&}CO9!<08SHEh)CzdH4~M)a6_)n%zd(DN}kij~-^5qh{_g7n>8 zb7A(!tqX_v)IiYy-$)bO-WhSYnXbr&5q#cPYmXkz4Qb+wJpfDaSD}pfn+%f0EpW1- zE7wjQU2pWuJAIN^j!JpCtvVI~!mW9MFDs)rA@zqMr$#qXh5hOHmQ^RS9`G*YU9u;R ziRFy;PK$yT)zqr$1H>3@#~}LnC!;C|y0pGRTKxGOLk;i%E>BQu;DJ*ef7XG^C!;V8 z*?rp1LG*}n@$xvN>e1;f)wf0DNee>)dd17J`7Hkr><%&ZJX%J}SiSJ*w4(r4;|GZK zUnc!g?LWZUff^ksHNjj;uY89?!E@F$&5Bq7)l#z-7nl7f0eiW#P)o+Sh!YN43d83J zR_%M;^bmYzV}jVG540@h#s1gd`zYzUzyFZ@!(>BQKNI~_*ke`whgP^P@M*{m| zxUTy>5pHXuh9Dg?RYwlSV--Ol2}U6A^upw3X?9CG--amT;(NEQV5CnXaMLeypkV+J z2u@i_{UWovQ;dYzSkIDfIX)CWs^?;hRL6>{%CO^imB=u1A%S9tqM2au;tNny))|xi zK1NgLx0DWFe1G$Co_@sYq8U_pM<og3x zC>h64J!pvm?^kXlQyIYN#BjRji~Jv_cbfaR$Anm8f~Qr{dvE(4#oSaFImWrGEzV^4 za;BbU1WRur7Kd*~$QIMi!~}L`5Ue&k*$DB#xV-ar6NU4sSE{1-w&A2kk%(#w5M$~> zx^NMQbLg5rVc{joTA6&+MW%^3BE{2NVMP430rQ@!YW;qC1a1ktFl0XnjZd4M8vMOn zZU}*hsaMy@MZ<3vI-Z$JK6*zV@e2NSwgZz1i54MyZu_LvxgMP z70-ujP<9xBFUzvc4d}%3yvarR?58T-nyM;;*sEDEy9lcWdk_ohUYqOIiexfFp797u zzM}-<33=5q+8 z5g5~PS3}0f|K(x)2!jKCzYf(Hm2iwS3OrZ)H1tJE?}=5_rToO<+~-ZEBeISdBYA2#dxm9^du}lZu32O_ zW;c6H+(n`d@bOj+#og!3)2yf!JS}TGb1y1Cz(n2&>U?TU9Oi+tbOoXA3|uIJi!|`) z{a@oTWsb;papq~fxv}$gF1C^FLnJ=PMgOMo4=H303`b745v-vM865UUk-jTFs?QCG z6f31ML?gh8Q^DQRJWW>=DCu2*cCADTF~zLUvro?r1bJXZD}j0C4|NXgRn><5K}de8 z<<1oS#Trt)9AoFnf{u8m&d+$fA;@7bas=HqaY+Z`qx)ZG*2)}gc^IKoR&?c8UzO7R zQy!eZ!!1-NX#*SqPC?|lGk87&FxU-L19vWC7(C{2m=!gnH-VsNUB8l~BJWd!C4E(u z17hJWw`yUiNoO+NO4A6b%~)o5=1)fQofRO$Q)N`3suG_^-Q zl>(yRYk!mISK_odW{)Rr%+)z|A2V`%F*~Coqkby3jhe#<)~7Erk5|K{voV1b6HnWR z!<&UZkI(D+Y5bs1b~?imTP=a9zCsN_yppif3VoykUu|0d6ZltC9)Qa=!Vrhx2*ts> zz>}z6oA*_7SkqA!_TIN;aI>3x!1q-V)pBho%vb_Yu&QaeHW=T|(^o)L_|?}N8il93 zNw-b?9=sVA2)QGcD62JB_kW6=Aa(qP6*#hPsREzO6Ia&7xC*A%v)OeZwu$f2@+vz_ zMXcI}Z8VF;j7`;K0=k*N_I+H^OJaDk*9=5Scv2n{^Fpa04c>sEQ#6;_Ltb_FN$6fRLWn;Dg0_&V(SfI zdS_@!8Xf8%Mo5Uu1lK6THhyP#9-)veZxMs_A)W6-3pv90oPd%^ufD*g?&$#<=mm|N zHcwvmta9xK2AIJ&Pm?#l$8MrN^{%1Rub~hsx4UFDH}g(yT)3H5WbQz>)f%_FYjzsNqdP+;ChP9T!oNw6*tzoA3x&)=#bqY z)vi*6$3C^2otwAu)ONuO==YCh9agsWTq5?zR@QA29h-)g9a~J+Uv+n+`*JJc%R?f6 z!h(Y&T^~PcuqQ%iehZm747+vYr!d3V{c#D$+R4LnlBd}reC|!1 ze)YZt+(pr*#F3O{ajR73dfd%Rv>c|qco%DE{+BD%fS=?SxFKMm}k zQdF7v@Udyh{drGLbycp!@J+(AG5)6P%2$TnDGNjGr}O=-Ln6g%NX}{BPl(TpA}z%x z?0K2pwz>DA-$hrMzU?dn#eVl{ZNdA~(rcy-tu~Tw>GeV3q4wiwXk|lAPG8-Tf{}iA z%Ff2lq5k4C&f+ifG}FB%&ruR<%R?IWMO#LWc=U6#x;Vaidw8km!^nT>=H!|}8BFJ0 z2prq8ftT>flKqzw8KY1!=Ec021fO^2ea1dIb{rYk9;@VY&Wc$d1ziYllga8_p)Y)> zYcsuZg$RB0fIU{%qf|DvYb$12#^{}f&S6Ovu!EwqzE?)s;7-f~>kRbQ5aT>*gWs~g zzS6Q5xGwe1VrLl7&8QPl0?m^_RLQ=OeZj`}Fjz2|7UP4Dv37 zggowsJ2|w4wHq-kNUw$W81<1Q_w^@gdMkHbX@AWU6+Nj%4=yk*=PZV}oXo}R^=jKF zgnK+L*&5@yKtDH*XynF9J^yp}tHgkF@`)^A{H3=?^ho@W8%(NUx6XD@HJ=8CcL<6CkBJv`I9T^`#7Ej_PT;HYOMbe{lGGdhN~bWUATzr_&F+I; zNmhYTY)s;IdO`Wz?$=j3YL_4PL{E0THFVK@{fGMetG31`>*gEm_;VT|ckoaA#O6Eu&GH?yGKARe zV-IX*w=i-+b#c7|0x5&D8PG;`QwrE4!CO2I4hYLrj-Ij^iN2p^n zx(@Z*b~Og@1cEGdJjUV?83PGlr<%&5H(*c-0v^n`Cqe=vLKZ@Iw;N5VCjLzPBhqN-6+y_9!FGY_d{&ui) -

<div><br></div>

<div><br></div>
Monasca Client (CLI)
Monasca Client (CLI)
Horizon,
Monitoring Dashboard
Horizon,<div>Monitoring Dashboard</div>
system being
monitored
[Not supported by viewer]
Monasca Agent
Monasca Agent
Notification Engine
Notification Engine
Threshold Engine
Threshold Engine
Persister
Persister


Config
Database
[Not supported by viewer]
Message Queue
<<Kafka>>
[Not supported by viewer]


Measurements Database
[Not supported by viewer]
Monasca API
[Not supported by viewer]
Transform Engine
[Not supported by viewer]
\ No newline at end of file diff --git a/doc/source/admin/index.rst b/doc/source/admin/index.rst deleted file mode 100644 index 0e041340d..000000000 --- a/doc/source/admin/index.rst +++ /dev/null @@ -1,89 +0,0 @@ -==================== -Administration guide -==================== - -.. toctree:: - :maxdepth: 2 - -Schema Setup -~~~~~~~~~~~~ - -For setting up the Monasca configuration database, we provide ``monasca_db``, -an Alembic-based database migration tool. Historically, the schema for the -configuration database was created by a SQL script. This SQL was changed a -couple of times, so ``monasca_db`` comes with a mechanism to detect the SQL -script revision that was used to create the database and stamp it with the -matching Alembic revision. - -Setting up a new database ------------------------- - -If you are deploying Monasca from scratch, database setup is quite -straightforward: - -1. Create a database, grant its access credentials the ``ALL PRIVILEGES`` - permission level on it, and configure those credentials in the Monasca API - configuration file's ``[database]`` section. - -2. Run schema migrations: ``monasca_db upgrade``. It will run all migrations up - to and including the most recent one (``head``) unless a revision to migrate - to is explicitly specified. - - -Upgrading Existing Database from Legacy Schema ---------------------------------------------- - -If you have been running an older version of Monasca, you can attempt to -identify and stamp its database schema: - -:: - - monasca_db stamp --from-fingerprint - -This command will generate a unique fingerprint for the database schema in -question and match that fingerprint with an in-code map of fingerprints to -database schema revisions. This should work for all official (shipped as part -of the ``monasca-api`` repository) schema scripts. If you used a custom -third-party schema script to set up the database, it may not be listed and -you'll get an error message similar to this one (the fingerprint hash will -vary): - -:: - - Schema fingerprint 3d45493070e3b8e6fc492d2369e51423ca4cc1ac does not match any known legacy revision. - -If this happens to you, please create a Storyboard story against the -`openstack/monasca-api project `_. -Provide the following alongside the story: - -1. A copy of or pointer to the schema SQL script being used to set up the - database. - -2. The fingerprint shown in the error message. - -3. The output of ``monasca_db fingerprint --raw``. - -Time Series Databases Setup -~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Enabling InfluxDB Time Series Index in existing deployments ------------------------------------------------------------ - -If enabling TSI on an existing InfluxDB install, please follow the instructions -for migrating existing data here: -https://docs.influxdata.com/influxdb/v1.7/administration/upgrading/#upgrading-influxdb-1-3-1-4-no-tsi-preview-to-1-7-x-tsi-enabled - -Database Per Tenant ------------------- - -It is envisaged that a separate database per tenant will be the default -behaviour in a future release of Monasca. Not only would it make queries -faster for tenants, it would also allow administrators to define -a retention policy per tenancy. To enable this, set -`influxdb.db_per_tenant` to `True` in the `monasca-{api,persister}` config -(it defaults to `False` at the moment if not set).
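To make this concrete: assuming the usual oslo.config convention, under which the dotted name `influxdb.db_per_tenant` maps to a ``db_per_tenant`` option in an ``[influxdb]`` section (an assumption; check your deployment's config layout), enabling the feature would look like this in both the monasca-api and monasca-persister configuration files::

    [influxdb]
    # Store each tenant's measurements in its own InfluxDB database
    # (defaults to False when unset).
    db_per_tenant = True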
- -To migrate existing data to a database per tenant, refer to the README.rst -under the following URL, which also contains a Python script to -facilitate the migration: -https://opendev.org/openstack/monasca-persister/src/branch/master/monasca_persister/tools/db-per-tenant/ diff --git a/doc/source/cli/index.rst b/doc/source/cli/index.rst deleted file mode 100644 index 37a38160b..000000000 --- a/doc/source/cli/index.rst +++ /dev/null @@ -1,53 +0,0 @@ -====================== -Command Line Interface -====================== - -monasca (python-monascaclient) -============================== -This is the main command line interface for working with the -Monasca services, including retrieving metrics from storage. - -See https://docs.openstack.org/python-monascaclient/latest/ for details. - - -monasca_db -========== -CLI for Monasca database management. -:: - - usage: api [-h] [--config-dir DIR] [--config-file PATH] [--version] - {fingerprint,detect-revision,stamp,upgrade,version} ... - - -monasca-status -============== -CLI for checking the status of Monasca. - -Use the command `monasca-status upgrade check` to check -the readiness of the system for an upgrade. - -**Return Codes** - - .. list-table:: - :widths: 20 80 - :header-rows: 1 - - * - Return code - - Description - * - 0 - - All upgrade readiness checks passed successfully and there is nothing - to do. - * - 1 - - At least one check encountered an issue and requires further - investigation. This is considered a warning but the upgrade may be OK. - * - 2 - - There was an upgrade status check failure that needs to be - investigated. This should be considered something that stops an - upgrade. - * - 255 - - An unexpected error occurred. - -**History** - -Introduced in the Stein cycle as part of the OpenStack community-wide goal: -https://governance.openstack.org/tc/goals/stein/upgrade-checkers.html diff --git a/doc/source/conf.py b/doc/source/conf.py deleted file mode 100644 index ed1c6b078..000000000 --- a/doc/source/conf.py +++ /dev/null @@ -1,242 +0,0 @@ -# -*- coding: utf-8 -*- -# -# monasca-api documentation build configuration file, created by -# sphinx-quickstart on Wed Nov 18 12:02:03 2015. -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import os -import sys - -sys.path = [ - os.path.abspath('../..'), - os.path.abspath('../../bin') -] + sys.path - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = '1.6' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones.
-extensions = [ - 'sphinx.ext.coverage', - 'sphinx.ext.ifconfig', - 'sphinx.ext.graphviz', - 'sphinx.ext.autodoc', - 'sphinx.ext.viewcode', - 'oslo_config.sphinxconfiggen', - 'oslo_config.sphinxext', - 'openstackdocstheme', -] - -# general information about the project -openstackdocs_repo_name = u'openstack/monasca-api' -openstackdocs_pdf_link = True -openstackdocs_use_storyboard = True -copyright = u'2014-present, OpenStack Foundation' -author = u'OpenStack Foundation' - -# sample config -config_generator_config_file = [ - ('config-generator/monasca-api.conf', '_static/monasca-api') -] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix(es) of source filenames. -source_suffix = '.rst' - -# The encoding of source files. -source_encoding = 'utf-8' - -# The master toctree document. -master_doc = 'index' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = [ - 'common', - 'doc', - 'documentation', - 'etc', - 'java' -] - -# If true, '()' will be appended to :func: etc. cross-reference text. -add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -show_authors = True - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'native' - -# A list of ignored prefixes for module index sorting. -modindex_common_prefix = ['monasca_api.', 'monasca'] - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = 'openstackdocs' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -# html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# "<project> v<release> documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# doc. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -#html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names.
-#html_additional_pages = {} - -# If false, no module index is generated. -#html_domain_indices = True - -# If false, no index is generated. -html_use_index = True - -# If false, no module index is generated. -html_use_modindex = True - -# If true, the index is split into individual pages for each letter. -#html_split_index = False - -# If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a <link> tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -#html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -#html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -#html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = 'MonitoringApiDoc' - -# -- Options for LaTeX output --------------------------------------------- - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - (master_doc, 'doc-monasca-api.tex', u'Monasca Documentation', - [author], 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# If true, show page references after internal links. -#latex_show_pagerefs = False - -# If true, show URL addresses after external links. -#latex_show_urls = False - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. -latex_domain_indices = False - -latex_elements = { - 'makeindex': '', - 'printindex': '', - 'preamble': r'\setcounter{tocdepth}{3}', -} - -# Disable usage of xindy https://bugzilla.redhat.com/show_bug.cgi?id=1643664 -latex_use_xindy = False - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, 'monitoringapi', u'Monasca Documentation', - [author], 1) -] - -# If true, show URL addresses after external links. -#man_show_urls = False - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = {'https://docs.python.org/': None} diff --git a/doc/source/configuration/sample.rst b/doc/source/configuration/sample.rst deleted file mode 100644 index e2c7db344..000000000 --- a/doc/source/configuration/sample.rst +++ /dev/null @@ -1,40 +0,0 @@ -..
_sample-configuration: - -------- -Samples -------- - -The following sections show sample configuration files for monasca-api and -related utilities. These are generated from the code -(apart from the samples for logging and paster) and reflect the current state -of code in the monasca-api repository. - - -.. _sample-configuration-api: - -Sample Configuration For Application ------------------------------------- - -This sample configuration can also be viewed in `monasca-api.conf.sample -<../_static/monasca-api.conf.sample>`_. - -.. literalinclude:: ../_static/monasca-api.conf.sample - -.. _sample-configuration-logging: - -Sample Configuration For Logging -------------------------------- - -This sample configuration can also be viewed in `api-logging.conf -`_. - -.. literalinclude:: ../../../etc/api-logging.conf - - -Sample Configuration For Paster ------------------------------- - -This sample configuration can also be viewed in `api-config.ini -`_. - -.. literalinclude:: ../../../etc/api-config.ini diff --git a/doc/source/contributor/.gitignore b/doc/source/contributor/.gitignore deleted file mode 100644 index 13f025dd0..000000000 --- a/doc/source/contributor/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -# codebase documentation is autogenerated -# do not track it -api/ diff --git a/doc/source/contributor/code.rst b/doc/source/contributor/code.rst deleted file mode 100644 index 4248dfafa..000000000 --- a/doc/source/contributor/code.rst +++ /dev/null @@ -1,16 +0,0 @@ -.. _codedocs: - -====================== -Codebase documentation -====================== - -The following section contains codebase documentation generated, with a little -bit of assistance, by `sphinx.ext.autodoc`_. - -.. _`sphinx.ext.autodoc`: http://www.sphinx-doc.org/en/stable/ext/autodoc.html - -Modules -======= - -.. toctree:: - :maxdepth: 2 diff --git a/doc/source/contributor/contributing.rst b/doc/source/contributor/contributing.rst deleted file mode 100644 index 19bf32436..000000000 --- a/doc/source/contributor/contributing.rst +++ /dev/null @@ -1,188 +0,0 @@ -============================ -So You Want to Contribute... -============================ - -For general information on contributing to OpenStack, please check out the -`contributor guide `_ to get started. -It covers all the basics that are common to all OpenStack projects: the -accounts you need, the basics of interacting with our Gerrit review system, -how we communicate as a community, etc. - -The sections below cover the more project-specific information you need to get -started with Monasca. - -Communication -~~~~~~~~~~~~~ -.. This would be a good place to put the channel you chat in as a project; when/ - where your meeting is, the tags you prepend to your ML threads, etc. - -To communicate with the Monasca team, you can reach out to us on the -*#openstack-monasca* IRC channel at OFTC. - -We hold weekly `team meetings`_ in our IRC channel, which are a good opportunity -to ask questions, propose new features or just get in touch with the team. - -You can also send an email to the mailing list -`openstack-discuss@lists.openstack.org`_. Please use the *[Monasca]* tag for -easier thread filtering. - -.. _team meetings: http://eavesdrop.openstack.org/#Monasca_Team_Meeting -.. _openstack-discuss@lists.openstack.org: http://lists.openstack.org/cgi-bin/mailman/listinfo/openstack-discuss - -Contacting the Core Team -~~~~~~~~~~~~~~~~~~~~~~~~ -.. This section should list the core team, their irc nicks, emails, timezones - etc. If all this info is maintained elsewhere (i.e.
a wiki), you can link to - that instead of enumerating everyone here. - -================== ========== ===== -Name IRC nick Email -================== ========== ===== -Martin Chacon Piza chaconpiza MartinDavid.ChaconPiza1@est.fujitsu.com -Witek Bedyk witek witold.bedyk@suse.com -Doug Szumski dougsz doug@stackhpc.com -Adrian Czarnecki adriancz adrian.czarnecki@ts.fujitsu.com -Joseph Davis joadavis joseph.davis@suse.com -================== ========== ===== - -New Feature Planning -~~~~~~~~~~~~~~~~~~~~ -.. This section is for talking about the process to get a new feature in. Some - projects use blueprints, some want specs, some want both! Some projects - stick to a strict schedule when selecting what new features will be reviewed - for a release. - -Our process is meant to allow users, developers, and operators to express their -desires for new features using Storyboard stories. The workflow is very simple: - -* If something is clearly broken, submit a `bug report`_ in Storyboard. -* If you want to change or add a feature, submit a `story`_ in Storyboard. -* Monasca core reviewers may request that you submit a `specification`_ to - Gerrit to elaborate on the feature request. -* Significant features require `release notes`_ to be included when the code is - merged. - -.. _story: - -Stories ------- - -New features can be proposed in `Storyboard -`_ as a new story. - -The initial story primarily needs to express the intent of the idea with -enough details that it can be evaluated for compatibility with the project -mission and whether or not the change requires a `specification`_. It is *not* -expected to contain all of the implementation details. If the feature is very -simple and well understood by the team, then describe it simply. The story is -then used to track all the related code reviews. Team members will -request more information as needed. - -.. _specification: - -Specifications -------------- - -We use the `monasca-specs `_ -repository for specification reviews. Specifications: - -* Provide a review tool for collaborating on feedback and reviews for complex - features. -* Collect team priorities. -* Serve as the basis for documenting the feature once implemented. -* Ensure that the overall impact on the system is considered. - -.. _release notes: - -Release Notes ------------- - -The release notes for a patch should be included in the patch. If not, the -release notes should be in a follow-on review. - -If any of the following applies to the patch, a release note is required: - -* The deployer needs to take an action when upgrading -* A new feature is implemented -* Plugin API function was removed or changed -* Current behavior is changed -* A new config option is added that the deployer should consider changing from - the default -* A security bug is fixed -* Change may break previous versions of the client library(ies) -* Requirement changes are introduced for important libraries like oslo, six, - requests, etc. -* Deprecation period starts or code is purged - -A release note is suggested if a long-standing or important bug is fixed. -Otherwise, a release note is not required. - -Task Tracking -~~~~~~~~~~~~~ -.. This section is about where you track tasks- launchpad? storyboard? is there - more than one launchpad project? what's the name of the project group in - storyboard?
- -We track our tasks in Storyboard - -https://storyboard.openstack.org/#!/project_group/monasca - -If you're looking for a smaller, easier work item to pick up and get started -on, search for the *'low-hanging-fruit'* tag. - -Kanban Board ------------- - -Progress on implementation of important stories in the Ussuri release is tracked in -`Monasca Board on StoryBoard `_. - -.. _bug report: - -Reporting a Bug -~~~~~~~~~~~~~~~ -.. Pretty self explanatory section, link directly to where people should report - bugs for your project. - -Found an issue and want to make sure we are aware of it? You can `report -it on Storyboard `_. - -When filing a bug, please remember to add the *bug* tag to the story. Please -provide information on what the problem is, how to replicate it, any -suggestions for fixing it, and a recommendation of the priority. - -All open bugs can be found in this `Worklist -`_. - -Getting Your Patch Merged -~~~~~~~~~~~~~~~~~~~~~~~~~ -.. This section should have info about what it takes to get something merged. Do - you require one or two +2's before +W? Do some of your repos require unit - test changes with all patches? etc. - -All changes proposed to Monasca require at least one ``Code-Review +2`` vote -from Monasca core reviewers before one of the core reviewers can approve -the patch by giving a ``Workflow +1`` vote. - -Reviews Prioritisation ----------------------- - -The Monasca project uses the *Review-Priority* field in Gerrit to emphasize -prioritized code changes. - -Any developer can propose changes which should be prioritized -in the `weekly team meeting `_ -or on the mailing list. Any core reviewer, -preferably from a different company, can confirm such a proposed change -by setting *Review-Priority* +1. - -Prioritized changes can be listed in this -`dashboard `_. - -Project Team Lead Duties -~~~~~~~~~~~~~~~~~~~~~~~~ -.. this section is where you can put PTL specific duties not already listed in - the common PTL guide (linked below), or if you already have them written - up elsewhere you can link to that doc here. - -All common PTL duties are enumerated in the `PTL guide -`_. diff --git a/doc/source/contributor/db_migrations.rst b/doc/source/contributor/db_migrations.rst deleted file mode 100644 index bee6ce705..000000000 --- a/doc/source/contributor/db_migrations.rst +++ /dev/null @@ -1,17 +0,0 @@ -Database Migrations -------------------- - -Monasca uses `Alembic `_ -migrations to set up its configuration database. If you need to change the -configuration database's schema, you need to create a migration to adjust the -database accordingly, as follows:: - - cd monasca_api/db/ - alembic revision - -This will create a new skeleton revision for you to edit. You will find -existing revisions to use for inspiration in the -``/monasca_api/db/alembic/versions/`` directory. - -Measurement data stored in a Time Series database (such as InfluxDB) would -be migrated to a new version using standard practice for a given TSDB. diff --git a/doc/source/contributor/index.rst b/doc/source/contributor/index.rst deleted file mode 100644 index 8fcda36cb..000000000 --- a/doc/source/contributor/index.rst +++ /dev/null @@ -1,10 +0,0 @@ -========================== -Contribution documentation -========================== - -..
toctree:: - :maxdepth: 1 - - contributing.rst - db_migrations.rst - code.rst diff --git a/doc/source/glossary.rst b/doc/source/glossary.rst deleted file mode 100644 index fa106d194..000000000 --- a/doc/source/glossary.rst +++ /dev/null @@ -1,3 +0,0 @@ -======== -Glossary -======== diff --git a/doc/source/index.rst b/doc/source/index.rst deleted file mode 100644 index 0a9a49889..000000000 --- a/doc/source/index.rst +++ /dev/null @@ -1,149 +0,0 @@ -.. - monasca-api documentation master file - Copyright 2017 FUJITSU LIMITED - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -=================================== -Welcome to Monasca's Documentation! -=================================== - -The monitoring requirements in OpenStack environments are vast, varied, and -highly complex. Monasca's project mission is to provide a -monitoring-as-a-service solution that is multi-tenant, highly scalable, -performant, and fault-tolerant. Monasca provides an extensible platform for -advanced monitoring that can be used by both operators and tenants to gain -operational insights about their infrastructure and applications. - -Monasca uses REST APIs for high-speed processing and querying of metrics and -logs. It integrates a streaming alarm engine, a notification engine, and an -aggregation engine. - -The use cases you can implement with Monasca are very diverse. Monasca follows -a micro-services architecture, with several services split across multiple -repositories. Each module is designed to provide a discrete service in the -overall monitoring solution and can be deployed or omitted according to -operators' and customers' needs. - -Architecture -============ - -The following illustration provides an overview of Monasca's metrics pipeline -and the interaction of the involved components. For information on Monasca's -log pipeline, refer to -`this wiki page `_. - -.. image:: /_static/images/architecture.png - :width: 900 px - -Repositories ------------- - -* `monasca-api `_: - RESTful API for metrics, alarms, and notifications. -* `monasca-agent `_: - Agent for retrieving metrics data. -* `monasca-persister `_: - Writes metrics and alarm state transitions to a time-series database. -* `monasca-thresh `_: - Thresholding engine for computing thresholds on metrics and determining alarm - states. -* `monasca-notification `_: - Pluggable notification engine for consuming alarm state transitions and - sending notifications for alarms. -* `monasca-transform `_: - Aggregation engine based on Apache Spark. -* `monasca-aggregator `_: - Light-weight metrics aggregator. - -Apart from sending requests directly to the API, the following tools are -available for interacting with Monasca: - -* `Monasca Client `_: - CLI and Python client. -* `Horizon plugin `_: - Plugin adding the monitoring panel to Horizon. -* `Grafana app `_: - Plugin for Grafana to view and configure alarm definitions, alarms, and - notifications. - -Libraries: - -* `monasca-common `_: - Common code used in the Monasca components.
-* `monasca-statsd `_: - StatsD-compatible library for sending metrics from instrumented applications. - -Grafana__ integration: - -* `monasca-grafana-datasource - `_: - Multi-tenant Monasca data source for Grafana. -* `grafana `_: - Forked version of Grafana 4.1.2 with Keystone authentication added. - -__ https://grafana.com/ - -For Contributors -================ - -.. toctree:: - :maxdepth: 1 - - contributor/index - -For Operators -================ - -Administrating --------------- - -.. toctree:: - :maxdepth: 1 - - admin/index - -Glossary -------------- -.. toctree:: - :maxdepth: 2 - - glossary - -Installation ------------- - -.. toctree:: - :maxdepth: 2 - - install/index - -User ------------- - -.. toctree:: - :maxdepth: 2 - - user/index - -Configuration -------------- - -* :doc:`Sample Config Files ` - -.. toctree:: - :hidden: - - admin/index - cli/index - configuration/sample diff --git a/doc/source/install/index.rst b/doc/source/install/index.rst deleted file mode 100644 index eeb01df82..000000000 --- a/doc/source/install/index.rst +++ /dev/null @@ -1,6 +0,0 @@ -============ -Installation -============ - -.. toctree:: - :maxdepth: 2 diff --git a/doc/source/user/index.rst b/doc/source/user/index.rst deleted file mode 100644 index 8da48ac21..000000000 --- a/doc/source/user/index.rst +++ /dev/null @@ -1,6 +0,0 @@ -========== -User guide -========== - -.. toctree:: - :maxdepth: 2 diff --git a/docker/Dockerfile b/docker/Dockerfile deleted file mode 100644 index d8b109f86..000000000 --- a/docker/Dockerfile +++ /dev/null @@ -1,79 +0,0 @@ -ARG DOCKER_IMAGE=monasca/api -ARG APP_REPO=https://review.opendev.org/openstack/monasca-api - -# Branch, tag or git hash to build from. -ARG REPO_VERSION=master -ARG CONSTRAINTS_BRANCH=master - -# Extra Python3 dependencies. -# gevent is not in upper constraints and v1.3.6 does not work with -# older greenlet. -ARG EXTRA_DEPS="gunicorn gevent>=21.12.0 python-memcached influxdb" - -# Always start from the `monasca-base` image and use a specific tag of it. -ARG BASE_TAG=master -FROM monasca/base:$BASE_TAG - -# Environment variables used for our service or wait scripts.
-ENV \ - KAFKA_URI=kafka:9092 \ - KAFKA_WAIT_FOR_TOPICS=alarm-state-transitions,metrics \ - KAFKA_LEGACY_CLIENT_ENABLED=false \ - MONASCA_CONTAINER_API_PORT=8070 \ - DATABASE_BACKEND=influxdb \ - INFLUX_HOST=influxdb \ - INFLUX_PORT=8086 \ - INFLUX_USER=mon_api \ - INFLUX_PASSWORD=password \ - INFLUX_DB=mon \ - CASSANDRA_CONTACT_POINTS=cassandra \ - CASSANDRA_PORT=9042 \ - CASSANDRA_KEY_SPACE=monasca \ - CASSANDRA_USER=mon_persister \ - CASSANDRA_PASSWORD=password \ - CASSANDRA_CONNECTION_TIMEOUT=5 \ - MYSQL_HOST=mysql \ - MYSQL_PORT=3306 \ - MYSQL_USER=monapi \ - MYSQL_PASSWORD=password \ - MYSQL_DB=mon \ - MEMCACHED_URI=memcached:11211 \ - DEFAULT_REGION=RegionOne \ - KEYSTONE_IDENTITY_URI=http://keystone:35357 \ - KEYSTONE_AUTH_URI=http://keystone:5000 \ - KEYSTONE_ADMIN_USER=admin \ - KEYSTONE_ADMIN_PASSWORD=secretadmin \ - KEYSTONE_ADMIN_TENANT=admin \ - KEYSTONE_ADMIN_DOMAIN=default \ - KEYSTONE_INSECURE=false \ - GUNICORN_WORKERS=9 \ - GUNICORN_WORKER_CLASS=gevent \ - GUNICORN_WORKER_CONNECTIONS=2000 \ - GUNICORN_BACKLOG=1000 \ - GUNICORN_TIMEOUT=10 \ - ADD_ACCESS_LOG=true \ - ACCESS_LOG_FORMAT="%(asctime)s [%(process)d] gunicorn.access [%(levelname)s] %(message)s" \ - ACCESS_LOG_FIELDS='%(h)s %(l)s %(u)s %(t)s %(r)s %(s)s %(b)s "%(f)s" "%(a)s" %(L)s' \ - LOG_LEVEL_ROOT=WARN \ - LOG_LEVEL_CONSOLE=INFO \ - LOG_LEVEL_ACCESS=INFO \ - STAY_ALIVE_ON_FAILURE=false \ - ENABLE_METRICS_API=true \ - ENABLE_LOGS_API=false - -# Copy all necessary files to their proper locations. -COPY api-* /etc/monasca/ -COPY monasca-api.conf.j2 /etc/monasca/ - -# Run here all additional steps your service needs post installation. -# Stay with only one `RUN` and chain next steps with `&& \` to avoid creating -# unnecessary image layers. Clean up at the end to conserve space. -#RUN \ -# echo "Some steps to do after main installation." && \ -# echo "Hello when building." - -# Expose the port for this specific service. -EXPOSE ${MONASCA_CONTAINER_API_PORT} - -# Implement start script in `start.sh` file. -CMD ["/start.sh"] diff --git a/docker/README.rst b/docker/README.rst deleted file mode 100644 index 8054fc077..000000000 --- a/docker/README.rst +++ /dev/null @@ -1,119 +0,0 @@ -============================ -Docker image for Monasca API -============================ -The Monasca API image is based on the monasca-base image. - - -Building monasca-base image -=========================== -See https://github.com/openstack/monasca-common/tree/master/docker/README.rst - - -Building Monasca API image -========================== - -Example: - $ ./build_image.sh - -Everything after ``./build_image.sh`` is optional and by default configured -to get versions from ``Dockerfile``. ``./build_image.sh`` also contains a more -detailed build description.
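As a quick reference, the following invocations mirror the usage notes documented inside ``build_image.sh`` itself; the branch, commit hash, and Gerrit ref below are only examples::

    $ ./build_image.sh                                          # master, with defaults taken from Dockerfile
    $ ./build_image.sh stable/queens                            # a specific branch
    $ ./build_image.sh cb7f226                                  # a specific commit
    $ ./build_image.sh refs/changes/51/558751/1 stable/queens   # a Gerrit patch set targeting stable/queens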
- -Environment variables -~~~~~~~~~~~~~~~~~~~~~ -============================== ======================================================================= ========================================== -Variable Default Description -============================== ======================================================================= ========================================== -KAFKA_URI kafka:9092 URI to Apache Kafka (distributed streaming platform) -KAFKA_LEGACY_CLIENT_ENABLED false Enable legacy Kafka client -MONASCA_CONTAINER_API_PORT 8070 The port for the metric pipeline endpoint -DATABASE_BACKEND influxdb Selects the backend database -INFLUX_HOST influxdb The host for influxdb -INFLUX_PORT 8086 The port for influxdb -INFLUX_USER mon_api The influx username -INFLUX_PASSWORD password The influx password -INFLUX_DB mon The influx database name -CASSANDRA_CONTACT_POINTS cassandra Cassandra node addresses -CASSANDRA_PORT 9042 Cassandra port number -CASSANDRA_KEY_SPACE monasca Cassandra keyspace where metrics are stored -CASSANDRA_USER mon_persister Cassandra user name -CASSANDRA_PASSWORD password Cassandra password -CASSANDRA_CONNECTION_TIMEOUT 5 Cassandra timeout in seconds when creating a new connection -MYSQL_HOST mysql The host for MySQL -MYSQL_PORT 3306 The port for MySQL -MYSQL_USER monapi The MySQL username -MYSQL_PASSWORD password The MySQL password -MYSQL_DB mon The MySQL database name -API_MYSQL_DISABLED unset If 'true', do not use a MySQL database. Only the metric API will work -MEMCACHED_URI memcached:11211 URI to Keystone authentication cache -DEFAULT_REGION RegionOne Region that API is running in -AUTHORIZED_ROLES admin,domainuser,domainadmin,monasca-user Roles for Monasca users (full API access) -AGENT_AUTHORIZED_ROLES monasca-agent Roles for Monasca agents (sending data only) -READ_ONLY_AUTHORIZED_ROLES monasca-read-only-user Roles for read only users -DELEGATE_AUTHORIZED_ROLES admin Roles allowed to read/write across tenant IDs -KEYSTONE_IDENTITY_URI http://keystone:35357 URI to Keystone admin endpoint -KEYSTONE_AUTH_URI http://keystone:5000 URI to Keystone public endpoint -KEYSTONE_ADMIN_USER admin OpenStack administrator user name -KEYSTONE_ADMIN_PASSWORD secretadmin OpenStack administrator user password -KEYSTONE_ADMIN_TENANT admin OpenStack administrator tenant name -KEYSTONE_ADMIN_DOMAIN default OpenStack administrator domain -KEYSTONE_INSECURE false Allow insecure Keystone connection -KEYSTONE_REGION_NAME undefined Keystone admin account region -GUNICORN_WORKERS 9 Number of gunicorn (WSGI-HTTP server) workers -GUNICORN_WORKER_CLASS gevent Gunicorn worker class to use -GUNICORN_WORKER_CONNECTIONS 2000 Number of gunicorn worker connections -GUNICORN_BACKLOG 1000 Size of the gunicorn backlog -GUNICORN_TIMEOUT 10 Gunicorn connection timeout -ADD_ACCESS_LOG false Enable gunicorn request/access logging -ACCESS_LOG_FORMAT "%(asctime)s [%(process)d] gunicorn.access [%(levelname)s] %(message)s" Define the logging format -ACCESS_LOG_FIELDS '%(h)s %(l)s %(u)s %(t)s %(r)s %(s)s %(b)s "%(f)s" "%(a)s" %(L)s' Define the fields to be logged -LOG_LEVEL_ROOT WARN Log level for root logging -LOG_LEVEL_CONSOLE INFO Log level for console logging -LOG_LEVEL_ACCESS INFO Log level for access logging -STAY_ALIVE_ON_FAILURE false If true, the container runs for 2 hours after the service fails -ENABLE_METRICS_API true Enable/Disable metrics endpoints -ENABLE_LOGS_API false Enable/disable logs endpoints -============================== =======================================================================
========================================== - -Wait scripts environment variables -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -======================== ================================ ========================================= -Variable Default Description -======================== ================================ ========================================= -KAFKA_URI kafka:9092 URI to Apache Kafka -KAFKA_WAIT_FOR_TOPICS alarm-state-transitions,metrics The topics where the metric API streams - the metric messages and alarm states -KAFKA_WAIT_RETRIES 24 Number of kafka connect attempts -KAFKA_WAIT_DELAY 5 Seconds to wait between attempts -MYSQL_HOST mysql The host for MySQL -MYSQL_PORT 3306 The port for MySQL -MYSQL_USER monapi The MySQL username -MYSQL_PASSWORD password The MySQL password -MYSQL_DB mon The MySQL database name -MYSQL_WAIT_RETRIES 24 Number of MySQL connection attempts -MYSQL_WAIT_DELAY 5 Seconds to wait between attempts -======================== ================================ ========================================= - -Scripts -~~~~~~~ -start.sh - This startup script should contain all steps that lead to a proper service - start, including the usage of wait scripts and the templating of configuration - files. It can also allow the container to keep running after the service has - died, for easier debugging. - -health_check.py - This file is used for checking the status of the application. - -Provided configuration templates -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -* monasca-api.conf.j2 -* api-config.ini.j2 -* api-logging.conf.j2 - - -Links -~~~~~ -https://docs.openstack.org/monasca-api/latest/ - -https://github.com/openstack/monasca-api/blob/master/README.rst diff --git a/docker/api-config.ini.j2 b/docker/api-config.ini.j2 deleted file mode 100644 index e0f3b44dc..000000000 --- a/docker/api-config.ini.j2 +++ /dev/null @@ -1,27 +0,0 @@ -[DEFAULT] -name = monasca_api - -[pipeline:main] -pipeline = request_id auth api - -[app:api] -paste.app_factory = monasca_api.api.server:launch - -[filter:auth] -paste.filter_factory = monasca_api.healthcheck.keystone_protocol:filter_factory - -[filter:request_id] -paste.filter_factory = oslo_middleware.request_id:RequestId.factory - -[server:main] -use = egg:gunicorn#main -host = 0.0.0.0 -port = {{ MONASCA_CONTAINER_API_PORT }} -workers = 9 -worker-connections = 2000 -worker-class = eventlet -timeout = 30 -backlog = 2048 -keepalive = 2 -proc_name = monasca_api -#loglevel = DEBUG diff --git a/docker/api-gunicorn.conf.j2 b/docker/api-gunicorn.conf.j2 deleted file mode 100644 index d6bca3725..000000000 --- a/docker/api-gunicorn.conf.j2 +++ /dev/null @@ -1,15 +0,0 @@ -bind = '0.0.0.0:{{ MONASCA_CONTAINER_API_PORT }}' -proc_name = 'monasca-api' - -backlog = {{ GUNICORN_BACKLOG | int }} -workers = {{ GUNICORN_WORKERS | int }} -worker_class = '{{ GUNICORN_WORKER_CLASS }}' -worker_connections = '{{ GUNICORN_WORKER_CONNECTIONS }}' -timeout = {{ GUNICORN_TIMEOUT | int }} - -{% if ADD_ACCESS_LOG == true %} -accesslog = '-' -{% endif %} -access_log_format = '{{ ACCESS_LOG_FIELDS }}' - -capture_output = True diff --git a/docker/api-logging.conf.j2 b/docker/api-logging.conf.j2 deleted file mode 100644 index 517f46dae..000000000 --- a/docker/api-logging.conf.j2 +++ /dev/null @@ -1,71 +0,0 @@ -[default] -disable_existing_loggers = 0 - -[loggers] -keys = root, gunicorn.access, sqlalchemy, kafka, kafka.consumer, urllib3 - -[handlers] -keys = console, console_access - -[formatters] -keys = context, generic - -[logger_root] -level = {{ LOG_LEVEL_ROOT }} -handlers = console -
-[logger_gunicorn.access] -level = INFO -handlers = console_access -propagate = 0 -qualname = gunicorn.access - -[logger_sqlalchemy] -qualname = sqlalchemy.engine -# "level = INFO" logs SQL queries. -# "level = DEBUG" logs SQL queries and results. -# "level = WARN" logs neither. (Recommended for production systems.) -level = ERROR -handlers = console -propagate=0 - -[logger_kafka.consumer] -qualname = kafka.consumer -level = INFO -formatter = default -handlers = console -propagate = 0 - -[logger_kafka] -qualname = monasca_common.kafka_lib -level = INFO -formatter = default -handlers = console -propagate = 0 - -[logger_urllib3] -qualname = urllib3.connectionpool -level = INFO -formatter = default -handlers = console -propagate = 0 - -[handler_console] -class = logging.StreamHandler -args = (sys.stdout,) -level = {{ LOG_LEVEL_CONSOLE }} -formatter = context - -[handler_console_access] -class = logging.StreamHandler -args = (sys.stdout,) -level = {{ LOG_LEVEL_ACCESS }} -formatter = generic - -[formatter_context] -class = oslo_log.formatters.ContextFormatter - -[formatter_generic] -format={{ ACCESS_LOG_FORMAT }} -datefmt=%Y-%m-%d %H:%M:%S -class=logging.Formatter diff --git a/docker/build_image.sh b/docker/build_image.sh deleted file mode 100755 index 59f2325f2..000000000 --- a/docker/build_image.sh +++ /dev/null @@ -1,150 +0,0 @@ -#!/bin/bash - -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# TODO(Dobroslaw): move this script to monasca-common/docker folder -# and leave here a small script to download and execute it using env variables -# to minimize code duplication. - -set -x # Print each script step. -set -eo pipefail # Exit the script if any statement returns an error. - -# This script is used for building the Docker image with proper labels -# and the proper version of monasca-common. -# -# Example usage: -# $ ./build_image.sh -# -# Everything after `./build_image.sh` is optional and by default configured -# to get versions from `Dockerfile`. -# -# To build from master branch (default): -# $ ./build_image.sh -# To build a specific version, run this script in the following way: -# $ ./build_image.sh stable/queens -# Building from a specific commit: -# $ ./build_image.sh cb7f226 -# When building from a tag, monasca-common will be used in the version available -# in the upper constraints file: -# $ ./build_image.sh 2.5.0 -# To build an image from a Gerrit patch set that is targeting branch stable/queens: -# $ ./build_image.sh refs/changes/51/558751/1 stable/queens -# -# If you want to build an image with a custom monasca-common version, you need -# to provide it as in the following example: -# $ ./build_image.sh master master refs/changes/19/595719/3 - -# Go to the folder with the Docker files.
-REAL_PATH=$(python3 -c "import os,sys; print(os.path.realpath('$0'))") -cd "$(dirname "$REAL_PATH")/../docker/" - -[ -z "$DOCKER_IMAGE" ] && \ - DOCKER_IMAGE=$(\grep DOCKER_IMAGE Dockerfile | cut -f2 -d"=") - -: "${REPO_VERSION:=$1}" -[ -z "$REPO_VERSION" ] && \ - REPO_VERSION=$(\grep REPO_VERSION Dockerfile | cut -f2 -d"=") -# Let's stick to the more readable version and disable SC2001 here. -# shellcheck disable=SC2001 -REPO_VERSION_CLEAN=$(echo "$REPO_VERSION" | sed 's|/|-|g') - -[ -z "$APP_REPO" ] && APP_REPO=$(\grep APP_REPO Dockerfile | cut -f2 -d"=") -GITHUB_REPO=$(echo "$APP_REPO" | sed 's/review.opendev.org/github.com/' | \ - sed 's/ssh:/https:/') - -if [ -z "$CONSTRAINTS_FILE" ]; then - CONSTRAINTS_FILE=$(\grep CONSTRAINTS_FILE Dockerfile | cut -f2 -d"=") || true - : "${CONSTRAINTS_FILE:=https://opendev.org/openstack/requirements/raw/branch/master/upper-constraints.txt}" -fi - -: "${CONSTRAINTS_BRANCH:=$2}" -[ -z "$CONSTRAINTS_BRANCH" ] && \ - CONSTRAINTS_BRANCH=$(\grep CONSTRAINTS_BRANCH Dockerfile | cut -f2 -d"=") - -# When using a stable version of the repository, use the same stable constraints file. -case "$REPO_VERSION" in - *stable*) - CONSTRAINTS_BRANCH_CLEAN="$REPO_VERSION" - CONSTRAINTS_FILE=${CONSTRAINTS_FILE/master/$CONSTRAINTS_BRANCH_CLEAN} - # Get monasca-common version from stable upper constraints file. - CONSTRAINTS_TMP_FILE=$(mktemp) - wget --output-document "$CONSTRAINTS_TMP_FILE" \ - $CONSTRAINTS_FILE - UPPER_COMMON=$(\grep 'monasca-common' "$CONSTRAINTS_TMP_FILE") - # Get only the version part from monasca-common. - UPPER_COMMON_VERSION="${UPPER_COMMON##*===}" - rm -rf "$CONSTRAINTS_TMP_FILE" - ;; - *) - CONSTRAINTS_BRANCH_CLEAN="$CONSTRAINTS_BRANCH" - ;; -esac - -# Monasca-common variables. -if [ -z "$COMMON_REPO" ]; then - COMMON_REPO=$(\grep COMMON_REPO Dockerfile | cut -f2 -d"=") || true - : "${COMMON_REPO:=https://review.opendev.org/openstack/monasca-common}" -fi -: "${COMMON_VERSION:=$3}" -if [ -z "$COMMON_VERSION" ]; then - COMMON_VERSION=$(\grep COMMON_VERSION Dockerfile | cut -f2 -d"=") || true - if [ "$UPPER_COMMON_VERSION" ]; then - # Common from upper constraints file. - COMMON_VERSION="$UPPER_COMMON_VERSION" - fi -fi - -# Clone the project to a temporary directory to get the proper commit hash from -# branches and tags. We need this for setting proper image labels. -# Docker does not allow getting any data from the build host when building -# an image. -TMP_DIR=$(mktemp -d) -( - cd "$TMP_DIR" - # This many steps are needed to support gerrit patch sets. - git init - git remote add origin "$APP_REPO" - git fetch origin "$REPO_VERSION" - git reset --hard FETCH_HEAD -) -GIT_COMMIT=$(git -C "$TMP_DIR" rev-parse HEAD) -[ -z "${GIT_COMMIT}" ] && echo "No git commit hash found" && exit 1 -rm -rf "$TMP_DIR" - -# Do the same for monasca-common. -COMMON_TMP_DIR=$(mktemp -d) -( - cd "$COMMON_TMP_DIR" - # This many steps are needed to support gerrit patch sets.
- git init - git remote add origin "$COMMON_REPO" - git fetch origin "$COMMON_VERSION" - git reset --hard FETCH_HEAD -) -COMMON_GIT_COMMIT=$(git -C "$COMMON_TMP_DIR" rev-parse HEAD) -[ -z "${COMMON_GIT_COMMIT}" ] && echo "No git commit hash found" && exit 1 -rm -rf "$COMMON_TMP_DIR" - -CREATION_TIME=$(date -u +"%Y-%m-%dT%H:%M:%SZ") - -docker build --no-cache \ - --build-arg CREATION_TIME="$CREATION_TIME" \ - --build-arg GITHUB_REPO="$GITHUB_REPO" \ - --build-arg APP_REPO="$APP_REPO" \ - --build-arg REPO_VERSION="$REPO_VERSION" \ - --build-arg GIT_COMMIT="$GIT_COMMIT" \ - --build-arg CONSTRAINTS_FILE="$CONSTRAINTS_FILE" \ - --build-arg COMMON_REPO="$COMMON_REPO" \ - --build-arg COMMON_VERSION="$COMMON_VERSION" \ - --build-arg COMMON_GIT_COMMIT="$COMMON_GIT_COMMIT" \ - --tag "$DOCKER_IMAGE":"$REPO_VERSION_CLEAN" . diff --git a/docker/health_check.py b/docker/health_check.py deleted file mode 100755 index 312506539..000000000 --- a/docker/health_check.py +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env python -# coding=utf-8 - -# (C) Copyright 2018 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Health check returns 0 when the service is working properly.""" - -import logging -import os -import sys -from urllib import request - - -LOG_LEVEL = logging.getLevelName(os.environ.get('LOG_LEVEL', 'INFO')) -logging.basicConfig(level=LOG_LEVEL) -logger = logging.getLogger(__name__) - -API_PORT = os.environ.get('MONASCA_CONTAINER_API_PORT', '8070') -url = "http://localhost:" + API_PORT + "/healthcheck" - - -def main(): - """Send a health check request to the health check endpoint of the Monasca API.""" - logger.debug('Send health check request to %s', url) - try: - request.urlopen(url=url) - except Exception as ex: - logger.error('Exception during request handling: ' + repr(ex)) - sys.exit(1) - - -if __name__ == '__main__': - main() diff --git a/docker/monasca-api.conf.j2 b/docker/monasca-api.conf.j2 deleted file mode 100644 index 3e2015e4c..000000000 --- a/docker/monasca-api.conf.j2 +++ /dev/null @@ -1,870 +0,0 @@ -[DEFAULT] - -# -# From monasca_api -# - -# -# Region that API is running in -# (string value) -# -# This option has a sample default set, which means that -# its actual default value may vary from the one documented -# below. -region = {{ DEFAULT_REGION }} - -# -# Valid periods for notification methods -# (list value) -#valid_notification_periods = 0,60 - -# -# Enable Metrics api endpoints (boolean value) -enable_metrics_api = {{ ENABLE_METRICS_API }} - -# -# Enable Logs api endpoints (boolean value) -enable_logs_api = {{ ENABLE_LOGS_API }} - -# -# From oslo.log -# - -# If set to true, the logging level will be set to DEBUG instead of the default -# INFO level (boolean value) -# Note: This option can be changed without restarting. -#debug = false - -# The name of a logging configuration file. This file is appended to any -# existing logging configuration files. For details about logging configuration -# files, see the Python logging module documentation.
Note that when logging -# configuration files are used then all logging configuration is set in the -# configuration file and other logging configuration options are ignored (for -# example, logging_context_format_string) (string value) -# Note: This option can be changed without restarting. -# Deprecated group/name - [DEFAULT]/log_config -log_config_append=/etc/monasca/api-logging.conf - -# Defines the format string for %%(asctime)s in log records. Default: -# %(default)s . This option is ignored if log_config_append is set (string -# value) -#log_date_format = %Y-%m-%d %H:%M:%S - -# (Optional) Name of log file to send logging output to. If no default is set, -# logging will go to stderr as defined by use_stderr. This option is ignored if -# log_config_append is set (string value) -# Deprecated group/name - [DEFAULT]/logfile -#log_file = - -# (Optional) The base directory used for relative log_file paths. This option -# is ignored if log_config_append is set (string value) -# Deprecated group/name - [DEFAULT]/logdir -#log_dir = - -# Uses logging handler designed to watch file system. When log file is moved or -# removed this handler will open a new log file with specified path -# instantaneously. It makes sense only if log_file option is specified and -# Linux platform is used. This option is ignored if log_config_append is set -# (boolean value) -#watch_log_file = false - -# Use syslog for logging. Existing syslog format is DEPRECATED and will be -# changed later to honor RFC5424. This option is ignored if log_config_append -# is set (boolean value) -#use_syslog = false - -# Enable journald for logging. If running in a systemd environment you may wish -# to enable journal support. Doing so will use the journal native protocol -# which includes structured metadata in addition to log messages. This option is -# ignored if log_config_append is set (boolean value) -#use_journal = false - -# Syslog facility to receive log lines. This option is ignored if -# log_config_append is set (string value) -#syslog_log_facility = LOG_USER - -# Use JSON formatting for logging. This option is ignored if log_config_append -# is set (boolean value) -#use_json = false - -# Log output to standard error. This option is ignored if log_config_append is -# set (boolean value) -#use_stderr = false - -# Log output to Windows Event Log (boolean value) -#use_eventlog = false - -# The amount of time before the log files are rotated. This option is ignored -# unless log_rotation_type is set to "interval" (integer value) -#log_rotate_interval = 1 - -# Rotation interval type. The time of the last file change (or the time when -# the service was started) is used when scheduling the next rotation (string -# value) -# Possible values: -# Seconds - -# Minutes - -# Hours - -# Days - -# Weekday - -# Midnight - -#log_rotate_interval_type = days - -# Maximum number of rotated log files (integer value) -#max_logfile_count = 30 - -# Log file maximum size in MB. This option is ignored if "log_rotation_type" is -# not set to "size" (integer value) -#max_logfile_size_mb = 200 - -# Log rotation type (string value) -# Possible values: -# interval - Rotate logs at predefined time intervals. -# size - Rotate logs once they reach a predefined size. -# none - Do not rotate log files.
-#log_rotation_type = none - -# Format string to use for log messages with context (string value) -#logging_context_format_string = %(asctime)s.%(msecs)03d %(process)d %(levelname)s %(name)s [%(request_id)s %(user_identity)s] %(instance)s%(message)s - -# Format string to use for log messages when context is undefined (string -# value) -#logging_default_format_string = %(asctime)s.%(msecs)03d %(process)d %(levelname)s %(name)s [-] %(instance)s%(message)s - -# Additional data to append to log message when logging level for the message -# is DEBUG (string value) -#logging_debug_format_suffix = %(funcName)s %(pathname)s:%(lineno)d - -# Prefix each line of exception output with this format (string value) -#logging_exception_prefix = %(asctime)s.%(msecs)03d %(process)d ERROR %(name)s %(instance)s - -# Defines the format string for %(user_identity)s that is used in -# logging_context_format_string (string value) -#logging_user_identity_format = %(user)s %(tenant)s %(domain)s %(user_domain)s %(project_domain)s - -# List of package logging levels in logger=LEVEL pairs. This option is ignored -# if log_config_append is set (list value) -#default_log_levels = amqp=WARN,amqplib=WARN,boto=WARN,qpid=WARN,sqlalchemy=WARN,suds=INFO,oslo.messaging=INFO,oslo_messaging=INFO,iso8601=WARN,requests.packages.urllib3.connectionpool=WARN,urllib3.connectionpool=WARN,websocket=WARN,requests.packages.urllib3.util.retry=WARN,urllib3.util.retry=WARN,keystonemiddleware=WARN,routes.middleware=WARN,stevedore=WARN,taskflow=WARN,keystoneauth=WARN,oslo.cache=INFO,oslo_policy=INFO,dogpile.core.dogpile=INFO - -# Enables or disables publication of error events (boolean value) -#publish_errors = false - -# The format for an instance that is passed with the log message (string value) -#instance_format = "[instance: %(uuid)s] " - -# The format for an instance UUID that is passed with the log message (string -# value) -#instance_uuid_format = "[instance: %(uuid)s] " - -# Interval, number of seconds, of log rate limiting (integer value) -#rate_limit_interval = 0 - -# Maximum number of logged messages per rate_limit_interval (integer value) -#rate_limit_burst = 0 - -# Log level name used by rate limiting: CRITICAL, ERROR, INFO, WARNING, DEBUG -# or empty string. Logs with level greater or equal to rate_limit_except_level -# are not filtered. 
An empty string means that all levels are filtered (string -# value) -#rate_limit_except_level = CRITICAL - -# Enables or disables fatal status of deprecations (boolean value) -#fatal_deprecations = false - - -[cassandra] - -# -# From monasca_api -# - -# -# Comma separated list of Cassandra node IP addresses -# (list value) -contact_points = {{ CASSANDRA_CONTACT_POINTS | default('127.0.0.1') }} - -# -# Port to Cassandra server -# (port value) -port = {{ CASSANDRA_PORT | default('9042') }} - -# -# Keyspace where metrics are stored -# (string value) -keyspace = {{ CASSANDRA_KEY_SPACE }} - -# -# Cassandra user for monasca-api service -# (string value) -user = {{ CASSANDRA_USER }} - -# -# Cassandra user password for monasca-api service -# (string value) -password = {{ CASSANDRA_PASSWORD }} - -# -# Cassandra connection timeout in seconds -# (integer value) -connection_timeout = {{ CASSANDRA_CONNECTION_TIMEOUT }} - - -[database] - -# -# From oslo.db -# - -# If True, SQLite uses synchronous mode (boolean value) -#sqlite_synchronous = true - -# The back end to use for the database (string value) -# Deprecated group/name - [DEFAULT]/db_backend -#backend = sqlalchemy - -# The SQLAlchemy connection string to use to connect to the database (string -# value) -# Deprecated group/name - [DEFAULT]/sql_connection -# Deprecated group/name - [DATABASE]/sql_connection -# Deprecated group/name - [sql]/connection -{% if not ( API_MYSQL_DISABLED is defined and API_MYSQL_DISABLED | lower == 'true' ) %} -connection = "mysql+pymysql://{{ MYSQL_USER }}:{{ MYSQL_PASSWORD }}@{{ MYSQL_HOST }}:{{ MYSQL_PORT | default('3306') }}/{{ MYSQL_DB }}" -{% endif %} - - -# The SQLAlchemy connection string to use to connect to the slave database -# (string value) -#slave_connection = - -# The SQL mode to be used for MySQL sessions. This option, including the -# default, overrides any server-set SQL mode. To use whatever SQL mode is set -# by the server configuration, set this to no value. Example: mysql_sql_mode= -# (string value) -#mysql_sql_mode = TRADITIONAL - -# If True, transparently enables support for handling MySQL Cluster (NDB) -# (boolean value) -#mysql_enable_ndb = false - -# Connections which have been present in the connection pool longer than this -# number of seconds will be replaced with a new one the next time they are -# checked out from the pool (integer value) -# Deprecated group/name - [DATABASE]/idle_timeout -# Deprecated group/name - [database]/idle_timeout -# Deprecated group/name - [DEFAULT]/sql_idle_timeout -# Deprecated group/name - [DATABASE]/sql_idle_timeout -# Deprecated group/name - [sql]/idle_timeout -#connection_recycle_time = 3600 - -# DEPRECATED: Minimum number of SQL connections to keep open in a pool (integer -# value) -# Deprecated group/name - [DEFAULT]/sql_min_pool_size -# Deprecated group/name - [DATABASE]/sql_min_pool_size -# This option is deprecated for removal. -# Its value may be silently ignored in the future. -# Reason: The option to set the minimum pool size is not supported by -# sqlalchemy. -#min_pool_size = 1 - -# Maximum number of SQL connections to keep open in a pool. Setting a value of -# 0 indicates no limit (integer value) -# Deprecated group/name - [DEFAULT]/sql_max_pool_size -# Deprecated group/name - [DATABASE]/sql_max_pool_size -#max_pool_size = 5 - -# Maximum number of database connection retries during startup.
Set to -1 to -# specify an infinite retry count (integer value) -# Deprecated group/name - [DEFAULT]/sql_max_retries -# Deprecated group/name - [DATABASE]/sql_max_retries -#max_retries = 10 - -# Interval between retries of opening a SQL connection (integer value) -# Deprecated group/name - [DEFAULT]/sql_retry_interval -# Deprecated group/name - [DATABASE]/reconnect_interval -#retry_interval = 10 - -# If set, use this value for max_overflow with SQLAlchemy (integer value) -# Deprecated group/name - [DEFAULT]/sql_max_overflow -# Deprecated group/name - [DATABASE]/sqlalchemy_max_overflow -#max_overflow = 50 - -# Verbosity of SQL debugging information: 0=None, 100=Everything (integer -# value) -# Minimum value: 0 -# Maximum value: 100 -# Deprecated group/name - [DEFAULT]/sql_connection_debug -#connection_debug = 0 - -# Add Python stack traces to SQL as comment strings (boolean value) -# Deprecated group/name - [DEFAULT]/sql_connection_trace -#connection_trace = false - -# If set, use this value for pool_timeout with SQLAlchemy (integer value) -# Deprecated group/name - [DATABASE]/sqlalchemy_pool_timeout -#pool_timeout = - -# Enable the experimental use of database reconnect on connection lost (boolean -# value) -#use_db_reconnect = false - -# Seconds between retries of a database transaction (integer value) -#db_retry_interval = 1 - -# If True, increases the interval between retries of a database operation up to -# db_max_retry_interval (boolean value) -#db_inc_retry_interval = true - -# If db_inc_retry_interval is set, the maximum seconds between retries of a -# database operation (integer value) -#db_max_retry_interval = 10 - -# Maximum retries in case of connection error or deadlock error before error is -# raised. Set to -1 to specify an infinite retry count (integer value) -#db_max_retries = 20 - -# Optional URL parameters to append onto the connection URL at connect time; -# specify as param1=value1&param2=value2& (string value) -#connection_parameters = - - -[dispatcher] - -# -# From monasca_api -# - -# Versions controller (string value) -versions = monasca_api.v2.reference.versions:Versions - -# Version 2.0 controller (string value) -version_2_0 = monasca_api.v2.reference.version_2_0:Version2 - -# Metrics controller (string value) -metrics = monasca_api.v2.reference.metrics:Metrics - -# Metrics measurements controller (string value) -metrics_measurements = monasca_api.v2.reference.metrics:MetricsMeasurements - -# Metrics statistics controller (string value) -metrics_statistics = monasca_api.v2.reference.metrics:MetricsStatistics - -# Metrics names controller (string value) -metrics_names = monasca_api.v2.reference.metrics:MetricsNames - -# Alarm definitions controller (string value) -alarm_definitions = monasca_api.v2.reference.alarm_definitions:AlarmDefinitions - -# Alarms controller (string value) -alarms = monasca_api.v2.reference.alarms:Alarms - -# Alarms Count controller (string value) -alarms_count = monasca_api.v2.reference.alarms:AlarmsCount - -# Alarms state history controller (string value) -alarms_state_history = monasca_api.v2.reference.alarms:AlarmsStateHistory - -# Notification Methods controller (string value) -notification_methods = monasca_api.v2.reference.notifications:Notifications - -# Dimension Values controller (string value) -dimension_values = monasca_api.v2.reference.metrics:DimensionValues - -# Dimension Names controller (string value) -dimension_names = monasca_api.v2.reference.metrics:DimensionNames - -# Notifications Type Methods controller (string value)
-notification_method_types = monasca_api.v2.reference.notificationstype:NotificationsType - -# Health checks endpoint controller (string value) -healthchecks = monasca_api.healthchecks:HealthChecks - - -[influxdb] - -# -# From monasca_api -# - -# -# Database name where metrics are stored -# (string value) -database_name = {{ INFLUX_DB }} - -# -# IP address to Influxdb server -# (host address value) -ip_address = {{ INFLUX_HOST }} - -# Port to Influxdb server (port value) -# Minimum value: 0 -# Maximum value: 65535 -port = {{ INFLUX_PORT }} - -# -# Influxdb user -# (string value) -user = {{ INFLUX_USER }} - -# -# Influxdb password -# (string value) -password = {{ INFLUX_PASSWORD }} - - -[kafka] - -# -# From monasca_api -# - -# -# Comma separated list of Kafka broker host:port -# (list value) -uri = {{ KAFKA_URI }} - -# -# The topic that metrics will be published to -# (string value) -metrics_topic = metrics - -# -# The topic that events will be published to -# (string value) -#events_topic = events - -# The topic that logs will be published to (multi valued) -#logs_topics = log - -# -# The topic that alarm state will be published to -# (string value) -#alarm_state_transitions_topic = alarm-state-transitions - -# -# The group name that this service belongs to -# (string value) -group = api - -# -# The ack time back to Kafka. -# (integer value) -#ack_time = 20 - -# -# The number of retries when there is a connection error -# (integer value) -max_retry = 1 - -# -# The type of posting -# (boolean value) -is_async = true - -# -# Specify if the messages received should be parsed. -# If True, messages will not be parsed, otherwise -# they will be parsed -# (boolean value) -compact = true - -# -# The partitions this connection should -# listen for messages on. Currently does not -# support multiple partitions. -# Default is to listen on partition 0 -# (list value) -partitions = 0 - -# -# Specify if received data should be simply dropped. -# This parameter is only for testing purposes -# (boolean value) -#drop_data = false - -# -# The wait time when there are no messages on the Kafka queue -# (integer value) -# Minimum value: 1 -# Advanced Option: intended for advanced users and not used -# by the majority of users, and might have a significant -# effect on stability and/or performance. -wait_time = 1 - -# -# Should messages be automatically committed -# (boolean value) -# Advanced Option: intended for advanced users and not used -# by the majority of users, and might have a significant -# effect on stability and/or performance. -#auto_commit = false - -# -# Enable legacy Kafka client. When set, an old version of -# the kafka-python library is used. Message format version -# for the brokers should be set to 0.9.0.0 to avoid -# performance issues until all consumers are upgraded. -legacy_kafka_client_enabled = {{ KAFKA_LEGACY_CLIENT_ENABLED | default(false) }} - - -[keystone_authtoken] - -auth_url = {{ KEYSTONE_IDENTITY_URI }} -username = {{ KEYSTONE_ADMIN_USER }} -password = {{ KEYSTONE_ADMIN_PASSWORD }} -user_domain_name = Default -project_name = {{ KEYSTONE_ADMIN_TENANT }} -project_domain_name = Default - -# -# From keystonemiddleware.auth_token -# - -# Complete "public" Identity API endpoint. This endpoint should not be an -# "admin" endpoint, as it should be accessible by all end users. -# Unauthenticated clients are redirected to this endpoint to authenticate. -# Although this endpoint should ideally be unversioned, client support in the -# wild varies.
If you're using a versioned v2 endpoint here, then this should -# *not* be the same endpoint the service user utilizes for validating tokens, -# because normal end users may not be able to reach that endpoint (string -# value) -# Deprecated group/name - [keystone_authtoken]/auth_uri -www_authenticate_uri = {{ KEYSTONE_AUTH_URI }} - -# DEPRECATED: Complete "public" Identity API endpoint. This endpoint should not -# be an "admin" endpoint, as it should be accessible by all end users. -# Unauthenticated clients are redirected to this endpoint to authenticate. -# Although this endpoint should ideally be unversioned, client support in the -# wild varies. If you're using a versioned v2 endpoint here, then this should -# *not* be the same endpoint the service user utilizes for validating tokens, -# because normal end users may not be able to reach that endpoint. This option -# is deprecated in favor of www_authenticate_uri and will be removed in the S -# release (string value) -# This option is deprecated for removal since Queens. -# Its value may be silently ignored in the future. -# Reason: The auth_uri option is deprecated in favor of www_authenticate_uri -# and will be removed in the S release. -#auth_uri = - -# API version of the admin Identity API endpoint (string value) -#auth_version = - -# Do not handle authorization requests within the middleware, but delegate the -# authorization decision to downstream WSGI components (boolean value) -#delay_auth_decision = false - -# Request timeout value for communicating with Identity API server (integer -# value) -#http_connect_timeout = - -# How many times are we trying to reconnect when communicating with Identity -# API Server (integer value) -#http_request_max_retries = 3 - -# Request environment key where the Swift cache object is stored. When -# auth_token middleware is deployed with a Swift cache, use this option to have -# the middleware share a caching backend with swift. Otherwise, use the -# ``memcached_servers`` option instead (string value) -#cache = - -# Required if identity server requires client certificate (string value) -#certfile = - -# Required if identity server requires client certificate (string value) -#keyfile = - -# A PEM encoded Certificate Authority to use when verifying HTTPs connections. -# Defaults to system CAs (string value) -#cafile = - -# Verify HTTPS connections (boolean value) -insecure = {{ KEYSTONE_INSECURE }} - -# The region in which the identity server can be found (string value) -{% if KEYSTONE_REGION_NAME is defined %} -region_name = {{ KEYSTONE_REGION_NAME }} -{% endif %} - -# DEPRECATED: Directory used to cache files related to PKI tokens. This option -# has been deprecated in the Ocata release and will be removed in the P release -# (string value) -# This option is deprecated for removal since Ocata. -# Its value may be silently ignored in the future. -# Reason: PKI token format is no longer supported. -#signing_dir = - -# Optionally specify a list of memcached server(s) to use for caching. If left -# undefined, tokens will instead be cached in-process (list value) -# Deprecated group/name - [keystone_authtoken]/memcache_servers -memcached_servers = {{ MEMCACHED_URI }} - -# In order to prevent excessive effort spent validating tokens, the middleware -# caches previously-seen tokens for a configurable duration (in seconds). 
Set -# to -1 to disable caching completely (integer value) -#token_cache_time = 300 - -# DEPRECATED: Determines the frequency at which the list of revoked tokens is -# retrieved from the Identity service (in seconds). A high number of revocation -# events combined with a low cache duration may significantly reduce -# performance. Only valid for PKI tokens. This option has been deprecated in -# the Ocata release and will be removed in the P release (integer value) -# This option is deprecated for removal since Ocata. -# Its value may be silently ignored in the future. -# Reason: PKI token format is no longer supported. -#revocation_cache_time = 10 - -# (Optional) If defined, indicate whether token data should be authenticated or -# authenticated and encrypted. If MAC, token data is authenticated (with HMAC) -# in the cache. If ENCRYPT, token data is encrypted and authenticated in the -# cache. If the value is not one of these options or empty, auth_token will -# raise an exception on initialization (string value) -# Possible values: -# None - -# MAC - -# ENCRYPT - -#memcache_security_strategy = None - -# (Optional, mandatory if memcache_security_strategy is defined) This string is -# used for key derivation (string value) -#memcache_secret_key = - -# (Optional) Number of seconds memcached server is considered dead before it is -# tried again (integer value) -#memcache_pool_dead_retry = 300 - -# (Optional) Maximum total number of open connections to every memcached server -# (integer value) -#memcache_pool_maxsize = 10 - -# (Optional) Socket timeout in seconds for communicating with a memcached -# server (integer value) -#memcache_pool_socket_timeout = 3 - -# (Optional) Number of seconds a connection to memcached is held unused in the -# pool before it is closed (integer value) -#memcache_pool_unused_timeout = 60 - -# (Optional) Number of seconds that an operation will wait to get a memcached -# client connection from the pool (integer value) -#memcache_pool_conn_get_timeout = 10 - -# (Optional) Use the advanced (eventlet safe) memcached client pool. The -# advanced pool will only work under python 2.x (boolean value) -#memcache_use_advanced_pool = false - -# (Optional) Indicate whether to set the X-Service-Catalog header. If False, -# middleware will not ask for service catalog on token validation and will not -# set the X-Service-Catalog header (boolean value) -#include_service_catalog = true - -# Used to control the use and type of token binding. Can be set to: "disabled" -# to not check token binding. "permissive" (default) to validate binding -# information if the bind type is of a form known to the server and ignore it -# if not. "strict" like "permissive" but if the bind type is unknown the token -# will be rejected. "required" any form of token binding is needed to be -# allowed. Finally the name of a binding method that must be present in tokens -# (string value) -#enforce_token_bind = permissive - -# DEPRECATED: If true, the revocation list will be checked for cached tokens. -# This requires that PKI tokens are configured on the identity server (boolean -# value) -# This option is deprecated for removal since Ocata. -# Its value may be silently ignored in the future. -# Reason: PKI token format is no longer supported. -#check_revocations_for_cached = false - -# DEPRECATED: Hash algorithms to use for hashing PKI tokens. This may be a -# single algorithm or multiple. The algorithms are those supported by Python -# standard hashlib.new(). 
The hashes will be tried in the order given, so put -# the preferred one first for performance. The result of the first hash will be -# stored in the cache. This will typically be set to multiple values only while -# migrating from a less secure algorithm to a more secure one. Once all the old -# tokens are expired this option should be set to a single value for better -# performance (list value) -# This option is deprecated for removal since Ocata. -# Its value may be silently ignored in the future. -# Reason: PKI token format is no longer supported. -#hash_algorithms = md5 - -# A choice of roles that must be present in a service token. Service tokens are -# allowed to request that an expired token can be used and so this check should -# tightly control that only actual services should be sending this token. Roles -# here are applied as an ANY check so any role in this list must be present. -# For backwards compatibility reasons this currently only affects the -# allow_expired check (list value) -#service_token_roles = service - -# For backwards compatibility reasons we must let valid service tokens pass -# that don't pass the service_token_roles check as valid. Setting this true -# will become the default in a future release and should be enabled if possible -# (boolean value) -service_token_roles_required = true - -# Authentication type to load (string value) -# Deprecated group/name - [keystone_authtoken]/auth_plugin -auth_type = password - -# Config Section from which to load plugin specific options (string value) -#auth_section = - - -[messaging] - -# -# From monasca_api -# - -# -# The message queue driver to use -# (string value) -driver = monasca_api.common.messaging.kafka_publisher:KafkaPublisher - -# DEPRECATED: -# The type of metrics message format to publish to the message queue -# (string value) -# This option is deprecated for removal since 2.1.0. -# Its value may be silently ignored in the future. -# Reason: -# Option is not used anywhere in the codebase -#metrics_message_format = reference - -# DEPRECATED: -# The type of events message format to publish to the message queue -# (string value) -# This option is deprecated for removal since 2.1.0. -# Its value may be silently ignored in the future. -# Reason: -# Option is not used anywhere in the codebase -#events_message_format = reference - - -[oslo_policy] - -# -# From oslo.policy -# - -# This option controls whether or not to enforce scope when evaluating -# policies. If ``True``, the scope of the token used in the request is compared -# to the ``scope_types`` of the policy being enforced. If the scopes do not -# match, an ``InvalidScope`` exception will be raised. If ``False``, a message -# will be logged informing operators that policies are being invoked with -# mismatching scope (boolean value) -#enforce_scope = false - -# The file that defines policies (string value) -#policy_file = policy.yaml - -# Default rule. Enforced when a requested rule is not found (string value) -#policy_default_rule = default - -# Directories where policy configuration files are stored. They can be relative -# to any directory in the search path defined by the config_dir option, or -# absolute paths. The file defined by policy_file must exist for these -# directories to be searched. 
Missing or empty directories are ignored (multi -# valued) -#policy_dirs = policy.d - -# Content Type to send and receive data for REST based policy check (string -# value) -# Possible values: -# application/x-www-form-urlencoded - -# application/json - -#remote_content_type = application/x-www-form-urlencoded - -# Server identity verification for REST based policy check (boolean value) -#remote_ssl_verify_server_crt = false - -# Absolute path to ca cert file for REST based policy check (string value) -#remote_ssl_ca_crt_file = - -# Absolute path to client cert for REST based policy check (string value) -#remote_ssl_client_crt_file = - -# Absolute path to client key file for REST based policy check (string value) -#remote_ssl_client_key_file = - - -[repositories] - -# -# From monasca_api -# - -# -# The repository driver to use for metrics -# (string value) -# Advanced Option: intended for advanced users and not used -# by the majority of users, and might have a significant -# effect on stability and/or performance. -{% if DATABASE_BACKEND | lower == 'cassandra' %} -metrics_driver = monasca_api.common.repositories.cassandra.metrics_repository:MetricsRepository -{% else %} -metrics_driver = monasca_api.common.repositories.influxdb.metrics_repository:MetricsRepository -{% endif %} - -# -# The repository driver to use for alarm definitions -# (string value) -# Advanced Option: intended for advanced users and not used -# by the majority of users, and might have a significant -# effect on stability and/or performance. -alarm_definitions_driver = monasca_api.common.repositories.sqla.alarm_definitions_repository:AlarmDefinitionsRepository - -# -# The repository driver to use for alarms -# (string value) -# Advanced Option: intended for advanced users and not used -# by the majority of users, and might have a significant -# effect on stability and/or performance. -alarms_driver = monasca_api.common.repositories.sqla.alarms_repository:AlarmsRepository - -# -# The repository driver to use for notifications -# (string value) -# Advanced Option: intended for advanced users and not used -# by the majority of users, and might have a significant -# effect on stability and/or performance. -notifications_driver = monasca_api.common.repositories.sqla.notifications_repository:NotificationsRepository - -# -# The repository driver to use for notification method types -# (string value) -# Advanced Option: intended for advanced users and not used -# by the majority of users, and might have a significant -# effect on stability and/or performance.
-notification_method_type_driver = monasca_api.common.repositories.sqla.notification_method_type_repository:NotificationMethodTypeRepository - - -[security] - -# -# From monasca_api -# - -# Roles that are allowed to check the health (list value) -#healthcheck_roles = @ - -# Roles that are allowed to check the versions (list value) -#versions_roles = @ - -# -# Roles that are allowed full access to the API -# (list value) -default_authorized_roles = {{ AUTHORIZED_ROLES | default('admin, domainuser, domainadmin, monasca-user') }} - -# -# Roles that are only allowed to POST to the API -# (list value) -agent_authorized_roles = {{ AGENT_AUTHORIZED_ROLES | default('monasca-agent') }} - -# -# Roles that are only allowed to GET from the API -# (list value) -read_only_authorized_roles = {{ READ_ONLY_AUTHORIZED_ROLES | default('monasca-read-only-user') }} - -# -# Roles that are allowed to POST metrics on -# behalf of another tenant -# (list value) -delegate_authorized_roles = {{ DELEGATE_AUTHORIZED_ROLES | default('admin') }} diff --git a/docker/start.sh b/docker/start.sh deleted file mode 100644 index 5428dfe2a..000000000 --- a/docker/start.sh +++ /dev/null @@ -1,46 +0,0 @@ -#!/bin/sh - -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# Startup script. -# All checks and configuration templating that must happen before the service -# can be safely started should be added in this file. - -set -eo pipefail # Exit the script if any statement returns an error. - -# Test services we need before starting our service. -echo "Start script: waiting for needed services" -python3 /kafka_wait_for_topics.py -python3 /mysql_check.py - -# Template all config files before start; this uses env variables. -# Read usage examples: https://pypi.org/project/Templer/ -echo "Start script: creating config files from templates" -templer -v -f /etc/monasca/monasca-api.conf.j2 /etc/monasca/monasca-api.conf -templer -v -f /etc/monasca/api-config.ini.j2 /etc/monasca/api-config.ini -templer -v -f /etc/monasca/api-logging.conf.j2 /etc/monasca/api-logging.conf -templer -v -f /etc/monasca/api-gunicorn.conf.j2 /etc/monasca/api-gunicorn.conf - -# Start our service. -echo "Start script: starting container" -gunicorn \ - --config /etc/monasca/api-gunicorn.conf \ - --paste /etc/monasca/api-config.ini - -# Allow the server to stay alive for 2 hours in case of failure, for easier debugging. -RESULT=$? -if [ $RESULT != 0 ] && [ "$STAY_ALIVE_ON_FAILURE" = "true" ]; then - echo "Service died, waiting 120 min before exiting" - sleep 7200 -fi -exit $RESULT diff --git a/docs/java.md b/docs/java.md deleted file mode 100644 index 463b0e4b3..000000000 --- a/docs/java.md +++ /dev/null @@ -1,77 +0,0 @@ -# WARNING DEPRECATED - -The Java implementation of Monasca API is DEPRECATED and will be removed in a future release. - -## Java Build - -Requires [monasca-common](https://github.com/openstack/monasca-common). First clone that repository and then run mvn install.
Then return to monasca-api and: - - $ cd java - $ mvn clean package - -## StackForge Java Build - -There is a pom.xml in the base directory that should only be used for the StackForge build. The StackForge build is a rather strange build because of the limitations of the current StackForge java jobs and infrastructure. We have found that the API runs faster if built with maven 3 but the StackForge nodes only have maven 2. This build checks the version of maven and, if it is not maven 3, downloads a version of maven 3 and uses it. This build depends on jars that are from monasca-common. That StackForge build uploads the completed jars to http://tarballs.openstack.org/ci/monasca-common, but they are just regular jars, not in a maven repository, and sometimes zuul takes a long time to do the upload. Hence, the first thing the maven build from the base project does is invoke [build_common.sh](/common/build_common.sh) in the common directory. This script clones monasca-common and then invokes maven 3 to build monasca-common in the common directory and install the jars in the local maven repository. - -Since this is all rather complex, that part of the build only works on StackForge, so follow the simple instructions above if you are building your own monasca-api. - -Currently this build is executed on the bare-precise nodes in StackForge and they only have maven 2. So, this build must be kept compatible with Maven 2. If another monasca-common jar is added as a dependency to [/java/pom.xml](/java/pom.xml), it must also be added to download/download.sh. - -Combining monasca-common, monasca-thresh, monasca-api and monasca-persister into one build would vastly simplify the builds but that is a future task. - -## Usage - - $ java -jar target/monasca-api.jar server config-file.yml - - -## Keystone Configuration - -For secure operation of the Monasca API, the API must be configured to use Keystone in the configuration file under the middleware section. Monasca only works with a Keystone v3 server. The important parts of the configuration are explained below: - -* serverVIP - This is the hostname or IP Address of the Keystone server -* serverPort - The port for the Keystone server -* useHttps - Whether to use https when making requests of the Keystone API -* truststore - If useHttps is true and the Keystone server is not using a certificate signed by a public CA recognized by Java, the CA certificate can be placed in a truststore so the Monasca API will trust it, otherwise it will reject the https connection. This must be a JKS truststore -* truststorePassword - The password for the above truststore -* connSSLClientAuth - If the Keystone server requires the SSL client used by the Monasca server to have a specific client certificate, this should be true, false otherwise -* keystore - The keystore holding the SSL Client certificate if connSSLClientAuth is true -* keystorePassword - The password for the keystore -* defaultAuthorizedRoles - An array of roles that authorize a user to access the complete Monasca API. User must have at least one of these roles. See below -* readOnlyAuthorizedRoles - An array of roles that authorize a user to only GET (but not POST, PUT...) metrics. See Keystone Roles below -* agentAuthorizedRoles - An array of roles that authorize only the posting of metrics.
See Keystone Roles below -* adminAuthMethod - "password" if the Monasca API should use adminUser and adminPassword to log in to the Keystone server to check the user's token, "token" if the Monasca API should use adminToken -* adminUser - Admin user name -* adminPassword - Admin user password -* adminProjectId - Specify the project ID the api should use to request an admin token. Defaults to the admin user's default project. The adminProjectId option takes precedence over adminProjectName. -* adminProjectName - Specify the project name the api should use to request an admin token. Defaults to the admin user's default project. The adminProjectId option takes precedence over adminProjectName. -* adminToken - A valid admin user token if adminAuthMethod is token -* timeToCacheToken - How long the Monasca API should cache the user's token before checking it again - -### Keystone Roles - -The Monasca API has two levels of access: -* Full access - user can read/write metrics and Alarm Definitions and Alarms -* Agent access - user can only write metrics - -The reason for the "Agent access" level is that the Monasca Agent must be configured to use a Keystone user. Since the user and password are configured on all of the systems running the Monasca Agent, this user is the one most in danger of being compromised. If this user is limited to only writing metrics, then the damage can be limited. - -To configure the user to have full access, the user must have a role that is listed in defaultAuthorizedRoles. To configure a user to have only "Agent access", the user must have a role in agentAuthorizedRoles and none of the roles in defaultAuthorizedRoles. - -If you want to give users the ability to only view data, configure one or more roles in the readOnlyAuthorizedRoles list. A sketch of such a middleware configuration is shown after the Design Overview below. - -## Design Overview - -### Architectural layers - -Requests flow through the following architectural layers from top to bottom: - -* Resource - * Serves as the entrypoint into the service. - * Responsible for handling web service requests and performing structural request validation. -* Application - * Responsible for providing application level implementations for specific use cases. -* Domain - * Contains the technology agnostic core domain model and domain service definitions. - * Responsible for upholding invariants and defining state transitions. -* Infrastructure - * Contains technology specific implementations of domain services.
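For orientation only, here is a hedged, hypothetical sketch of what the middleware section of config-file.yml described above could look like. The key names come from the list above; every value is a placeholder, and the authoritative layout is the sample configuration file shipped with the Java implementation:

    middleware:
      serverVIP: keystone.example.org        # Keystone hostname or IP address (placeholder)
      serverPort: 5000
      useHttps: false
      connSSLClientAuth: false
      adminAuthMethod: password              # or "token" together with adminToken
      adminUser: admin
      adminPassword: secretadmin
      timeToCacheToken: 600
      defaultAuthorizedRoles: [monasca-user]              # full API access
      readOnlyAuthorizedRoles: [monasca-read-only-user]   # GET only
      agentAuthorizedRoles: [monasca-agent]               # may only post metrics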
diff --git a/docs/monasca-api-spec.md b/docs/monasca-api-spec.md deleted file mode 100644 index 88056e03b..000000000 --- a/docs/monasca-api-spec.md +++ /dev/null @@ -1,3709 +0,0 @@ - -# Monasca API - -Date: November 5, 2014 - -Document Version: v2.0 - - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Overview](#overview) - - [Metric Name and Dimensions](#metric-name-and-dimensions) - - [Name](#name) - - [Dimensions](#dimensions) - - [Text Representation](#text-representation) - - [Measurement](#measurement) - - [Value Meta](#value-meta) - - [Alarm Definitions and Alarms](#alarm-definitions-and-alarms) - - [Deterministic or non-deterministic alarms](#deterministic-or-non-deterministic-alarms) - - [Alarm Definition Expressions](#alarm-definition-expressions) - - [Syntax](#syntax) - - [Simple Example](#simple-example) - - [More Complex Example](#more-complex-example) - - [Compound alarm example](#compound-alarm-example) - - [Deterministic alarm example](#deterministic-alarm-example) - - [Non-deterministic alarm with deterministic sub expressions](#non-deterministic-alarm-with-deterministic-sub-expressions) - - [Changing Alarm Definitions](#changing-alarm-definitions) - - [Notification Methods](#notification-methods) -- [Common Request Headers](#common-request-headers) - - [Common HTTP Request Headers](#common-http-request-headers) - - [Non-standard request headers](#non-standard-request-headers) -- [Common Responses](#common-responses) -- [Paging](#paging) - - [Offset](#offset) - - [Limit](#limit) -- [JSON Results](#json-results) -- [Versions](#versions) - - [List Versions](#list-versions) - - [GET /](#get-) - - [Headers](#headers) - - [Path Parameters](#path-parameters) - - [Query Parameters](#query-parameters) - - [Request Body](#request-body) - - [Request Examples](#request-examples) - - [Response](#response) - - [Status code](#status-code) - - [Response Body](#response-body) - - [Response Examples](#response-examples) - - [Get Version](#get-version) - - [Get /{version_id}](#get-version_id) - - [Headers](#headers-1) - - [Path Parameters](#path-parameters-1) - - [Query Parameters](#query-parameters-1) - - [Request Body](#request-body-1) - - [Request Examples](#request-examples-1) - - [Response](#response-1) - - [Status code](#status-code-1) - - [Response Body](#response-body-1) - - [Response Examples](#response-examples-1) -- [Health Check](#healthcheck) - - [Complex check](#complex-check) - - [Simple check](#simple-check) -- [Metrics](#metrics) - - [Create Metric](#create-metric) - - [POST /v2.0/metrics](#post-v20metrics) - - [Headers](#headers-2) - - [Path Parameters](#path-parameters-2) - - [Query Parameters](#query-parameters-2) - - [Request Body](#request-body-2) - - [Request Examples](#request-examples-2) - - [Single metric](#single-metric) - - [Single metric with value_meta](#single-metric-with-value_meta) - - [Array of metrics](#array-of-metrics) - - [Response](#response-2) - - [Status Code](#status-code) - - [Response Body](#response-body-2) - - [List metrics](#list-metrics) - - [GET /v2.0/metrics](#get-v20metrics) - - [Headers](#headers-3) - - [Path Parameters](#path-parameters-3) - - [Query Parameters](#query-parameters-3) - - [Request Body](#request-body-3) - - [Request Examples](#request-examples-3) - - [Response](#response-3) - - [Status Code](#status-code-1) - - [Response Body](#response-body-3) - - [Response Examples](#response-examples-2) - - [List dimension values](#list-dimension-values) - - [GET
/v2.0/metrics/dimensions/names/values](#get-v20metricsdimensionsnamesvalues) - - [Headers](#headers-4) - - [Path Parameters](#path-parameters-4) - - [Query Parameters](#query-parameters-4) - - [Request Body](#request-body-4) - - [Request Examples](#request-examples-4) - - [Response](#response-4) - - [Status Code](#status-code-2) - - [Response Body](#response-body-4) - - [Response Examples](#response-examples-3) - - [List dimension names](#list-dimension-names) - - [GET /v2.0/metrics/dimensions/names](#get-v20metricsdimensionsnames) - - [Headers](#headers-5) - - [Path Parameters](#path-parameters-5) - - [Query Parameters](#query-parameters-5) - - [Request Body](#request-body-5) - - [Request Examples](#request-examples-5) - - [Response](#response-5) - - [Status Code](#status-code-3) - - [Response Body](#response-body-5) - - [Response Examples](#response-examples-4) -- [Measurements](#measurements) - - [List measurements](#list-measurements) - - [GET /v2.0/metrics/measurements](#get-v20metricsmeasurements) - - [Headers](#headers-6) - - [Path Parameters](#path-parameters-6) - - [Query Parameters](#query-parameters-6) - - [Request Body](#request-body-6) - - [Request Examples](#request-examples-6) - - [Response](#response-6) - - [Status Code](#status-code-4) - - [Response Body](#response-body-6) - - [Response Examples](#response-examples-5) -- [Metric Names](#metric-names) - - [List names](#list-names) - - [GET /v2.0/metrics/names](#get-v20metricsnames) - - [Headers](#headers-7) - - [Path Parameters](#path-parameters-7) - - [Query Parameters](#query-parameters-7) - - [Request Body](#request-body-7) - - [Request Examples](#request-examples-7) - - [Response](#response-7) - - [Status Code](#status-code-5) - - [Response Body](#response-body-7) - - [Response Examples](#response-examples-6) -- [Statistics](#statistics) - - [List statistics](#list-statistics) - - [GET /v2.0/metrics/statistics](#get-v20metricsstatistics) - - [Headers](#headers-8) - - [Path Parameters](#path-parameters-8) - - [Query Parameters](#query-parameters-8) - - [Request Body](#request-body-8) - - [Request Examples](#request-examples-8) - - [Response](#response-8) - - [Status Code](#status-code-6) - - [Response Body](#response-body-8) - - [Response Examples](#response-examples-7) -- [Notification Methods](#notification-methods-1) - - [Create Notification Method](#create-notification-method) - - [POST /v2.0/notification-methods](#post-v20notification-methods) - - [Headers](#headers-9) - - [Path Parameters](#path-parameters-9) - - [Query Parameters](#query-parameters-9) - - [Request Body](#request-body-9) - - [Request Examples](#request-examples-9) - - [Response](#response-9) - - [Status Code](#status-code-7) - - [Response Body](#response-body-9) - - [Response Examples](#response-examples-8) - - [List Notification Methods](#list-notification-methods) - - [GET /v2.0/notification-methods](#get-v20notification-methods) - - [Headers](#headers-10) - - [Path Parameters](#path-parameters-10) - - [Query Parameters](#query-parameters-10) - - [Request Body](#request-body-10) - - [Request Examples](#request-examples-10) - - [Response](#response-10) - - [Status Code](#status-code-8) - - [Response Body](#response-body-10) - - [Response Examples](#response-examples-9) - - [Get Notification Method](#get-notification-method) - - [GET /v2.0/notification-methods/{notification_method_id}](#get-v20notification-methodsnotification_method_id) - - [Headers](#headers-11) - - [Path Parameters](#path-parameters-11) - - [Query 
Parameters](#query-parameters-11) - - [Request Body](#request-body-11) - - [Request Examples](#request-examples-11) - - [Response](#response-11) - - [Status Code](#status-code-9) - - [Response Body](#response-body-11) - - [Response Examples](#response-examples-10) - - [Update Notification Method](#update-notification-method) - - [PUT /v2.0/notification-methods/{notification_method_id}](#put-v20notification-methodsnotification_method_id) - - [Headers](#headers-12) - - [Path Parameters](#path-parameters-12) - - [Query Parameters](#query-parameters-12) - - [Request Body](#request-body-12) - - [Request Examples](#request-examples-12) - - [Response](#response-12) - - [Status Code](#status-code-10) - - [Response Body](#response-body-12) - - [Response Examples](#response-examples-11) - - [Patch Notification Method](#patch-notification-method) - - [PATCH /v2.0/notification-methods/{notification_method_id}](#patch-v20notification-methodsnotification_method_id) - - [Headers](#headers-13) - - [Path Parameters](#path-parameters-13) - - [Query Parameters](#query-parameters-13) - - [Request Body](#request-body-13) - - [Request Examples](#request-examples-13) - - [Response](#response-13) - - [Status Code](#status-code-11) - - [Response Body](#response-body-13) - - [Response Examples](#response-examples-12) - - [Delete Notification Method](#delete-notification-method) - - [DELETE /v2.0/notification-methods/{notification_method_id}](#delete-v20notification-methodsnotification_method_id) - - [Headers](#headers-14) - - [Path Parameters](#path-parameters-14) - - [Query Parameters](#query-parameters-14) - - [Request Body](#request-body-14) - - [Request Examples](#request-examples-14) - - [Response](#response-14) - - [Status Code](#status-code-12) - - [Response Body](#response-body-14) - - [List supported Notification Method Types](#list-supported-notification-method-types) - - [GET /v2.0/notification-methods/types/](#get-v20notification-methodstypes) - - [Headers](#headers-15) - - [Query Parameters](#query-parameters-15) - - [Request Body](#request-body-15) - - [Request Examples](#request-examples-15) - - [Response](#response-15) - - [Status Code](#status-code-13) - - [Response Body](#response-body-15) - - [Response Examples](#response-examples-13) -- [Alarm Definitions](#alarm-definitions) - - [Create Alarm Definition](#create-alarm-definition) - - [POST /v2.0/alarm-definitions](#post-v20alarm-definitions) - - [Headers](#headers-16) - - [Path Parameters](#path-parameters-15) - - [Query Parameters](#query-parameters-16) - - [Request Body](#request-body-16) - - [Request Examples](#request-examples-16) - - [Response](#response-16) - - [Status Code](#status-code-14) - - [Response Body](#response-body-16) - - [Response Examples](#response-examples-14) - - [List Alarm Definitions](#list-alarm-definitions) - - [GET /v2.0/alarm-definitions](#get-v20alarm-definitions) - - [Headers](#headers-17) - - [Path Parameters](#path-parameters-16) - - [Query Parameters](#query-parameters-17) - - [Request Body](#request-body-17) - - [Request Examples](#request-examples-17) - - [Response](#response-17) - - [Status Code](#status-code-15) - - [Response Body](#response-body-17) - - [Response Examples](#response-examples-15) - - [Get Alarm Definition](#get-alarm-definition) - - [GET /v2.0/alarm-definitions/{alarm_definition_id}](#get-v20alarm-definitionsalarm_definition_id) - - [Headers](#headers-18) - - [Path Parameters](#path-parameters-17) - - [Query Parameters](#query-parameters-18) - - [Request Body](#request-body-18) - - 
[Response](#response-18) - - [Status Code](#status-code-16) - - [Response Body](#response-body-18) - - [Response Examples](#response-examples-16) - - [Update Alarm Definition](#update-alarm-definition) - - [PUT /v2.0/alarm-definitions/{alarm_definition_id}](#put-v20alarm-definitionsalarm_definition_id) - - [Headers](#headers-19) - - [Path Parameters](#path-parameters-18) - - [Query Parameters](#query-parameters-19) - - [Request Body](#request-body-19) - - [Request Examples](#request-examples-18) - - [Response](#response-19) - - [Status Code](#status-code-17) - - [Response Body](#response-body-19) - - [Response Examples](#response-examples-17) - - [Patch Alarm Definition](#patch-alarm-definition) - - [PATCH /v2.0/alarm-definitions/{alarm_definition_id}](#patch-v20alarm-definitionsalarm_definition_id) - - [Headers](#headers-20) - - [Path Parameters](#path-parameters-19) - - [Query Parameters](#query-parameters-20) - - [Request Body](#request-body-20) - - [Request Examples](#request-examples-19) - - [Response](#response-20) - - [Status Code](#status-code-18) - - [Response Body](#response-body-20) - - [Response Examples](#response-examples-18) - - [Delete Alarm Definition](#delete-alarm-definition) - - [DELETE /v2.0/alarm-definitions/{alarm_definition_id}](#delete-v20alarm-definitionsalarm_definition_id) - - [Headers](#headers-21) - - [Path Parameters](#path-parameters-20) - - [Query Parameters](#query-parameters-21) - - [Request Body](#request-body-21) - - [Request Examples](#request-examples-20) - - [Response](#response-21) - - [Status Code](#status-code-19) - - [Response Body](#response-body-21) -- [Alarms](#alarms) - - [List Alarms](#list-alarms) - - [GET /v2.0/alarms](#get-v20alarms) - - [Headers](#headers-22) - - [Path Parameters](#path-parameters-21) - - [Query Parameters](#query-parameters-22) - - [Request Body](#request-body-22) - - [Request Examples](#request-examples-21) - - [Response](#response-22) - - [Status Code](#status-code-20) - - [Response Body](#response-body-22) - - [Response Examples](#response-examples-19) - - [List Alarms State History](#list-alarms-state-history) - - [GET /v2.0/alarms/state-history](#get-v20alarmsstate-history) - - [Headers](#headers-23) - - [Path Parameters](#path-parameters-22) - - [Query Parameters](#query-parameters-23) - - [Request Body](#request-body-23) - - [Response](#response-23) - - [Status Code](#status-code-21) - - [Response Body](#response-body-23) - - [Response Examples](#response-examples-20) - - [Get Alarm](#get-alarm) - - [GET /v2.0/alarms/{alarm_id}](#get-v20alarmsalarm_id) - - [Headers](#headers-24) - - [Path Parameters](#path-parameters-23) - - [Query Parameters](#query-parameters-24) - - [Request Body](#request-body-24) - - [Response](#response-24) - - [Status Code](#status-code-22) - - [Response Body](#response-body-24) - - [Response Examples](#response-examples-21) - - [Update Alarm](#update-alarm) - - [PUT /v2.0/alarms/{alarm_id}](#put-v20alarmsalarm_id) - - [Headers](#headers-25) - - [Path Parameters](#path-parameters-24) - - [Query Parameters](#query-parameters-25) - - [Request Body](#request-body-25) - - [Request Examples](#request-examples-22) - - [Response](#response-25) - - [Status Code](#status-code-23) - - [Response Body](#response-body-25) - - [Response Examples](#response-examples-22) - - [Patch Alarm](#patch-alarm) - - [PATCH /v2.0/alarms/{alarm_id}](#patch-v20alarmsalarm_id) - - [Headers](#headers-26) - - [Path Parameters](#path-parameters-25) - - [Query Parameters](#query-parameters-26) - - [Request 
Body](#request-body-26) - - [Request Examples](#request-examples-23) - - [Response](#response-26) - - [Status Code](#status-code-24) - - [Response Body](#response-body-26) - - [Response Examples](#response-examples-23) - - [Delete Alarm](#delete-alarm) - - [DELETE /v2.0/alarms/{alarm_id}](#delete-v20alarmsalarm_id) - - [Headers](#headers-27) - - [Path Parameters](#path-parameters-26) - - [Query Parameters](#query-parameters-27) - - [Request Body](#request-body-27) - - [Request Examples](#request-examples-24) - - [Response](#response-27) - - [Status Code](#status-code-25) - - [Response Body](#response-body-27) - - [List Alarm State History](#list-alarm-state-history) - - [GET /v2.0/alarms/{alarm_id}/state-history](#get-v20alarmsalarm_idstate-history) - - [Headers](#headers-28) - - [Path Parameters](#path-parameters-27) - - [Query Parameters](#query-parameters-28) - - [Request Body](#request-body-28) - - [Request Data](#request-data) - - [Response](#response-28) - - [Status Code](#status-code-26) - - [Response Body](#response-body-28) - - [Response Examples](#response-examples-24) -- [Logs](#logs) - - [Create Logs](#create-logs) - - [POST /v2.0/logs](#post-logs) - - [Headers](#headers-29) - - [Path Parameters](#path-parameters-28) - - [Query Parameters](#query-parameters-29) - - [Request Body](#request-body-29) - - [Request Examples](#request-examples-25) - - [Response](#response-29) - - [Status Code](#status-code-27) - - [Response Body](#response-body-28) -- [License](#license) - - - -# Overview -This document describes the Monasca API v2.0, which supports Monitoring as a Service (MONaaS). The Monasca API provides a RESTful JSON interface for interacting with and managing monitoring-related resources. - -The API consists of eight main resources: - -1. Versions - Provides information about the supported versions of the API. -2. Metrics - Provides for storage and retrieval of metrics. -3. Measurements - Operations for querying measurements of metrics. -4. Statistics - Operations for evaluating statistics of metrics. -5. Notification Methods - Represents a method, such as email, which can be associated with an alarm definition via an action. When an alarm is triggered, the notification methods associated with the alarm definition are invoked. -6. Alarm Definitions - Provides CRUD operations for alarm definitions. -7. Alarms - Provides CRUD operations for alarms, and querying the alarm state history. -8. Logs - Provides for storage of logs. - -Before using the API, you must first get a valid auth token from Keystone. All API operations require an auth token specified in the header of the HTTP request. - - -## Metric Name and Dimensions -A metric is uniquely identified by a name and set of dimensions. - -### Name -Defines the name of a metric. A name is of type string(255). The name may include any characters except the following: `> < = { } ( ) , ' " \ ; &`. Note that JSON does allow control characters (such as `\n`); however, they should not be used in metric names. - -### Dimensions -A dictionary of (key, value) pairs. The key and value are of type string(255). -Dimension keys may not begin with '_' (underscore). The dimension key and value -strings may include any characters except the following: -`> < = { } , ' " \ ; &`. Note that JSON does allow control characters (such as -`\n`); however, they should not be used in dimension keys or values. Dimension -keys and values must not be empty.
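- -Since these restrictions are easy to get wrong, here is a small illustrative sketch (not part of the API; the helper names are hypothetical) that checks a metric name and dimensions against the rules described above: - -```python -# Illustrative only: validate a metric name and dimensions against the -# character, length and leading-underscore restrictions described above. -NAME_FORBIDDEN = set("><={}(),'\"\\;&") -DIM_FORBIDDEN = set("><={},'\"\\;&")  # parentheses are allowed in dimensions - -def valid_name(name): -    return 0 < len(name) <= 255 and not NAME_FORBIDDEN & set(name) - -def valid_dimensions(dimensions): -    for key, value in dimensions.items(): -        if not key or not value or key.startswith("_"): -            return False -        if len(key) > 255 or len(value) > 255: -            return False -        if DIM_FORBIDDEN & (set(key) | set(value)): -            return False -    return True - -assert valid_name("cpu.idle_perc") -assert valid_dimensions({"service": "monitoring", "hostname": "mini-mon"}) -assert not valid_dimensions({"_bad": "value"})  # keys may not begin with '_' -```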
- -### Text Representation -In this document, metrics will be represented in the form `name{name=value,name=value}` where name is the metric name and the name=value pairs in the curly braces are the dimensions. For example, `cpu.idle_perc{service=monitoring,hostname=mini-mon}` represents a metric with the name "cpu.idle_perc" and the dimensions "service=monitoring" and "hostname=mini-mon". - -## Measurement -A measurement is a value with a timestamp for a specific metric. The value is represented by a double, e.g. 42.0 or 42.42. - -### Value Meta -Optionally, a measurement may also contain extra data about the value, known as value meta. Value meta is a set of name/value pairs that add textual data to the value of the measurement. The value meta will be returned from the API when the measurement is read. Only measurements that were written with value meta will have the key/value pairs when read from the API. The value meta is ignored when computing statistics such as average on measurements. - -For an example of how value meta is used, imagine this metric: http_status{url=http://localhost:8070/healthcheck, hostname=devstack, service=object-storage}. The measurements for this metric have a value of either 1 or 0 depending on whether the status check succeeded. If the check fails, it would be helpful to have the actual HTTP status code and error message if possible. So instead of just a value, the measurement will be something like: -{Timestamp=now(), value=1, value_meta{http_rc=500, error="Error accessing MySQL"}} - -Up to 16 separate key/value pairs of value meta are allowed per measurement. The keys are required, are trimmed of leading and trailing whitespace, and have a maximum length of 255 characters. The value is a string, and the value meta as a whole (keys, values and the '{"":""}' JSON syntax combined) has a maximum length of 2048 characters. The value can be an empty string. Whitespace is not trimmed from the values. - -## Alarm Definitions and Alarms - -Alarm Definitions are policies that specify how Alarms should be created. By using Alarm Definitions, the user doesn't have to create individual alarms for each system or service. Instead, a small number of Alarm Definitions can be managed and Monasca will create Alarms for systems and services as they appear. - -An Alarm Definition has an expression for evaluating one or more metrics to determine if there is a problem. Based on the Alarm Definition expression and match_by value, Monasca will create one or more Alarms depending on the measurements that are received. The match_by parameter specifies which dimension or dimensions should be used to determine if one or more alarms will be created. - -An example is the best way to show this. Imagine two Alarm Definitions have been created: - -Alarm Definition 1 has an expression of `avg(cpu.idle_perc{service=monitoring}) < 20` and the match_by parameter is not set. Alarm Definition 2 has an expression of `min(cpu.idle_perc{service=monitoring}) < 10` and the match_by parameter is set to `hostname`. - -When a measurement for the metric cpu.idle_perc{service=monitoring,hostname=mini-mon} is first received after the Alarm Definitions have been created, an Alarm is created for both Alarm Definitions. The metric is added to both Alarms.
The following set of Alarm Definitions and Alarms would exist: - -Alarm Definition 1: -``` -Alarm 1 - Metrics: cpu.idle_perc{service=monitoring,hostname=mini-mon} -``` - -Alarm Definition 2: -``` -Alarm 1 - Metrics: cpu.idle_perc{service=monitoring,hostname=mini-mon} -``` - -Now, when a measurement for the metric cpu.idle_perc{service=monitoring,hostname=devstack} is received, the two Alarm Definitions define different behaviors. Since the value for the hostname dimension is different from the value for the existing Alarm from Alarm Definition 2, and Alarm Definition 2 has specified a match_by parameter on `hostname`, a new Alarm will be created. Alarm Definition 1 does not have a value for match_by, so this metric is added to the existing Alarm. This gives us the following set of Alarm Definitions and Alarms: - -Alarm Definition 1: -``` -Alarm 1 - Metrics: cpu.idle_perc{service=monitoring,hostname=mini-mon} and cpu.idle_perc{service=monitoring,hostname=devstack} -``` - -Alarm Definition 2: -``` -Alarm 1 - Metrics: cpu.idle_perc{service=monitoring,hostname=mini-mon} -Alarm 2 - Metrics: cpu.idle_perc{service=monitoring,hostname=devstack} -``` - -Alarm Definition 1 is evaluating the status of the monitoring service as a whole, while Alarm Definition 2 evaluates each system in the service. - -Now if another system is configured into the monitoring service, then when a measurement is received for its cpu.idle_perc metric, that metric will be added to the Alarm for Alarm Definition 1 and a new Alarm will be created for Alarm Definition 2, all without any user intervention. The new system is monitored without requiring the user to explicitly add alarms for it, as other monitoring systems require. - -If an Alarm Definition expression has multiple subexpressions, for example, `avg(cpu.idle_perc{service=monitoring}) < 10 or avg(cpu.user_perc{service=monitoring}) > 60`, and a match_by value set, then the metrics for both subexpressions must have the same value for the dimension specified in match_by. For example, assume this Alarm Definition: - -Expression `avg(cpu.idle_perc{service=monitoring}) < 10 or avg(cpu.user_perc{service=monitoring}) > 60` and match_by is `hostname` - -Now assume a measurement for each of these four metrics is received by Monasca: - -``` -cpu.idle_perc{service=monitoring,hostname=mini-mon} -cpu.idle_perc{service=monitoring,hostname=devstack} -cpu.user_perc{service=monitoring,hostname=mini-mon} -cpu.user_perc{service=monitoring,hostname=devstack} -``` - -This will cause two Alarms to be created, one for each unique value of hostname. One Alarm will have the metrics: - -``` -avg(cpu.idle_perc{service=monitoring,hostname=mini-mon}) and avg(cpu.user_perc{service=monitoring,hostname=mini-mon}) -``` - -and another will have the metrics: - -``` -avg(cpu.idle_perc{service=monitoring,hostname=devstack}) and avg(cpu.user_perc{service=monitoring,hostname=devstack}) -``` - -Note that the value of match_by, "hostname", is used to match the metrics between the subexpressions, hence the name 'match_by'. - -As a negative example, assume a measurement for the below metric is received by Monasca: - -``` -cpu.idle_perc{service=nova,hostname=nova1} -``` - -This metric does not have the service=monitoring dimension, so it will not match the Alarm Definition; no Alarm will be created, nor will the metric be added to an existing Alarm. - -An Alarm will only get created when measurements are seen for metrics that match all subexpressions in the Alarm Definition.
If match_by is set, then each metric must have a value for at least one of the values in match_by. If match_by is not set, only one Alarm will be created for an Alarm Definition. - -The value of the match_by parameter can also be a list, for example, `hostname,device`. In that case, Alarms will be created and metrics added based on all values of match_by. - -For example, assume the Alarm Definition with the expression `max(disk.space_used_perc{service=monitoring}) > 90` and match_by set to `hostname`. This will create one alarm for each system that contains all of the metrics for each device. If instead match_by is set to `hostname,device`, then a separate alarm will be created for each device in each system. - -To illustrate, assume a measurement for each of these four metrics is received by Monasca: -``` -disk.space_used_perc{device=/dev/sda1,hostname=mini-mon} -disk.space_used_perc{device=tmpfs,hostname=mini-mon} -disk.space_used_perc{device=/dev/sda1,hostname=devstack} -disk.space_used_perc{device=tmpfs,hostname=devstack} -``` - -Given the expression `max(disk.space_used_perc) > 90` and match_by set to `hostname`, this will create two alarms: - -``` -Alarm 1 - Metrics: disk.space_used_perc{device=/dev/sda1,hostname=mini-mon}, disk.space_used_perc{device=tmpfs,hostname=mini-mon} -Alarm 2 - Metrics: disk.space_used_perc{device=/dev/sda1,hostname=devstack}, disk.space_used_perc{device=tmpfs,hostname=devstack} -``` - -If instead match_by is set to `hostname,device`, then four alarms will be created: - -``` -Alarm 1 - Metrics: disk.space_used_perc{device=/dev/sda1,hostname=mini-mon} -Alarm 2 - Metrics: disk.space_used_perc{device=tmpfs,hostname=mini-mon} -Alarm 3 - Metrics: disk.space_used_perc{device=/dev/sda1,hostname=devstack} -Alarm 4 - Metrics: disk.space_used_perc{device=tmpfs,hostname=devstack} -``` - -The second setting of match_by will create an Alarm for each device, so for each device that fills up, a separate Alarm will be triggered. The first setting of match_by will give you fewer Alarms to display in the dashboard, but if an Alarm has already triggered for one device and another device fills up, the Alarm won't be triggered again. - -If desired, an Alarm Definition can be created that exactly matches a set of metrics. In that case, match_by should not be set. Only one Alarm will be created for that Alarm Definition. - -Alarms have a state that is set by the Threshold Engine based on the incoming measurements. - -* UNDETERMINED - No measurements have been received for at least one of the subexpressions in any period for at least 2 * periods (see below for the definition of period and periods) -* OK - Measurements have been received and the Alarm Definition Expression evaluates to false for the given measurements -* ALARM - Measurements have been received and the Alarm Definition Expression evaluates to true for the given measurements - -The Alarms are evaluated and their state is set once per minute. - -Alarms contain three fields that may be edited via the API. These are the alarm state, lifecycle state, and the link. The alarm state is updated by Monasca as measurements are evaluated, and can be changed manually as necessary. The lifecycle state and link fields are not maintained or updated by Monasca; instead, these are provided for storing information related to external tools. - -### Deterministic or non-deterministic alarms - -By default, all alarm definitions are assumed to be **non-deterministic**. -There are 3 possible states such alarms can transition to: *OK*, *ALARM*, -*UNDETERMINED*.
On the other hand, alarm definitions can also be -**deterministic**. In that case the alarm is allowed to transition only to the *OK* -and *ALARM* states. - -The expression ```avg(cpu.user_perc{hostname=compute_node_1}) > 10``` means that the potential -alarm and its transition to the *ALARM* state are restricted to a specific machine. If for some reason that -host crashed and stayed offline long enough, no measurements would be received from it. -In this case the alarm would transition to the *UNDETERMINED* state. - -On the other hand, some metrics are irregular and look more like events. One case is a -metric created only if something critical happens in the system, -for example an error in a log file or a deadlock in a database. -If a non-deterministic alarm definition were created using the expression ```count(log.error{component=mysql}) >= 1```, -that alarm could stay in the *UNDETERMINED* state for most of its lifetime. -However, from the operator's point of view, if there are no errors related to MySQL, everything works correctly. -The answer to that situation is to create a *deterministic* alarm definition -using the expression ```count(log.error{component=mysql}, deterministic) >= 1```. - -The deterministic alarm's main trait is that it never transitions to the *UNDETERMINED* state. -The alarm should be *OK* if no data is received. Such alarms also transition to *OK* immediately when created, -rather than to the *UNDETERMINED* state. - -Finally, it must be mentioned that an alarm definition can be composed of multiple expressions and -that *deterministic* applies to each of them individually. The entire alarm definition is considered *deterministic* -only if all of its expressions are deterministic. Otherwise the alarm is *non-deterministic*. - -For example: -``` -avg(disk.space_used_perc{hostname=compute_node_1}) >= 99 - and -count(log.error{hostname=compute_node_1,component=kafka},deterministic) >= 1 -``` -the potential alarm will transition to the *ALARM* state if there is no usable disk space left and kafka starts to report errors about its -inability to save data. The second expression is *deterministic*; however, the entire alarm will be kept in the *UNDETERMINED* state -until such a situation happens. - -On the other hand, an expression like this: -``` -avg(disk.space_used_perc{hostname=compute_node_1},deterministic) >= 99 - and -count(log.error{hostname=compute_node_1,component=kafka},deterministic) >= 1 -``` -makes the entire alarm *deterministic*. In other words, *all parts of the alarm's expression -must be marked as deterministic for the entire alarm to be considered deterministic*. -With a definition like the one above, the potential alarm will stay in the *OK* state as long as there is enough -disk space left at *compute_node_1* and there are no errors reported from *kafka* running -at the same host. - -## Alarm Definition Expressions -The alarm definition expression syntax allows the creation of simple or complex alarm definitions to handle a wide variety of needs. Alarm expressions are evaluated every 60 seconds. - -An alarm expression is a boolean equation which is used to evaluate the state of an alarm based on the received measurements. If the expression evaluates to true, the state of the alarm is set to ALARM. If it evaluates to false, the state of the alarm is set to OK. - -### Syntax - -At the highest level, you have an expression, which is made up of one or more subexpressions joined by boolean operators. Parentheses can be used around groups of subexpressions to indicate higher precedence.
In a BNF style format where items enclosed in [] are optional, '*' means zero or more times, and '|' means or. - -```` -<expression> -    ::= <subexpression> [(and | or) <subexpression>]* -```` - -More formally, taking boolean operator precedence into account, where 'and' has higher precedence than 'or', results in the following. - -```` -<expression> -    ::= <expression> <or_logical_operator> <subexpression> -    | <subexpression> - -<subexpression> -    ::= <subexpression> <and_logical_operator> <relational_expression> -    | <relational_expression> -```` -Each subexpression is ultimately a relational expression, which is made up of several parts with a couple of options: - -```` -<relational_expression> -    ::= <function> '(' <metric> [',' deterministic] [',' period] ')' <relational_operator> threshold_value ['times' periods] -    | '(' <expression> ')' -```` -Period must be an integer multiple of 60. The default period is 60 seconds. - -An expression is **non-deterministic** by default (i.e. when it does not contain the -*deterministic* keyword). If **deterministic** behavior is desired, -it is enough to add the *deterministic* keyword -inside the expression. - -The logical_operators are: `and` (also `&&`), `or` (also `||`). - -```` -<and_logical_operator> ::= 'and' | '&&' -<or_logical_operator> ::= 'or' | '||' -```` - -A metric can be a metric name only or a metric name followed by a list of dimensions. The dimensions further qualify the metric name. - - -```` -<metric> -    ::= metric_name -    | metric_name '{' <dimension_list> '}' -```` - -Any number of dimensions can follow the metric name. - -```` -<dimension_list> -    ::= <dimension> -    | <dimension_list> ',' <dimension> -```` - -A dimension is simply a key-value pair. - -```` -<dimension> -    ::= dimension_name '=' dimension_value -```` - -The relational_operators are: `lt` (also `<`), `gt` (also `>`), `lte` (also `<=`), `gte` (also `>=`). - - -```` -<relational_operator> -    ::= 'lt' | '<' | 'gt' | '>' | 'lte' | '<=' | 'gte' | '>=' -```` -The list of available statistical functions includes the following. - -``` -<function> -    ::= 'min' | 'max' | 'sum' | 'count' | 'avg' | 'last' -``` - -where 'avg' is the arithmetic average and 'last' is the single most recent value of the metric. When using the last function, the values for 'period' and 'periods' will be ignored. Note, threshold values are always in the same units as the metric that they are being compared to. - - -#### Simple Example -In this example the metric uniquely identified by the name `cpu.system_perc` and dimension `hostname=host.domain.com` is compared to the threshold 95. - -``` -cpu.system_perc{hostname=host.domain.com} > 95 -``` - -#### More Complex Example -In this example the average of the same metric as in the previous example is evaluated over a 120 second period 3 times, so that the expression will evaluate to true if the average is greater than 95 for a total of 360 seconds. - -``` -avg(cpu.system_perc{hostname=host.domain.com}, 120) > 95 times 3 -``` - -Note that period is the number of seconds over which the measurement is evaluated; it must be a multiple of 60. Periods is how many times in a row this expression must be true before triggering the alarm. Both period and periods are optional and default to 60 and 1, respectively. - -Functions work on all metric measurements during the period time frame. - -* min (returns the minimum of all the values) -* max (returns the maximum of all the values) -* sum (returns the sum of all the values) -* count (returns the number of metric observations) -* avg (returns the average of all the values) -* last (returns the single most recent value, ignores values for 'period' and 'periods') - -The metric is a compound identifier consisting of the metric name and optional dimensions. - -#### Compound alarm example -In this example a compound alarm expression is evaluated involving two thresholds.
- -``` -avg(cpu.system_perc{hostname=hostname.domain.com}) > 90 or avg(disk_read_ops{hostname=hostname.domain.com, device=vda}, 120) > 1000 -``` - -#### Deterministic alarm example -In this example an alarm is created with a single expression, which is deterministic. - -``` -count(log.error{}, deterministic) > 1 -``` - -#### Non-deterministic alarm with deterministic sub expressions -In this example the alarm's expression is composed of 3 subexpressions, two of which -are marked as **deterministic**. However, the entire expression is non-deterministic because -of the 3rd subexpression. - -``` -count(log.error{}, deterministic) > 1 or count(log.warning{}, deterministic) > 1 and avg(cpu.user_perc{}) > 10 -``` - -### Changing Alarm Definitions - -Once an Alarm Definition has been created, the value for match_by and any metrics in the expression cannot be changed. This is because those fields control the metrics used to create Alarms, and Alarms may already have been created. The function, operator, period, periods and any boolean operators can change, but not the metrics in subexpressions or the number of subexpressions. All other fields in an Alarm Definition can be changed. - -The only option to change metrics or match_by is to delete the existing Alarm Definition and create a new one. Deleting an Alarm Definition will delete all Alarms associated with it. - -## Notification Methods -Notification methods are resources used to specify a notification name, type and address that notifications can be sent to. After a notification method has been created, it can be associated with actions in alarm definitions, such that when an alarm state transition occurs, one or more notifications can be sent. - -Currently, notification method types of email, PagerDuty, webhook, Slack, Hipchat (discontinued) and Jira are supported. In the case of email, the address is the email address. In the case of PagerDuty, the address is the PagerDuty Service API Key. In the case of a webhook, the address is the URL of the webhook. See the Monasca Notification service plugin [documentation](https://opendev.org/openstack/monasca-notification#plugins) for more information. - -# Common Request Headers -This section documents the common request headers that are used in requests. - -## Common HTTP Request Headers -The standard HTTP request headers that are used in requests. - -* Content-Type - The Internet media type of the request body. Used with POST and PUT requests. Must be `application/json`. -* Accept - Internet media types that are acceptable in the response. Must be `application/json`. -* X-Requested-With (optional) - Which headers are requested to be allowed. Filled in by the browser as part of the CORS protocol. -* Origin (optional) - The origin of the page that is requesting cross-origin access. Filled in by the browser as part of the CORS protocol. - -## Non-standard request headers -The non-standard request headers that are used in requests. - -* X-Auth-Token (string, required) - Keystone auth token - -# Common Responses -The Monasca API utilizes HTTP response codes to inform clients of the success or failure of each request. Clients should use the HTTP response code to trigger error handling if necessary. This section discusses various API error responses. - -* 200 - A request succeeded. -* 201 - A resource has been successfully created.
-* 204 - No content -* 400 - Bad request -* 401 - Unauthorized -* 404 - Not found -* 409 - Conflict -* 422 - Unprocessable entity - -# Paging -The Monasca API implements a paging mechanism to allow users to 'page' through result sets returned from the API. The paging functionality is limited to resources that return unbounded lists of results. This permits the user to consume as much data from the API as is needed without placing undue memory consumption burdens on the Monasca API server. The paging mechanism is accomplished by allowing the user to specify an offset and a limit in the request URL as query parameters. - -For example: - -``` -"http://192.168.10.4:8070/v2.0/metrics/measurements?offset=2015-03-03T05%3A21%3A55Z&limit=1000&name=cpu.system_perc&dimensions=hostname%3Adevstack&start_time=2014-07-18T03%3A00%3A00Z" - -``` - -Result sets that would have returned more results had there not been a limit will include a next link with the offset prepopulated. The user need only use the next link to get the next set of results. - -If no limit is specified in the request URL, then a server-wide configurable limit is applied. - - -## Offset -Offsets can be either identifier offsets, timestamp offsets or combinational offsets that have an identifier part and a timestamp part. The identifier can be an integer or string (including hexadecimal numbers). The use of either integer, string, timestamp or combination is determined by the resource being queried. - -For example, an integer offset would look like this: - -``` -offset=999 - -``` -Integer offsets are zero based. - -A string offset would look like this: - -``` -offset=c60ec47e-5038-4bf1-9f95-4046c6e9a759 - -``` - -A hexadecimal string offset would look like this: - -``` - -offset=01ce0acc66131296c8a17294f39aee44ea8963ec - -``` - -A timestamp offset would look like this: - -``` -offset=2104-01-01T00:00:01Z - -``` - -A dimension value offset would look as follows: - -``` -offset=dimensionValue2 - -``` -A combinational offset with a hexadecimal ID would look as follows: -``` -offset=01ce0acc66131296c8a17294f39aee44ea8963ec_2104-01-01T00:00:01Z -``` - -Different resources use different offset types because the internal implementations of different resources depend on different mechanisms for indexing and identifying resources. For example, the offset in measurement resources contains both an ID and a timestamp. The type and form of the offsets for each resource can be determined by referring to the examples in each resource section below. - -The offset is determined by the ID and/or timestamp values of the last element in the result list. Users wishing to manually create a query URL can use the ID and/or timestamp of the last element in the previously returned result set as the offset. The following result set will return all elements with an ID greater than the ID in the offset, and if the offset is two-part, also all the elements with the same ID as that in the offset and having a timestamp later than the timestamp value in the offset. The automatically generated offset in the next link does exactly this; it uses the ID and/or timestamp of the last element. - -The offset can take the form of an integer ID, string ID, timestamp, or a combination of both ID and timestamp, but the user should treat the offset as an opaque reference. When using offsets in manually generated URLs, users enter them as strings that look like integers, timestamps, or strings. Future releases may change the type and form of the offsets for each resource.
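- -To illustrate the paging mechanism, here is a minimal client-side sketch (assuming the Python `requests` package and the example endpoint and token used throughout this spec; `list_all_metrics` is a hypothetical helper, not part of the API) that simply follows each result set's 'next' link until none remains: - -```python -import requests - -BASE_URL = "http://192.168.10.4:8070"  # example endpoint from this spec -HEADERS = {"X-Auth-Token": "2b8882ba2ec44295bf300aecb2caa4f7"}  # example token - -def list_all_metrics(): -    """Yield every metric element, following 'next' links page by page.""" -    url = BASE_URL + "/v2.0/metrics?limit=1000" -    while url: -        body = requests.get(url, headers=HEADERS).json() -        for element in body.get("elements", []): -            yield element -        # A 'next' link is present only when more results remain; its href -        # already carries the offset taken from the last element returned. -        url = next((link["href"] for link in body.get("links", []) -                    if link.get("rel") == "next"), None) - -for metric in list_all_metrics(): -    print(metric["name"], metric["dimensions"]) -```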
- -## Limit -The Monasca API applies a server-wide default limit, which is -hard-coded to `10,000` entries. Users may specify their own limit in the URL, -but the server-wide limit may not be exceeded, e.g.: - -``` -limit=5000 - -``` -# JSON Results -All Monasca API results are in the form of JSON. For resources that return a list of elements, the JSON object returned will contain a 'links' array and an 'elements' array. - -The 'links' array will contain a 'self' element that is the original URL of the request that was used to generate the result. The 'links' array may also contain a 'next' element if the number of elements in the result would exceed the query limit. The 'next' link can be used to query the Monasca API for the next set of results, thus allowing the user to page through lengthy data sets. - -The 'elements' array will contain the items from the resource that match the query parameters. Each element will have an 'id' element. The 'id' element of the last item in the elements list is used as the offset in the 'next' link. - -For example: - -``` -{ - "links": [ - { - "rel": "self", - "href": "http://192.168.10.4:8070/v2.0/metrics?limit=2" - }, - { - "rel": "next", - "href": "http://192.168.10.4:8070/v2.0/metrics?offset=1&limit=2" - } - ], - "elements": [ - { - "id": 0, - "name": "name1", - "dimensions": { - "key1": "value1" - } - }, - { - "id": 1, - "name": "name2", - "dimensions": { - "key1": "value1" - } - } - ] -} -``` - -# Versions -The versions resource supplies operations for accessing information about supported versions of the API. - -## List Versions -Lists the supported versions of the Monasca API. - -### GET / - -#### Headers -* X-Auth-Token (string, required) - Keystone auth token -* Accept (string) - application/json - -#### Path Parameters -None. - -#### Query Parameters -None. - -#### Request Body -None. - -#### Request Examples -``` -GET / HTTP/1.1 -Host: 192.168.10.4:8070 -X-Auth-Token: 2b8882ba2ec44295bf300aecb2caa4f7 -Accept: application/json -Cache-Control: no-cache -``` - -### Response -#### Status code -* 200 - OK - -#### Response Body -Returns a JSON object with a 'links' array of links and an 'elements' array of supported versions. - -#### Response Examples -``` -{ - "links": [ - { - "rel": "self", - "href": "http://192.168.10.4:8070/" - } - ], - "elements": [ - { - "id": "v2.0", - "links": [ - { - "rel": "self", - "href": "http://192.168.10.4:8070/v2.0" - } - ], - "status": "CURRENT", - "updated": "2014-07-18T03:25:02.423Z" - } - ] -} -``` -___ - -## Get Version -Gets detail about the specified version of the Monasca API. - -### Get /{version_id} - -#### Headers -* X-Auth-Token (string, required) - Keystone auth token -* Accept (string) - application/json - -#### Path Parameters -* version_id (string, required) - Version ID of API - -#### Query Parameters -None. - -#### Request Body -None. - -#### Request Examples -``` -GET /v2.0/ HTTP/1.1 -Host: 192.168.10.4:8070 -Content-Type: application/json -X-Auth-Token: 2b8882ba2ec44295bf300aecb2caa4f7 -Cache-Control: no-cache -``` - -### Response -#### Status code -* 200 - OK - -#### Response Body -Returns a JSON version object with details about the specified version. - -#### Response Examples -``` -{ - "id":"v2.0", - "links":[ - { - "rel":"self", - "href":"http://192.168.10.4:8070/v2.0/" - } - ], - "status":"CURRENT", - "updated":"2014-07-18T03:25:02.423Z" -} -``` -___ - -# Healthcheck -The Monasca API comes with a built-in healthcheck mechanism.
It is available in two flavours, both accessible -under the `/healthcheck` endpoint. - -## Complex check -The complex check not only returns a response with a success code if the Monasca API is up and running, but it also verifies that -dependent components, such as __Kafka__, the __Alarm database__ (MariaDB/MySQL, PostgreSQL) and the __Metrics database__ (Cassandra, InfluxDB), -are healthy too. - -The Monasca API will respond with the following codes: -* 200 - both the API and external components are healthy. -* 503 - the API is running but problems with peripheral components have been spotted. - -Example: `curl -XGET 192.168.10.6:8070/healthcheck` - -### Peripheral checks -* __Kafka__ is considered healthy if a connection to the broker can be established and the configured topics can be found. -* __Alarm Database__ (MariaDB/MySQL, PostgreSQL) is considered healthy if a connection to the database can be established - and a sample query can be executed. -* __Time Series Database__ (TSDB) is considered healthy as follows: if `InfluxDB` is configured, the health check is verified according to the -InfluxDB documentation ([/ping](https://docs.influxdata.com/influxdb/v1.1/tools/api/)); if `Cassandra` is configured, the health check is verified through a new connection to the database. - -## Simple check -The simple check returns a response only if the Monasca API is up and running. It does not return any data -because it is accessible only via a `HEAD` request. If the Monasca API is up and running, a `204` -response code is expected. - -Example: `curl -XHEAD 192.168.10.6:8070/healthcheck` - -# Metrics -The metrics resource allows metrics to be created and queried. The `X-Auth-Token` is used to derive the tenant that submits metrics. Metrics are stored and scoped to the tenant that submits them, or if the `tenant_id` query parameter is specified and the tenant has the `monitoring-delegate` role, the metrics are stored using the specified tenant ID. Note that several of the GET methods also support the tenant_id query parameter, but the `monasca-admin` role is required to get cross-tenant metrics, statistics, etc. - -## Create Metric -Create metrics. - -### POST /v2.0/metrics - -#### Headers -* X-Auth-Token (string, required) - Keystone auth token -* Content-Type (string, required) - application/json - -#### Path Parameters -None. - -#### Query Parameters -* tenant_id (string, optional, restricted) - Tenant ID to create metrics on behalf of. This parameter can be used to submit metrics from one tenant to another. Normally, this parameter is used when the Agent is being run as an operational monitoring tenant, such as monitoring OpenStack infrastructure, and needs to submit metrics for an OpenStack resource, such as a VM, but those metrics need to be accessible to the tenant that owns the resource. Usage of this query parameter is restricted to users with the `monitoring-delegate` role. - -#### Request Body -Consists of a single metric object or an array of metric objects. A metric has the following properties: - -* name (string(255), required) - The name of the metric. -* dimensions ({string(255): string(255)}, optional) - A dictionary consisting of (key, value) pairs used to uniquely identify a metric. -* timestamp (string, required) - The timestamp in milliseconds from the Epoch. -* value (float, required) - Value of the metric. Values with base-10 exponents greater than 126 or less than -130 are truncated. -* value_meta ({string(255): string}(2048), optional) - A dictionary consisting of (key, value) pairs used to add information about the value.
Value_meta key/value combinations must be 2048 characters or less, including the 7 characters of JSON syntax ('{"":""}') in every JSON string. - -The name and dimensions are used to uniquely identify a metric. - -#### Request Examples - -##### Single metric -POST a single metric. - -``` -POST /v2.0/metrics HTTP/1.1 -Host: 192.168.10.4:8070 -Content-Type: application/json -X-Auth-Token: 27feed73a0ce4138934e30d619b415b0 -Cache-Control: no-cache - -{ - "name":"name1", - "dimensions":{ - "key1":"value1", - "key2":"value2" - }, - "timestamp":1405630174123, - "value":1.0 -} -``` - -##### Single metric with value_meta -POST a single metric with value_meta. - -``` -POST /v2.0/metrics HTTP/1.1 -Host: 192.168.10.4:8070 -Content-Type: application/json -X-Auth-Token: 27feed73a0ce4138934e30d619b415b0 -Cache-Control: no-cache - -{ - "name":"name1", - "dimensions":{ - "key1":"value1", - "key2":"value2" - }, - "timestamp":1405630174123, - "value":1.0, - "value_meta":{ - "key1":"value1", - "key2":"value2" - } -} -``` - -##### Array of metrics -POST an array of metrics. - -``` -POST /v2.0/metrics HTTP/1.1 -Host: 192.168.10.4:8070 -Content-Type: application/json -X-Auth-Token: 27feed73a0ce4138934e30d619b415b0 -Cache-Control: no-cache - -[ - { - "name":"name1", - "dimensions":{ - "key1":"value1", - "key2":"value2" - }, - "timestamp":1405630174123, - "value":1.0 - }, - { - "name":"name2", - "dimensions":{ - "key1":"value1", - "key2":"value2" - }, - "timestamp":1405630174123, - "value":2.0, - "value_meta":{ - "key1":"value1", - "key2":"value2" - } - } -] -``` - -### Response -#### Status Code -* 204 - No Content - -#### Response Body -This request does not return a response body. -___ - -## List metrics -Get metrics. - -### GET /v2.0/metrics - -#### Headers -* X-Auth-Token (string, required) - Keystone auth token -* Accept (string) - application/json - -#### Path Parameters -None. - -#### Query Parameters -* tenant_id (string, optional, restricted) - Tenant ID from which to get metrics. This parameter can be used to get metrics from a tenant other than the tenant the request auth token is scoped to. Usage of this query parameter is restricted to users with the monasca admin role, as defined in the monasca api configuration file, which defaults to `monasca-admin`. -* name (string(255), optional) - A metric name to filter metrics by. -* dimensions (string, optional) - A dictionary to filter metrics by, specified as a comma separated array of (key, value) pairs as `key1:value1,key2:value2, ...`. Leaving the value empty, as in `key1,key2:value2`, will return all values for that key. Multiple values for a key may be specified as `key1:value1|value2|...,key2:value4,...`. -* start_time (string, optional) - The start time in ISO 8601 combined date and time format in UTC. This is useful for only listing metrics that have measurements since the specified start_time. -* end_time (string, optional) - The end time in ISO 8601 combined date and time format in UTC. Combined with start_time, this can be useful to only list metrics that have measurements in between the specified start_time and end_time. -* offset (integer (InfluxDB) or hexadecimal string (Vertica), optional) -* limit (integer, optional) - -#### Request Body -None.
- -#### Request Examples -``` -GET /v2.0/metrics?name=metric1&dimensions=key1:value1 HTTP/1.1 -Host: 192.168.10.4:8070 -Content-Type: application/json -X-Auth-Token: 27feed73a0ce4138934e30d619b415b0 -Cache-Control: no-cache -``` - -### Response -#### Status Code -* 200 - OK - -#### Response Body -Returns a JSON object with a 'links' array of links and an 'elements' array of metric definition objects with the following fields: - -* name (string) -* dimensions ({string(255): string(255)}) - -#### Response Examples -```` -{ - "links": [ - { - "rel": "self", - "href": "http://192.168.10.4:8070/v2.0/metrics" - }, - { - "rel": "next", - "href": "http://192.168.10.4:8070/v2.0/metrics?offset=1" - } - ], - "elements": [ - { - "id": 0, - "name": "name1", - "dimensions": { - "key1": "value1" - } - }, - { - "id": 1, - "name": "name2", - "dimensions": { - "key1": "value1" - } - } - ] -} -```` -___ - -## List dimension values -Get dimension values. - -### GET /v2.0/metrics/dimensions/names/values - -#### Headers -* X-Auth-Token (string, required) - Keystone auth token -* Accept (string) - application/json - -#### Path Parameters -None. - -#### Query Parameters -* tenant_id (string, optional, restricted) - Tenant ID from which to get dimension values. This parameter can be used to get dimension values from a tenant other than the tenant the request auth token is scoped to. Usage of this query parameter is restricted to users with the monasca admin role, as defined in the monasca api configuration file, which defaults to `monasca-admin`. -* metric_name (string(255), optional) - A metric name to filter dimension values by. -* dimension_name (string(255), required) - A dimension name to filter dimension values by. -* start_time (string, optional) - The start time in ISO 8601 combined date and time format in UTC. -* end_time (string, optional) - The end time in ISO 8601 combined date and time format in UTC. -* offset (string(255), optional) - The dimension values are returned in alphabetical order, and the offset is the dimension value after which results will be returned in the next pagination request. -* limit (integer, optional) - -#### Request Body -None. - -#### Request Examples -``` -GET /v2.0/metrics/dimensions/names/values?dimension_name=dimension_name HTTP/1.1 -Host: 192.168.10.4:8070 -Content-Type: application/json -X-Auth-Token: 27feed73a0ce4138934e30d619b415b0 -Cache-Control: no-cache -``` - -### Response -#### Status Code -* 200 - OK - -#### Response Body -Returns a JSON object with a 'links' array of links and an 'elements' array of dimension values. - -#### Response Examples -```` -{ - "elements": [ - { - "dimension_value": "value2" - }, - { - "dimension_value": "value3" - } - ], - "links": [ - { - "href": "http://192.168.10.6:8070/v2.0/metrics/dimensions/names/values?dimension_name=dim_name&offset=value1&limit=2", - "rel": "self" - }, - { - "href": "http://192.168.10.6:8070/v2.0/metrics/dimensions/names/values?offset=value3&dimension_name=dim_name&limit=2", - "rel": "next" - } - ] -} -```` -___ - -## List dimension names -Get dimension names. - -### GET /v2.0/metrics/dimensions/names - -#### Headers -* X-Auth-Token (string, required) - Keystone auth token -* Accept (string) - application/json - -#### Path Parameters -None. - -#### Query Parameters -* tenant_id (string, optional, restricted) - Tenant ID from which to get dimension names. This parameter can be used to get dimension names from a tenant other than the tenant the request auth token is scoped to.
Usage of this query parameter is restricted to users with the monasca admin role, as defined in the monasca api configuration file, which defaults to `monasca-admin`. -* metric_name (string(255), optional) - A metric name to filter dimension names by. -* start_time (string, optional) - The start time in ISO 8601 combined date and time format in UTC. -* end_time (string, optional) - The end time in ISO 8601 combined date and time format in UTC. -* offset (string(255), optional) - The dimension names are returned in alphabetical order, and the offset is the dimension name after which results will be returned in the next pagination request. -* limit (integer, optional) - -#### Request Body -None. - -#### Request Examples -``` -GET /v2.0/metrics/dimensions/names HTTP/1.1 -Host: 192.168.10.6:8070 -Content-Type: application/json -X-Auth-Token: 818d3d8f10bd4987adb3f84bc94a801d -Cache-Control: no-cache -``` - -### Response -#### Status Code -* 200 - OK - -#### Response Body -Returns a JSON object with a 'links' array of links and an 'elements' array of dimension names. -#### Response Examples -```` -{ - "elements": [ - { - "dimension_name": "name2" - }, - { - "dimension_name": "name3" - } - ], - "links": [ - { - "href": "http://192.168.10.6:8070/v2.0/metrics/dimensions/names?offset=name1&limit=2", - "rel": "self" - }, - { - "href": "http://192.168.10.6:8070/v2.0/metrics/dimensions/names?offset=name3&limit=2", - "rel": "next" - } - ] -} -```` - -# Measurements -Operations for accessing measurements of metrics. - -## List measurements -Get measurements for metrics. - -If `group_by` is not specified, metrics must be fully qualified with name and dimensions so that measurements are returned for only a single metric. If the metric name and dimensions given do not resolve to a single metric, an error will be displayed asking the user to further qualify the metric with a name and additional dimensions. - -If users do not wish to see measurements for a single metric, but would prefer to have measurements from multiple metrics combined, a 'merge_metrics' flag can be specified. When 'merge_metrics' is set to true (**merge_metrics=true**), all measurements for all metrics that satisfy the query parameters will be merged into a single list of measurements. - -### GET /v2.0/metrics/measurements - -#### Headers -* X-Auth-Token (string, required) - Keystone auth token -* Accept (string) - application/json - -#### Path Parameters -None. - -#### Query Parameters -* tenant_id (string, optional, restricted) - Tenant ID from which to get measurements. This parameter can be used to get measurements from a tenant other than the tenant the request auth token is scoped to. Usage of this query parameter is restricted to users with the monasca admin role, as defined in the monasca api configuration file, which defaults to `monasca-admin`. -* name (string(255), required) - A metric name to filter metrics by. -* dimensions (string, optional) - A dictionary to filter metrics by, specified as a comma separated array of (key, value) pairs as `key1:value1,key2:value2, ...` -* start_time (string, required) - The start time in ISO 8601 combined date and time format in UTC. -* end_time (string, optional) - The end time in ISO 8601 combined date and time format in UTC. -* offset (timestamp, optional) -* limit (integer, optional) -* merge_metrics (boolean, optional) - allow multiple metrics to be combined into a single list of measurements. -* group_by (string, optional) - list of columns to group the metrics to be returned.
-
-#### Request Body
-None.
-
-#### Request Examples
-```
-GET /v2.0/metrics/measurements?name=http_status&dimensions=hostname:devstack&start_time=2015-03-01T00:00:01Z HTTP/1.1
-Host: 192.168.10.4:8070
-Content-Type: application/json
-X-Auth-Token: 2b8882ba2ec44295bf300aecb2caa4f7
-Cache-Control: no-cache
-```
-
-### Response
-#### Status Code
-* 200 - OK
-
-#### Response Body
-Returns a JSON object with a 'links' array of links and an 'elements' array of measurement objects for each unique metric with the following fields:
-
-* name (string(255)) - A name of a metric.
-* dimensions ({string(255): string(255)}) - The dimensions of a metric.
-* columns (array[string]) - An array of column names corresponding to the columns in measurements.
-* measurements (array[array[]]) - A two dimensional array of measurements for each timestamp. The timestamp is in ISO 8601 combined date and time format, with millisecond resolution.
-
-#### Response Examples
-```
-{
-  "links": [
-    {
-      "rel": "self",
-      "href": "http://192.168.10.4:8070/v2.0/metrics/measurements?start_time=2015-03-01T00%3A00%3A00Z&name=http_status&dimensions=hostname%3Adevstack"
-    },
-    {
-      "rel": "next",
-      "href": "http://192.168.10.4:8070/v2.0/metrics/measurements?offset=01ce0acc66131296c8a17294f39aee44ea8963ec_2015-03-03T05%3A24%3A55.123Z&name=http_status&dimensions=hostname%3Adevstack&start_time=2015-03-01T00%3A00%3A00Z"
-    }
-  ],
-  "elements": [
-    {
-      "id": "01ce0acc66131296c8a17294f39aee44ea8963ec",
-      "name": "http_status",
-      "dimensions": {
-        "url": "http://localhost:8774/v2.0",
-        "hostname": "devstack",
-        "service": "compute"
-      },
-      "columns": [
-        "timestamp",
-        "value",
-        "value_meta"
-      ],
-      "measurements": [
-        [
-          "2015-03-03T05:22:28.123Z",
-          0,
-          {}
-        ],
-        [
-          "2015-03-03T05:23:12.123Z",
-          0,
-          {}
-        ],
-        [
-          "2015-03-03T05:24:55.123Z",
-          1,
-          {
-            "rc": "404",
-            "error": "Not Found"
-          }
-        ]
-      ]
-    }
-  ]
-}
-```
-___
-
-# Metric Names
-Operations for accessing names of metrics.
-
-## List names
-Get names for metrics.
-
-### GET /v2.0/metrics/names
-
-#### Headers
-* X-Auth-Token (string, required) - Keystone auth token
-* Accept (string) - application/json
-
-#### Path Parameters
-None.
-
-#### Query Parameters
-* tenant_id (string, optional, restricted) - Tenant ID from which to get metric names. This parameter can be used to get metric names from a tenant other than the tenant the request auth token is scoped to. Usage of this query parameter is restricted to users with the monasca admin role, as defined in the monasca api configuration file, which defaults to `monasca-admin`.
-* dimensions (string, optional) - A dictionary to filter metrics by specified as a comma separated array of (key, value) pairs as `key1:value1,key2:value2, ...`
-* offset (string, optional)
-* limit (integer, optional)
-
-#### Request Body
-None.
-
-#### Request Examples
-```
-GET /v2.0/metrics/names HTTP/1.1
-Host: 192.168.10.4:8070
-Content-Type: application/json
-X-Auth-Token: 2b8882ba2ec44295bf300aecb2caa4f7
-Cache-Control: no-cache
-```
-
-### Response
-#### Status Code
-* 200 - OK
-
-#### Response Body
-Returns a JSON object with a 'links' array of links and an 'elements' array of metric name objects for each unique metric name (not including dimensions) in alphabetical order with the following fields:
-
-* name (string(255)) - A name of a metric.
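-
-Results are paginated like the other list resources: follow the 'next' link until it is absent. A minimal client sketch, assuming the Python `requests` library and the illustrative token used in these examples:
-
-```python
-import requests
-
-HEADERS = {"X-Auth-Token": "2b8882ba2ec44295bf300aecb2caa4f7"}  # illustrative
-
-def iter_elements(url):
-    """Yield elements from a paginated Monasca list resource."""
-    while url:
-        body = requests.get(url, headers=HEADERS).json()
-        for element in body.get("elements", []):
-            yield element
-        # Follow the 'next' link if present, otherwise stop.
-        url = next((link["href"] for link in body.get("links", [])
-                    if link["rel"] == "next"), None)
-
-for name_obj in iter_elements("http://192.168.10.4:8070/v2.0/metrics/names?limit=4"):
-    print(name_obj["name"])
-```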
-
-#### Response Examples
-```
-{
-  "elements": [
-    {
-      "name": "cpu.idle_perc"
-    },
-    {
-      "name": "cpu.idle_time"
-    },
-    {
-      "name": "cpu.percent"
-    },
-    {
-      "name": "cpu.stolen_perc"
-    }
-  ],
-  "links": [
-    {
-      "href": "http://192.168.10.6:8070/v2.0/metrics/names?offset=cpu.frequency_mhz&limit=4",
-      "rel": "self"
-    },
-    {
-      "href": "http://192.168.10.6:8070/v2.0/metrics/names?offset=cpu.stolen_perc&limit=4",
-      "rel": "next"
-    }
-  ]
-}
-```
-___
-
-# Statistics
-Operations for calculating statistics of metrics.
-
-If `group_by` is not specified, then metrics must be fully qualified with name and dimensions so that statistics are returned for only a single metric. If the metric name and dimensions given do not resolve to a single metric, an error will be returned asking the user to further qualify the metric with a name and additional dimensions.
-
-If users do not wish to see statistics for a single metric, but would prefer to have statistics from multiple metrics combined, a 'merge_metrics' flag can be specified. When 'merge_metrics' is set to true (**merge_metrics=true**), all statistics for all metrics that satisfy the query parameters will be merged into a single list of statistics.
-
-## List statistics
-Get statistics for metrics.
-
-### GET /v2.0/metrics/statistics
-
-#### Headers
-* X-Auth-Token (string, required) - Keystone auth token
-* Accept (string) - application/json
-
-#### Path Parameters
-None.
-
-#### Query Parameters
-* tenant_id (string, optional, restricted) - Tenant ID from which to get statistics. This parameter can be used to get statistics from a tenant other than the tenant the request auth token is scoped to. Usage of this query parameter is restricted to users with the monasca admin role, as defined in the monasca api configuration file, which defaults to `monasca-admin`.
-* name (string(255), required) - A metric name to filter metrics by.
-* dimensions (string, optional) - A dictionary to filter metrics by specified as a comma separated array of (key, value) pairs as `key1:value1,key2:value2, ...`
-* statistics (string, required) - A comma separated array of statistics to evaluate. Valid statistics are avg, min, max, sum and count.
-* start_time (string, required) - The start time in ISO 8601 combined date and time format in UTC.
-* end_time (string, optional) - The end time in ISO 8601 combined date and time format in UTC.
-* period (integer, optional) - The time period to aggregate measurements by. Default is 300 seconds.
-* offset (timestamp, optional)
-* limit (integer, optional)
-* merge_metrics (boolean, optional) - Allow multiple metrics to be combined into a single list of statistics.
-* group_by (string, optional) - List of columns to group the metrics to be returned.
-
-#### Request Body
-None.
-
-#### Request Examples
-```
-GET /v2.0/metrics/statistics?name=cpu.system_perc&dimensions=hostname:devstack&start_time=2014-07-18T03:00:00Z&statistics=avg,min,max,sum,count HTTP/1.1
-Host: 192.168.10.4:8070
-X-Auth-Token: 2b8882ba2ec44295bf300aecb2caa4f7
-Content-Type: application/json
-Cache-Control: no-cache
-```
-
-### Response
-
-#### Status Code
-* 200 - OK
-
-#### Response Body
-Returns a JSON object with a 'links' array of links and an 'elements' array of statistic objects for each unique metric with the following fields:
-
-* name (string(255)) - A name of a metric.
-* dimensions ({string(255): string(255)}) - The dimensions of a metric.
-* columns (array[string]) - An array of column names corresponding to the columns in statistics.
-* statistics (array[array[]]) - A two dimensional array of statistics for each period.
-
-#### Response Examples
-```
-{
-  "links": [
-    {
-      "rel": "self",
-      "href": "http://192.168.10.4:8070/v2.0/metrics/statistics?start_time=2014-07-18T03%3A00%3A00Z&name=cpu.system_perc&dimensions=hostname%3Adevstack&statistics=avg%2Cmin%2Cmax%2Csum%2Ccount"
-    },
-    {
-      "rel": "next",
-      "href": "http://192.168.10.4:8070/v2.0/metrics/statistics?offset=2014-07-18T03%3A22%3A00Z&name=cpu.system_perc&dimensions=hostname%3Adevstack&start_time=2014-07-18T03%3A00%3A00Z&statistics=avg%2Cmin%2Cmax%2Csum%2Ccount"
-    }
-  ],
-  "elements": [
-    {
-      "id": "2014-07-18T03:22:00Z",
-      "name": "cpu.system_perc",
-      "dimensions": {
-        "hostname": "devstack"
-      },
-      "columns": [
-        "timestamp",
-        "avg",
-        "min",
-        "max",
-        "sum",
-        "count"
-      ],
-      "statistics": [
-        [
-          "2014-07-18T03:20:00Z",
-          2.765,
-          1.95,
-          4.93,
-          22.119999999999997,
-          8
-        ],
-        [
-          "2014-07-18T03:21:00Z",
-          2.412941176470588,
-          1.71,
-          4.09,
-          41.019999999999996,
-          17
-        ],
-        [
-          "2014-07-18T03:22:00Z",
-          2.1135294117647065,
-          1.62,
-          3.85,
-          35.93000000000001,
-          17
-        ]
-      ]
-    }
-  ]
-}
-```
-___
-
-# Notification Methods
-Operations for working with notification methods.
-
-## Create Notification Method
-Creates a notification method through which notifications can be sent when an alarm state transition occurs. Notification methods can be associated with zero or many alarms.
-
-### POST /v2.0/notification-methods
-
-#### Headers
-* X-Auth-Token (string, required) - Keystone auth token
-* Content-Type (string, required) - application/json
-* Accept (string) - application/json
-
-#### Path Parameters
-None.
-
-#### Query Parameters
-None.
-
-#### Request Body
-* name (string(250), required) - A descriptive name of the notification method.
-* type (string(100), required) - The type of notification method (see [List supported Notification Method Types](#list-supported-notification-method-types) for supported types).
-* address (string(100), required) - The email/url address to notify.
-* period (integer, optional) - The interval in seconds to periodically send the notification. Supported periods are defined in the Monasca API and Notification service config. The notification will continue to be sent at the defined interval until the alarm it is associated with changes state.
-
-#### Request Examples
-```
-POST /v2.0/notification-methods HTTP/1.1
-Host: 192.168.10.4:8070
-Content-Type: application/json
-X-Auth-Token: 2b8882ba2ec44295bf300aecb2caa4f7
-Cache-Control: no-cache
-
-{
-  "name":"Name of notification method",
-  "type":"EMAIL",
-  "address":"john.doe@hp.com"
-}
-```
-
-### Response
-
-#### Status Code
-* 201 - Created
-
-#### Response Body
-Returns a JSON notification method object with the following fields:
-
-* id (string) - ID of notification method
-* links ([link])
-* name (string) - Name of notification method
-* type (string) - Type of notification method
-* address (string) - Address of notification method
-* period (integer) - Period of notification method
-
-#### Response Examples
-```
-{
-  "id":"35cc6f1c-3a29-49fb-a6fc-d9d97d190508",
-  "links":[
-    {
-      "rel":"self",
-      "href":"http://192.168.10.4:8070/v2.0/notification-methods/35cc6f1c-3a29-49fb-a6fc-d9d97d190508"
-    }
-  ],
-  "name":"Name of notification method",
-  "type":"EMAIL",
-  "address":"john.doe@hp.com",
-  "period":0
-}
-```
-___
-
-## List Notification Methods
-List all notification methods.
- -### GET /v2.0/notification-methods - -#### Headers -* X-Auth-Token (string, required) - Keystone auth token -* Accept (string) - application/json - -#### Path Parameters -None. - -#### Query Parameters -* offset (string, optional) -* limit (integer, optional) -* sort_by (string, optional) - Comma separated list of fields to sort by, defaults to 'id'. Fields may be followed by 'asc' or 'desc' to set the direction, ex 'address desc' -Allowed fields for sort_by are: 'id', 'name', 'type', 'address', 'created_at', 'updated_at' - -#### Request Body -None. - -#### Request Examples -``` -GET /v2.0/notification-methods HTTP/1.1 -Host: 192.168.10.4:8070 -Content-Type: application/json -X-Auth-Token: 2b8882ba2ec44295bf300aecb2caa4f7 -Cache-Control: no-cache -``` - -### Response - -#### Status Code -* 200 - OK - -#### Response Body -Returns a JSON object with a 'links' array of links and an 'elements' array of notification method objects with the following fields: - -* id (string) - ID of notification method -* links ([link]) -* name (string) - Name of notification method -* type (string) - Type of notification method -* address (string) - Address of notification method -* period (integer) - Period of notification method - -#### Response Examples -``` -{ - "links": [ - { - "rel": "self", - "href": "http://192.168.10.4:8070/v2.0/notification-methods" - }, - { - "rel": "next", - "href": "http://192.168.10.4:8070/v2.0/notification-methods?offset=c60ec47e-5038-4bf1-9f95-4046c6e9a759" - } - ], - "elements": [ - { - "id": "35cc6f1c-3a29-49fb-a6fc-d9d97d190508", - "links": [ - { - "rel": "self", - "href": "http://192.168.10.4:8070/v2.0/notification-methods/35cc6f1c-3a29-49fb-a6fc-d9d97d190508" - } - ], - "name": "Name of notification method", - "type": "EMAIL", - "address": "john.doe@hp.com", - "period": 0 - }, - { - "id": "c60ec47e-5038-4bf1-9f95-4046c6e9a759", - "links": [ - { - "rel": "self", - "href": "http://192.168.10.4:8070/v2.0/notification-methods/c60ec47e-5038-4bf1-9f95-4046c6e9a759" - } - ], - "name": "Name of notification method", - "type": "WEBHOOK", - "address": "http://localhost:3333", - "period": 1 - } - ] -} -``` -___ - -## Get Notification Method -Get the details of a specific notification method. - -### GET /v2.0/notification-methods/{notification_method_id} - -#### Headers -* X-Auth-Token (string, required) - Keystone auth token -* Accept (string) - application/json - -#### Path Parameters -* notification_method_id (string, required) - ID of the notification method - -#### Query Parameters -None. - -#### Request Body -None. - -#### Request Examples -``` -GET http://192.168.10.4:8070/v2.0/notification-methods/35cc6f1c-3a29-49fb-a6fc-d9d97d190508 -``` - -### Response - -#### Status Code -* 200 - OK - -#### Response Body -Returns a JSON notification method object with the following fields: - -* id (string) - ID of notification method -* links ([link]) -* name (string) - Name of notification method -* type (string) - Type of notification method -* address (string) - Address of notification method -* period (integer) - Period of notification method - -#### Response Examples -``` -{ - "id":"35cc6f1c-3a29-49fb-a6fc-d9d97d190508", - "links":[ - { - "rel":"self", - "href":"http://192.168.10.4:8070/v2.0/notification-methods/35cc6f1c-3a29-49fb-a6fc-d9d97d190508" - } - ], - "name":"Name of notification method", - "type":"EMAIL", - "address":"john.doe@hp.com", - "period": 0 -} -``` -___ - -## Update Notification Method -Update the specified notification method. 
- -### PUT /v2.0/notification-methods/{notification_method_id} - -#### Headers -* X-Auth-Token (string, required) - Keystone auth token -* Content-Type (string, required) - application/json -* Accept (string) - application/json - -#### Path Parameters -* notification_method_id (string, required) - ID of the notification method to update. - -#### Query Parameters -None. - -#### Request Body -* name (string(250), required) - A descriptive name of the notification method. -* type (string(100), required) - The type of notification method (see [List supported Notification Method Types](#list-supported-notification-method-types) for supported types). -* address (string(100), required) - The email/url address to notify. -* period (integer, required) - The interval in seconds to periodically send the notification. Supported periods are defined in the Monasca API and Notification service config. The notification will continue to be sent at the defined interval until the alarm it is associated with changes state. - -#### Request Examples -```` -PUT /v2.0/notification-methods/35cc6f1c-3a29-49fb-a6fc-d9d97d190508 HTTP/1.1 -Host: 192.168.10.4:8070 -Content-Type: application/json -X-Auth-Token: 2b8882ba2ec44295bf300aecb2caa4f7 -Cache-Control: no-cache - -{ - "name":"New name of notification method", - "type":"EMAIL", - "address":"jane.doe@hp.com", - "period":0 -} -```` - -### Response - -#### Status Code -* 200 - OK - -#### Response Body -Returns a JSON notification method object with the following fields: - -* id (string) - ID of notification method -* links ([link]) -* name (string) - Name of notification method -* type (string) - Type of notification method -* address (string) - Address of notification method -* period (integer) - Period of notification method - -#### Response Examples -```` -{ - "id":"35cc6f1c-3a29-49fb-a6fc-d9d97d190508", - "links":[ - { - "rel":"self", - "href":"http://192.168.10.4:8070/v2.0/notification-methods/35cc6f1c-3a29-49fb-a6fc-d9d97d190508" - } - ], - "name":"New name of notification method", - "type":"EMAIL", - "address":"jane.doe@hp.com", - "period":0 -} -```` -___ - -## Patch Notification Method -Patch the specified notification method. - -### PATCH /v2.0/notification-methods/{notification_method_id} - -#### Headers -* X-Auth-Token (string, required) - Keystone auth token -* Content-Type (string, required) - application/json -* Accept (string) - application/json - -#### Path Parameters -* notification_method_id (string, required) - ID of the notification method to update. - -#### Query Parameters -None. - -#### Request Body -* name (string(250), optional) - A descriptive name of the notification method. -* type (string(100), optional) - The type of notification method (see [List supported Notification Method Types](#list-supported-notification-method-types) for supported types). -* address (string(100), optional) - The email/url address to notify. -* period (integer, optional) - The interval in seconds to periodically send the notification. Supported periods are defined in the Monasca API and Notification service config. The notification will continue to be sent at the defined interval until the alarm it is associated with changes state. 
-
-#### Request Examples
-```
-PATCH /v2.0/notification-methods/35cc6f1c-3a29-49fb-a6fc-d9d97d190508 HTTP/1.1
-Host: 192.168.10.4:8070
-Content-Type: application/json
-X-Auth-Token: 2b8882ba2ec44295bf300aecb2caa4f7
-Cache-Control: no-cache
-
-{
-  "name":"New name of notification method",
-  "type":"EMAIL",
-  "address":"jane.doe@hp.com",
-  "period":0
-}
-```
-
-### Response
-
-#### Status Code
-* 200 - OK
-
-#### Response Body
-Returns a JSON notification method object with the following fields:
-
-* id (string) - ID of notification method
-* links ([link])
-* name (string) - Name of notification method
-* type (string) - Type of notification method
-* address (string) - Address of notification method
-* period (integer) - Period of notification method
-
-#### Response Examples
-```
-{
-  "id":"35cc6f1c-3a29-49fb-a6fc-d9d97d190508",
-  "links":[
-    {
-      "rel":"self",
-      "href":"http://192.168.10.4:8070/v2.0/notification-methods/35cc6f1c-3a29-49fb-a6fc-d9d97d190508"
-    }
-  ],
-  "name":"New name of notification method",
-  "type":"EMAIL",
-  "address":"jane.doe@hp.com",
-  "period":0
-}
-```
-___
-
-## Delete Notification Method
-Delete the specified notification method.
-
-### DELETE /v2.0/notification-methods/{notification_method_id}
-
-#### Headers
-* X-Auth-Token (string, required) - Keystone auth token
-
-#### Path Parameters
-* notification_method_id (string, required) - ID of the notification method to delete
-
-#### Query Parameters
-None.
-
-#### Request Body
-None.
-
-#### Request Examples
-```
-DELETE /v2.0/notification-methods/35cc6f1c-3a29-49fb-a6fc-d9d97d190508 HTTP/1.1
-Host: 192.168.10.4:8070
-Content-Type: application/json
-X-Auth-Token: 2b8882ba2ec44295bf300aecb2caa4f7
-Cache-Control: no-cache
-```
-
-### Response
-
-#### Status Code
-* 204 - No Content
-
-#### Response Body
-This request does not return a response body.
-___
-
-## List supported Notification Method Types
-List supported notification method types.
-
-### GET /v2.0/notification-methods/types
-
-#### Headers
-* X-Auth-Token (string, required) - Keystone auth token
-
-#### Path Parameters
-None.
-
-#### Query Parameters
-None.
-
-#### Request Body
-None.
-
-#### Request Examples
-```
-GET /v2.0/notification-methods/types HTTP/1.1
-Host: 192.168.10.4:8070
-X-Auth-Token: 2b8882ba2ec44295bf300aecb2caa4f7
-Cache-Control: no-cache
-```
-
-### Response
-
-#### Status Code
-* 200 - OK
-
-#### Response Body
-Returns a JSON object with a 'links' array of links and an 'elements' array of the supported notification method types:
-
-* type (string) - A supported notification method type
-
-#### Response Examples
-```
-{
-  "links":[
-    {
-      "rel":"self",
-      "href":"http://192.168.10.6:8070/v2.0/notification-methods/types"
-    }
-  ],
-  "elements":[
-    {
-      "type":"EMAIL"
-    },
-    {
-      "type":"PAGERDUTY"
-    },
-    {
-      "type":"WEBHOOK"
-    }
-  ]
-}
-```
-___
-
-# Alarm Definitions
-Operations for working with alarm definitions.
-
-## Create Alarm Definition
-Create an alarm definition.
-
-### POST /v2.0/alarm-definitions
-
-#### Headers
-* X-Auth-Token (string, required) - Keystone auth token
-* Content-Type (string, required) - application/json
-* Accept (string) - application/json
-
-#### Path Parameters
-None.
-
-#### Query Parameters
-None.
-
-#### Request Body
-Consists of an alarm definition. An alarm definition has the following properties:
-
-* name (string(255), required) - A unique name of the alarm definition.
-* description (string(255), optional) - A description of an alarm definition.
-* expression (string, required) - An alarm expression.
-* match_by ([string], optional) - The metric dimensions to use to create unique alarms.
-* severity (string, optional) - Severity of an alarm definition. Must be either `LOW`, `MEDIUM`, `HIGH` or `CRITICAL`. Default is `LOW`.
-* alarm_actions ([string(50)], optional) - Array of notification method IDs that are invoked when the alarm transitions to the `ALARM` state.
-* ok_actions ([string(50)], optional) - Array of notification method IDs that are invoked when the alarm transitions to the `OK` state.
-* undetermined_actions ([string(50)], optional) - Array of notification method IDs that are invoked when the alarm transitions to the `UNDETERMINED` state.
-
-#### Request Examples
-```
-POST /v2.0/alarm-definitions HTTP/1.1
-Host: 192.168.10.4:8070
-Content-Type: application/json
-X-Auth-Token: 2b8882ba2ec44295bf300aecb2caa4f7
-Cache-Control: no-cache
-
-{
-  "name":"Average CPU percent greater than 10",
-  "description":"The average CPU percent is greater than 10",
-  "expression":"(avg(cpu.user_perc{hostname=devstack}) > 10)",
-  "match_by":[
-    "hostname"
-  ],
-  "severity":"LOW",
-  "ok_actions":[
-    "c60ec47e-5038-4bf1-9f95-4046c6e9a759"
-  ],
-  "alarm_actions":[
-    "c60ec47e-5038-4bf1-9f95-4046c6e9a759"
-  ],
-  "undetermined_actions":[
-    "c60ec47e-5038-4bf1-9f95-4046c6e9a759"
-  ]
-}
-```
-
-To create a deterministic alarm definition, send the following request:
-```
-POST /v2.0/alarm-definitions HTTP/1.1
-Host: 192.168.10.4:8070
-Content-Type: application/json
-X-Auth-Token: 2b8882ba2ec44295bf300aecb2caa4f7
-Cache-Control: no-cache
-
-{
-  "name":"Average CPU percent greater than 10",
-  "description":"The average CPU percent is greater than 10",
-  "expression":"(avg(cpu.user_perc{hostname=devstack},deterministic) > 10)",
-  "match_by":[
-    "hostname"
-  ],
-  "severity":"LOW",
-  "ok_actions":[
-    "c60ec47e-5038-4bf1-9f95-4046c6e9a759"
-  ],
-  "alarm_actions":[
-    "c60ec47e-5038-4bf1-9f95-4046c6e9a759"
-  ],
-  "undetermined_actions":[
-    "c60ec47e-5038-4bf1-9f95-4046c6e9a759"
-  ]
-}
-```
-
-### Response
-#### Status Code
-* 201 - Created
-
-#### Response Body
-Returns a JSON alarm definition object with the following fields:
-
-* id (string) - ID of alarm definition.
-* links ([link]) - Links to alarm definition.
-* name (string) - Name of alarm definition.
-* description (string) - Description of alarm definition.
-* expression (string) - The alarm definition expression.
-* deterministic (boolean) - Whether the underlying expression is deterministic. **Read-only**, computed from *expression*.
-* expression_data (JSON object) - The alarm definition expression as a JSON object.
-* match_by ([string]) - The metric dimensions to match to the alarm dimensions.
-* severity (string) - The severity of an alarm definition. Either `LOW`, `MEDIUM`, `HIGH` or `CRITICAL`.
-* actions_enabled (boolean) - If true, actions for all alarms related to this definition are enabled.
-* alarm_actions ([string]) - Array of notification method IDs that are invoked when the alarms for this definition transition to the `ALARM` state.
-* ok_actions ([string]) - Array of notification method IDs that are invoked when the alarms for this definition transition to the `OK` state.
-* undetermined_actions ([string]) - Array of notification method IDs that are invoked when the alarms for this definition transition to the `UNDETERMINED` state.
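-
-For reference, the create request above can be issued from Python. This is a minimal sketch, assuming the `requests` library and the illustrative endpoint, token and notification method ID used in these examples:
-
-```python
-import json
-
-import requests
-
-MONASCA_URL = "http://192.168.10.4:8070"  # illustrative
-HEADERS = {"X-Auth-Token": "2b8882ba2ec44295bf300aecb2caa4f7",  # illustrative
-           "Content-Type": "application/json"}
-
-definition = {
-    "name": "Average CPU percent greater than 10",
-    "description": "The average CPU percent is greater than 10",
-    # Appending ',deterministic' inside the function call would make
-    # the definition deterministic, as described above.
-    "expression": "(avg(cpu.user_perc{hostname=devstack}) > 10)",
-    "match_by": ["hostname"],
-    "severity": "LOW",
-    "alarm_actions": ["c60ec47e-5038-4bf1-9f95-4046c6e9a759"],
-}
-resp = requests.post(MONASCA_URL + "/v2.0/alarm-definitions",
-                     headers=HEADERS, data=json.dumps(definition))
-assert resp.status_code == 201
-print(resp.json()["id"], resp.json()["deterministic"])
-```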
-
-#### Response Examples
-```
-{
-  "id":"b461d659-577b-4d63-9782-a99194d4a472",
-  "links":[
-    {
-      "rel":"self",
-      "href":"http://192.168.10.4:8070/v2.0/alarm-definitions/b461d659-577b-4d63-9782-a99194d4a472"
-    }
-  ],
-  "name":"Average CPU percent greater than 10",
-  "description":"The average CPU percent is greater than 10",
-  "expression":"(avg(cpu.user_perc{hostname=devstack}) > 10)",
-  "deterministic": false,
-  "expression_data":{
-    "function":"AVG",
-    "metric_name":"cpu.user_perc",
-    "dimensions":{
-      "hostname":"devstack"
-    },
-    "operator":"GT",
-    "threshold":10.0,
-    "period":60,
-    "periods":1
-  },
-  "match_by":[
-    "hostname"
-  ],
-  "severity":"LOW",
-  "alarm_actions":[
-    "c60ec47e-5038-4bf1-9f95-4046c6e9a759"
-  ],
-  "ok_actions":[
-    "c60ec47e-5038-4bf1-9f95-4046c6e9a759"
-  ],
-  "undetermined_actions":[
-    "c60ec47e-5038-4bf1-9f95-4046c6e9a759"
-  ]
-}
-```
-___
-
-## List Alarm Definitions
-List alarm definitions.
-
-### GET /v2.0/alarm-definitions
-
-#### Headers
-* X-Auth-Token (string, required) - Keystone auth token
-* Accept (string) - application/json
-
-#### Path Parameters
-None.
-
-#### Query Parameters
-* name (string(255), optional) - Name of alarm definition to filter by.
-* dimensions (string, optional) - Dimensions of metrics to filter by specified as a comma separated array of (key, value) pairs as `key1:value1,key2:value2, ...`, leaving the value empty `key1,key2:value2` will return all values for that key, multiple values for a key may be specified as `key1:value1|value2|...,key2:value4,...`
-* severity (string, optional) - One or more severities to filter by, separated with `|`, ex. `severity=LOW|MEDIUM`.
-* offset (integer, optional)
-* limit (integer, optional)
-* sort_by (string, optional) - Comma separated list of fields to sort by, defaults to 'id', 'created_at'. Fields may be followed by 'asc' or 'desc' to set the direction, ex 'severity desc'.
-Allowed fields for sort_by are: 'id', 'name', 'severity', 'updated_at', 'created_at'
-
-#### Request Body
-None.
-
-#### Request Examples
-```
-GET /v2.0/alarm-definitions?name=CPU%20percent%20greater%20than%2010&dimensions=hostname:devstack&state=UNDETERMINED HTTP/1.1
-Host: 192.168.10.4:8070
-Content-Type: application/json
-X-Auth-Token: 2b8882ba2ec44295bf300aecb2caa4f7
-Cache-Control: no-cache
-```
-
-### Response
-#### Status Code
-* 200 - OK
-
-#### Response Body
-Returns a JSON object with a 'links' array of links and an 'elements' array of alarm definition objects with the following fields:
-
-* id (string) - ID of alarm definition.
-* links ([link]) - Links to alarm definition.
-* name (string) - Name of alarm definition.
-* description (string) - Description of alarm definition.
-* expression (string) - The alarm definition expression.
-* deterministic (boolean) - Whether the underlying expression is deterministic. **Read-only**, computed from *expression*.
-* expression_data (JSON object) - The alarm definition expression as a JSON object.
-* match_by ([string]) - The metric dimensions to use to create unique alarms.
-* severity (string) - The severity of an alarm definition. Either `LOW`, `MEDIUM`, `HIGH` or `CRITICAL`.
-* actions_enabled (boolean) - If true, actions for all alarms related to this definition are enabled.
-* alarm_actions ([string]) - Array of notification method IDs that are invoked when the alarms for this definition transition to the `ALARM` state.
-* ok_actions ([string]) - Array of notification method IDs that are invoked when the alarms for this definition transition to the `OK` state.
-* undetermined_actions ([string]) - Array of notification method IDs that are invoked when the alarms for this definition transition to the `UNDETERMINED` state.
-
-#### Response Examples
-```
-{
-  "links": [
-    {
-      "rel": "self",
-      "href": "http://192.168.10.4:8070/v2.0/alarm-definitions?name=CPU%20percent%20greater%20than%2010&dimensions=hostname:devstack&state=UNDETERMINED"
-    },
-    {
-      "rel": "next",
-      "href": "http://localhost:8070/v2.0/alarm-definitions?offset=f9935bcc-9641-4cbf-8224-0993a947ea83&name=CPU%20percent%20greater%20than%2010&dimensions=hostname:devstack&state=UNDETERMINED"
-    }
-  ],
-  "elements": [
-    {
-      "id": "f9935bcc-9641-4cbf-8224-0993a947ea83",
-      "links": [
-        {
-          "rel": "self",
-          "href": "http://192.168.10.4:8070/v2.0/alarm-definitions/f9935bcc-9641-4cbf-8224-0993a947ea83"
-        }
-      ],
-      "name": "CPU percent greater than 10",
-      "description": "Release the hounds",
-      "expression": "(avg(cpu.user_perc{hostname=devstack}) > 10)",
-      "deterministic": false,
-      "expression_data": {
-        "function": "AVG",
-        "metric_name": "cpu.user_perc",
-        "dimensions": {
-          "hostname": "devstack"
-        },
-        "operator": "GT",
-        "threshold": 10,
-        "period": 60,
-        "periods": 1
-      },
-      "match_by": [
-        "hostname"
-      ],
-      "severity": "CRITICAL",
-      "actions_enabled": true,
-      "alarm_actions": [
-        "c60ec47e-5038-4bf1-9f95-4046c6e9a759"
-      ],
-      "ok_actions": [
-        "c60ec47e-5038-4bf1-9f95-4046c6e9a759"
-      ],
-      "undetermined_actions": [
-        "c60ec47e-5038-4bf1-9f95-4046c6e9a759"
-      ]
-    },
-    {
-      "id": "g9323232-6543-4cbf-1234-0993a947ea83",
-      "links": [
-        {
-          "rel": "self",
-          "href": "http://192.168.10.4:8070/v2.0/alarm-definitions/g9323232-6543-4cbf-1234-0993a947ea83"
-        }
-      ],
-      "name": "Log error count exceeds 1000",
-      "description": "Release the cats",
-      "expression": "(count(log.error{hostname=devstack}, deterministic) > 1000)",
-      "deterministic": true,
-      "expression_data": {
-        "function": "COUNT",
-        "metric_name": "log.error",
-        "dimensions": {
-          "hostname": "devstack"
-        },
-        "operator": "GT",
-        "threshold": 1000,
-        "period": 60,
-        "periods": 1
-      },
-      "match_by": [
-        "hostname"
-      ],
-      "severity": "CRITICAL",
-      "actions_enabled": true,
-      "alarm_actions": [],
-      "ok_actions": [],
-      "undetermined_actions": []
-    }
-  ]
-}
-```
-___
-
-## Get Alarm Definition
-Get the specified alarm definition.
-
-### GET /v2.0/alarm-definitions/{alarm_definition_id}
-
-#### Headers
-* X-Auth-Token (string, required) - Keystone auth token
-* Accept (string) - application/json
-
-#### Path Parameters
-* alarm_definition_id (string, required) - Alarm Definition ID
-
-#### Query Parameters
-None.
-
-#### Request Body
-None.
-
-### Response
-#### Status Code
-* 200 - OK
-
-#### Response Body
-Returns a JSON alarm definition object with the following fields:
-
-* id (string) - ID of alarm definition.
-* links ([link]) - Links to alarm definition.
-* name (string) - Name of alarm definition.
-* description (string) - Description of alarm definition.
-* expression (string) - The alarm definition expression.
-* deterministic (boolean) - Whether the underlying expression is deterministic. **Read-only**, computed from *expression*.
-* expression_data (JSON object) - The alarm definition expression as a JSON object.
-* match_by ([string]) - The metric dimensions to use to create unique alarms.
-* severity (string) - The severity of an alarm definition. Either `LOW`, `MEDIUM`, `HIGH` or `CRITICAL`.
-* actions_enabled (boolean) - If true, actions for all alarms related to this definition are enabled.
-* alarm_actions ([string]) - Array of notification method IDs that are invoked when the alarms for this definition transition to the `ALARM` state. -* ok_actions ([string]) - Array of notification method IDs that are invoked when the alarms for this definition transition to the `OK` state. -* undetermined_actions ([string]) - Array of notification method IDs that are invoked when the alarms for this definition transition to the `UNDETERMINED` state. - -#### Response Examples -``` -{ - "id": "f9935bcc-9641-4cbf-8224-0993a947ea83", - "links": [ - { - "rel": "self", - "href": "http://192.168.10.4:8070/v2.0/alarm-definitions/f9935bcc-9641-4cbf-8224-0993a947ea83" - } - ], - "name": "CPU percent greater than 10", - "description": "Release the hounds", - "expression": "(avg(cpu.user_perc{hostname=devstack}) > 10)", - "deterministic": false, - "expression_data": { - "function": "AVG", - "metric_name": "cpu.user_perc", - "dimensions": { - "hostname": "devstack" - }, - "operator": "GT", - "threshold": 10, - "period": 60, - "periods": 1 - }, - "match_by":[ - "hostname" - ], - "severity": "CRITICAL", - "actions_enabled": true, - "alarm_actions": [ - "c60ec47e-5038-4bf1-9f95-4046c6e9a759" - ], - "ok_actions": [ - "c60ec47e-5038-4bf1-9f95-4046c6e9a759" - ], - "undetermined_actions": [ - "c60ec47e-5038-4bf1-9f95-4046c6e9a759" - ] -} -``` -___ - -## Update Alarm Definition -Update/Replace the specified alarm definition. - -### PUT /v2.0/alarm-definitions/{alarm_definition_id} - -#### Headers -* X-Auth-Token (string, required) - Keystone auth token -* Content-Type (string, required) - application/json -* Accept (string) - application/json - -#### Path Parameters -* alarm_definition_id (string, required) - -#### Query Parameters -None. - -#### Request Body -Consists of an alarm definition. An alarm has the following properties: - -* name (string(255), required) - A name of the alarm definition. -* description (string(255), required) - A description of an alarm definition. -* expression (string, required) - An alarm expression. -* match_by ([string], required) - The metric dimensions to use to create unique alarms. This MUST be the same as the existing value for match_by -* severity (string, required) - Severity of an alarm definition. Must be either `LOW`, `MEDIUM`, `HIGH` or `CRITICAL`. -* alarm_actions ([string(50)], required) -* ok_actions ([string(50)], required) -* undetermined_actions ([string(50)], required) -* actions_enabled (boolean, required) If actions should be enabled (set to true) or ignored (set to false) - -See Changing Alarm Definitions for restrictions on changing expression and match_by. - -#### Request Examples -``` -PUT /v2.0/alarm-definitions/f9935bcc-9641-4cbf-8224-0993a947ea83 HTTP/1.1 -Host: 192.168.10.4:8070 -X-Auth-Token: 2b8882ba2ec44295bf300aecb2caa4f7 -Content-Type: application/json -Cache-Control: no-cache - -{ - "name":"CPU percent greater than 15", - "description":"Release the hounds", - "expression":"(avg(cpu.user_perc{hostname=devstack}) > 15)", - "match_by":[ - "hostname" - ], - "severity": "LOW", - "alarm_actions":[ - "c60ec47e-5038-4bf1-9f95-4046c6e9a759" - ], - "ok_actions":[ - "c60ec47e-5038-4bf1-9f95-4046c6e9a759" - ], - "undetermined_actions":[ - "c60ec47e-5038-4bf1-9f95-4046c6e9a759" - ], - "actions_enabled": true -} -``` - -### Response -#### Status Code -* 200 - OK - -#### Response Body -Returns a JSON alarm definition object with the following parameters: - -* id (string) - ID of alarm definition. -* links ([link]) - Links to alarm definition. 
-* name (string) - Name of alarm definition.
-* description (string) - Description of alarm definition.
-* expression (string) - The alarm definition expression.
-* deterministic (boolean) - Whether the underlying expression is deterministic. **Read-only**, computed from *expression*.
-* expression_data (JSON object) - The alarm definition expression as a JSON object.
-* match_by ([string]) - The metric dimensions to use to create unique alarms.
-* severity (string) - The severity of an alarm definition. Either `LOW`, `MEDIUM`, `HIGH` or `CRITICAL`.
-* actions_enabled (boolean) - If true, actions for all alarms related to this definition are enabled.
-* alarm_actions ([string]) - Array of notification method IDs that are invoked when the alarms for this definition transition to the `ALARM` state.
-* ok_actions ([string]) - Array of notification method IDs that are invoked when the alarms for this definition transition to the `OK` state.
-* undetermined_actions ([string]) - Array of notification method IDs that are invoked when the alarms for this definition transition to the `UNDETERMINED` state.
-
-#### Response Examples
-```
-{
-  "id": "f9935bcc-9641-4cbf-8224-0993a947ea83",
-  "links": [
-    {
-      "rel": "self",
-      "href": "http://192.168.10.4:8070/v2.0/alarm-definitions/f9935bcc-9641-4cbf-8224-0993a947ea83"
-    }
-  ],
-  "name": "CPU percent greater than 15",
-  "description": "Release the hounds",
-  "expression": "(avg(cpu.user_perc{hostname=devstack}) > 15)",
-  "deterministic": false,
-  "expression_data": {
-    "function": "AVG",
-    "metric_name": "cpu.user_perc",
-    "dimensions": {
-      "hostname": "devstack"
-    },
-    "operator": "GT",
-    "threshold": 15,
-    "period": 60,
-    "periods": 1
-  },
-  "match_by":[
-    "hostname"
-  ],
-  "severity": "LOW",
-  "alarm_actions": [
-    "c60ec47e-5038-4bf1-9f95-4046c6e9a759"
-  ],
-  "ok_actions": [
-    "c60ec47e-5038-4bf1-9f95-4046c6e9a759"
-  ],
-  "undetermined_actions": [
-    "c60ec47e-5038-4bf1-9f95-4046c6e9a759"
-  ]
-}
-```
-___
-
-## Patch Alarm Definition
-Update selected parameters of the specified alarm definition, and enable/disable its actions.
-
-### PATCH /v2.0/alarm-definitions/{alarm_definition_id}
-
-#### Headers
-* X-Auth-Token (string, required) - Keystone auth token
-* Content-Type (string, required) - application/json
-* Accept (string) - application/json
-
-#### Path Parameters
-* alarm_definition_id (string, required) - Alarm Definition ID
-
-#### Query Parameters
-None.
-
-#### Request Body
-Consists of an alarm definition with the following properties:
-
-* name (string) - Name of alarm definition.
-* description (string) - Description of alarm definition.
-* expression (string) - The alarm definition expression.
-* match_by ([string], optional) - The metric dimensions to use to create unique alarms. If specified, this MUST be the same as the existing value for match_by.
-* severity (string) - The severity of an alarm definition. Either `LOW`, `MEDIUM`, `HIGH` or `CRITICAL`.
-* actions_enabled (boolean) - If true, actions for all alarms related to this definition are enabled.
-* alarm_actions ([string]) - Array of notification method IDs that are invoked when the alarms for this definition transition to the `ALARM` state.
-* ok_actions ([string]) - Array of notification method IDs that are invoked when the alarms for this definition transition to the `OK` state.
-* undetermined_actions ([string]) - Array of notification method IDs that are invoked when the alarms for this definition transition to the `UNDETERMINED` state.
-
-Only the parameters that are specified will be updated. See Changing Alarm Definitions for restrictions on changing expression and match_by.
-
-#### Request Examples
-```
-PATCH /v2.0/alarm-definitions/f9935bcc-9641-4cbf-8224-0993a947ea83 HTTP/1.1
-Host: 192.168.10.4:8070
-X-Auth-Token: 2b8882ba2ec44295bf300aecb2caa4f7
-Content-Type: application/json
-Cache-Control: no-cache
-
-{
-  "name":"CPU percent greater than 15",
-  "description":"Release the hounds",
-  "expression":"(avg(cpu.user_perc{hostname=devstack}) > 15)",
-  "match_by":[
-    "hostname"
-  ],
-  "severity":"CRITICAL",
-  "alarm_actions":[
-    "c60ec47e-5038-4bf1-9f95-4046c6e9a759"
-  ],
-  "ok_actions":[
-    "c60ec47e-5038-4bf1-9f95-4046c6e9a759"
-  ],
-  "undetermined_actions":[
-    "c60ec47e-5038-4bf1-9f95-4046c6e9a759"
-  ]
-}
-```
-
-### Response
-#### Status Code
-* 200 - OK
-
-#### Response Body
-Returns a JSON alarm definition object with the following fields:
-
-* id (string) - ID of alarm definition.
-* links ([link]) - Links to alarm definition.
-* name (string) - Name of alarm definition.
-* description (string) - Description of alarm definition.
-* expression (string) - The alarm definition expression.
-* deterministic (boolean) - Whether the underlying expression is deterministic. **Read-only**, computed from *expression*.
-* expression_data (JSON object) - The alarm definition expression as a JSON object.
-* match_by ([string]) - The metric dimensions to use to create unique alarms.
-* severity (string) - The severity of an alarm definition. Either `LOW`, `MEDIUM`, `HIGH` or `CRITICAL`.
-* actions_enabled (boolean) - If true, actions for all alarms related to this definition are enabled.
-* alarm_actions ([string]) - Array of notification method IDs that are invoked when the alarms for this definition transition to the `ALARM` state.
-* ok_actions ([string]) - Array of notification method IDs that are invoked when the alarms for this definition transition to the `OK` state.
-* undetermined_actions ([string]) - Array of notification method IDs that are invoked when the alarms for this definition transition to the `UNDETERMINED` state.
-
-#### Response Examples
-```
-{
-  "id": "f9935bcc-9641-4cbf-8224-0993a947ea83",
-  "links": [
-    {
-      "rel": "self",
-      "href": "http://192.168.10.4:8070/v2.0/alarm-definitions/f9935bcc-9641-4cbf-8224-0993a947ea83"
-    }
-  ],
-  "name": "CPU percent greater than 15",
-  "description": "Release the hounds",
-  "expression": "(avg(cpu.user_perc{hostname=devstack}) > 15)",
-  "deterministic": false,
-  "expression_data": {
-    "function": "AVG",
-    "metric_name": "cpu.user_perc",
-    "dimensions": {
-      "hostname": "devstack"
-    },
-    "operator": "GT",
-    "threshold": 15,
-    "period": 60,
-    "periods": 1
-  },
-  "match_by":[
-    "hostname"
-  ],
-  "severity": "CRITICAL",
-  "alarm_actions": [
-    "c60ec47e-5038-4bf1-9f95-4046c6e9a759"
-  ],
-  "ok_actions": [
-    "c60ec47e-5038-4bf1-9f95-4046c6e9a759"
-  ],
-  "undetermined_actions": [
-    "c60ec47e-5038-4bf1-9f95-4046c6e9a759"
-  ]
-}
-```
-___
-
-## Delete Alarm Definition
-Delete the specified alarm definition.
-
-### DELETE /v2.0/alarm-definitions/{alarm_definition_id}
-
-#### Headers
-* X-Auth-Token (string, required) - Keystone auth token
-
-#### Path Parameters
-* alarm_definition_id (string, required) - Alarm Definition ID
-
-#### Query Parameters
-None.
-
-#### Request Body
-None.
-
-#### Request Examples
-```
-DELETE /v2.0/alarm-definitions/b461d659-577b-4d63-9782-a99194d4a472 HTTP/1.1
-Host: 192.168.10.4:8070
-X-Auth-Token: 2b8882ba2ec44295bf300aecb2caa4f7
-Cache-Control: no-cache
-```
-
-### Response
-#### Status Code
-* 204 - No Content
-
-#### Response Body
-None.
-___
-
-# Alarms
-Operations for working with alarms.
-
-## List Alarms
-List alarms.
-
-### GET /v2.0/alarms
-
-#### Headers
-* X-Auth-Token (string, required) - Keystone auth token
-* Accept (string) - application/json
-
-#### Path Parameters
-None.
-
-#### Query Parameters
-
-* alarm_definition_id (string, optional) - Alarm definition ID to filter by.
-* metric_name (string(255), optional) - Name of metric to filter by.
-* metric_dimensions ({string(255): string(255)}, optional) - Dimensions of metrics to filter by specified as a comma separated array of (key, value) pairs as `key1:value1,key2:value2, ...`, leaving the value empty `key1,key2:value2` will return all values for that key, multiple values for a key may be specified as `key1:value1|value2|...,key2:value4,...`
-* state (string, optional) - State of alarm to filter by, either `OK`, `ALARM` or `UNDETERMINED`.
-* severity (string, optional) - One or more severities to filter by, separated with `|`, ex. `severity=LOW|MEDIUM`.
-* lifecycle_state (string(50), optional) - Lifecycle state to filter by.
-* link (string(512), optional) - Link to filter by.
-* state_updated_start_time (string, optional) - The start time in ISO 8601 combined date and time format in UTC.
-* offset (integer, optional)
-* limit (integer, optional)
-* sort_by (string, optional) - Comma separated list of fields to sort by, defaults to 'alarm_id'. Fields may be followed by 'asc' or 'desc' to set the direction, ex 'severity desc'.
-Allowed fields for sort_by are: 'alarm_id', 'alarm_definition_id', 'alarm_definition_name', 'state', 'severity', 'lifecycle_state', 'link', 'state_updated_timestamp', 'updated_timestamp', 'created_timestamp'
-
-#### Request Body
-None.
-
-#### Request Examples
-```
-GET /v2.0/alarms?metric_name=cpu.system_perc&metric_dimensions=hostname:devstack&state=UNDETERMINED HTTP/1.1
-Host: 192.168.10.4:8070
-Content-Type: application/json
-X-Auth-Token: 2b8882ba2ec44295bf300aecb2caa4f7
-Cache-Control: no-cache
-```
-
-### Response
-#### Status Code
-* 200 - OK
-
-#### Response Body
-Returns a JSON object with a 'links' array of links and an 'elements' array of alarm objects with the following fields:
-
-* id (string) - ID of alarm.
-* links ([link]) - Links to alarm.
-* alarm_definition (JSON object) - Summary of alarm definition.
-* metrics ({string, string(255): string(255)}) - The metrics associated with the alarm.
-* state (string) - State of alarm, either `OK`, `ALARM` or `UNDETERMINED`.
-* lifecycle_state (string) - Lifecycle state of alarm.
-* link (string) - Link to an external resource related to the alarm.
-* state_updated_timestamp - Timestamp in ISO 8601 combined date and time format in UTC when the state was last updated.
-* updated_timestamp - Timestamp in ISO 8601 combined date and time format in UTC when any field was last updated.
-* created_timestamp - Timestamp in ISO 8601 combined date and time format in UTC when the alarm was created.
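-
-A minimal client sketch for the filters and sorting above, assuming the Python `requests` library and the illustrative endpoint and token from these examples:
-
-```python
-import requests
-
-MONASCA_URL = "http://192.168.10.4:8070"  # illustrative
-HEADERS = {"X-Auth-Token": "2b8882ba2ec44295bf300aecb2caa4f7"}  # illustrative
-
-params = {
-    "metric_name": "cpu.system_perc",
-    "metric_dimensions": "hostname:devstack",
-    "state": "UNDETERMINED",
-    # Sort by severity first (descending), then by creation time.
-    "sort_by": "severity desc,created_timestamp",
-}
-alarms = requests.get(MONASCA_URL + "/v2.0/alarms",
-                      params=params, headers=HEADERS).json()["elements"]
-for alarm in alarms:
-    print(alarm["id"], alarm["state"], alarm["alarm_definition"]["name"])
-```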
-
-#### Response Examples
-```
-{
-  "links": [
-    {
-      "rel": "self",
-      "href": "http://192.168.10.4:8070/v2.0/alarms?metric_name=cpu.system_perc&metric_dimensions=hostname%3Adevstack&state=UNDETERMINED"
-    },
-    {
-      "rel": "next",
-      "href": "http://192.168.10.4:8070/v2.0/alarms?offset=f9935bcc-9641-4cbf-8224-0993a947ea83&metric_name=cpu.system_perc&metric_dimensions=hostname%3Adevstack&state=UNDETERMINED"
-    }
-  ],
-  "elements": [
-    {
-      "id": "f9935bcc-9641-4cbf-8224-0993a947ea83",
-      "links": [
-        {
-          "rel": "self",
-          "href": "http://192.168.10.4:8070/v2.0/alarms/f9935bcc-9641-4cbf-8224-0993a947ea83"
-        },
-        {
-          "rel": "state-history",
-          "href": "http://192.168.10.4:8070/v2.0/alarms/f9935bcc-9641-4cbf-8224-0993a947ea83/state-history"
-        }
-      ],
-      "alarm_definition": {
-        "severity": "LOW",
-        "id": "b7e5f472-7aa5-4254-a49a-463e749ae817",
-        "links": [
-          {
-            "href": "http://192.168.10.4:8070/v2.0/alarm-definitions/b7e5f472-7aa5-4254-a49a-463e749ae817",
-            "rel": "self"
-          }
-        ],
-        "name": "high cpu and load"
-      },
-      "metrics": [
-        {
-          "name": "cpu.system_perc",
-          "dimensions": {
-            "hostname": "devstack"
-          }
-        }
-      ],
-      "state": "OK",
-      "lifecycle_state":"OPEN",
-      "link":"http://somesite.com/this-alarm-info",
-      "state_updated_timestamp": "2015-03-20T21:04:49.000Z",
-      "updated_timestamp":"2015-03-20T21:04:49.000Z",
-      "created_timestamp": "2015-03-20T21:03:34.000Z"
-    }
-  ]
-}
-```
-___
-
-## Get Alarm Counts
-Get the number of alarms that match the criteria.
-
-### GET /v2.0/alarms/count
-
-#### Headers
-* X-Auth-Token (string, required) - Keystone auth token
-* Content-Type (string, required) - application/json
-* Accept (string) - application/json
-
-#### Path Parameters
-None.
-
-#### Query Parameters
-* alarm_definition_id (string, optional) - Alarm definition ID to filter by.
-* metric_name (string(255), optional) - Name of metric to filter by.
-* metric_dimensions ({string(255): string(255)}, optional) - One or more dimensions of metrics to filter by specified as a comma separated array of (key, value or multiple values separated by `|`) pairs as `key1:value1,key2:value2,key3:value3|value4,...`
-* state (string, optional) - State of alarm to filter by, either `OK`, `ALARM` or `UNDETERMINED`.
-* severity (string, optional) - One or more severities to filter by, separated with `|`, ex. `severity=LOW|MEDIUM`.
-* lifecycle_state (string(50), optional) - Lifecycle state to filter by.
-* link (string(512), optional) - Link to filter by.
-* state_updated_start_time (string, optional) - The start time in ISO 8601 combined date and time format in UTC.
-* offset (integer, optional)
-* limit (integer, optional)
-* group_by (string, optional) - A list of fields to group the results by as `field1,field2,...`
-The group_by field is limited to `alarm_definition_id`, `name`, `state`, `severity`, `link`, `lifecycle_state`, `metric_name`, `dimension_name`, `dimension_value`.
-If any of the fields `metric_name`, `dimension_name`, or `dimension_value` are specified, the sum of the resulting counts is not guaranteed to equal the total number of alarms in the system. Alarms with multiple metrics may be included in multiple counts when grouped by any of these three fields.
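-
-The response (described below) pairs a 'columns' array naming each position with rows in 'counts', which makes grouped results easy to consume programmatically. A minimal sketch, assuming the Python `requests` library and the illustrative endpoint and token from these examples:
-
-```python
-import requests
-
-MONASCA_URL = "http://192.168.10.4:8070"  # illustrative
-HEADERS = {"X-Auth-Token": "2b8882ba2ec44295bf300aecb2caa4f7"}  # illustrative
-
-body = requests.get(MONASCA_URL + "/v2.0/alarms/count",
-                    params={"group_by": "state,lifecycle_state"},
-                    headers=HEADERS).json()
-# 'columns' is e.g. ["count", "state", "lifecycle_state"]; zip it with
-# each row of 'counts' to label the values.
-for row in body["counts"]:
-    print(dict(zip(body["columns"], row)))
-```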
-
-#### Request Body
-None.
-
-#### Request Examples
-```
-GET /v2.0/alarms/count?metric_name=cpu.system_perc&metric_dimensions=hostname:devstack&group_by=state,lifecycle_state HTTP/1.1
-Host: 192.168.10.4:8070
-Content-Type: application/json
-X-Auth-Token: 2b8882ba2ec44295bf300aecb2caa4f7
-Cache-Control: no-cache
-```
-
-### Response
-#### Status Code
-* 200 - OK
-
-#### Response Body
-Returns a JSON object containing the following fields:
-
-* links ([link]) - Links to alarms count resource
-* columns ([string]) - List of the column names, in the order they were returned
-* counts ([array[]]) - A two dimensional array of the counts returned
-
-#### Response Examples
-```
-{
-  "links": [
-    {
-      "rel": "self",
-      "href": "http://192.168.10.4:8070/v2.0/alarms/count?metric_name=cpu.system_perc&metric_dimensions=hostname%3Adevstack&group_by=state,lifecycle_state"
-    }
-  ],
-  "columns": ["count", "state", "lifecycle_state"],
-  "counts": [
-    [124, "ALARM", "ACKNOWLEDGED"],
-    [12, "ALARM", "RESOLVED"],
-    [235, "OK", "OPEN"],
-    [61, "OK", "RESOLVED"],
-    [13, "UNDETERMINED", "ACKNOWLEDGED"],
-    [1, "UNDETERMINED", "OPEN"],
-    [2, "UNDETERMINED", "RESOLVED"]
-  ]
-}
-```
-___
-
-## List Alarms State History
-List alarm state history for alarms.
-
-### GET /v2.0/alarms/state-history
-
-#### Headers
-* X-Auth-Token (string, required) - Keystone auth token
-* Accept (string) - application/json
-
-#### Path Parameters
-None.
-
-#### Query Parameters
-* dimensions (string, optional) - Dimensions of metrics to filter by specified as a comma separated array of (key, value) pairs as `key1:value1,key2:value2, ...`
-* start_time (string, optional) - The start time in ISO 8601 combined date and time format in UTC.
-* end_time (string, optional) - The end time in ISO 8601 combined date and time format in UTC.
-* offset (timestamp, optional) - The offset in ISO 8601 combined date and time format in UTC.
-* limit (integer, optional)
-
-#### Request Body
-None.
-
-### Response
-#### Status Code
-* 200 - OK
-
-#### Response Body
-Returns a JSON object with a 'links' array of links and an 'elements' array of alarm state transition objects with the following fields:
-
-* id - Alarm State Transition ID.
-* alarm_id (string) - Alarm ID.
-* metrics ({string, string, string(255): string(255)}) - The metrics associated with the alarm state transition.
-* old_state (string) - The old state of the alarm. Either `OK`, `ALARM` or `UNDETERMINED`.
-* new_state (string) - The new state of the alarm. Either `OK`, `ALARM` or `UNDETERMINED`.
-* reason (string) - The reason for the state transition.
-* reason_data (string) - The reason for the state transition as a JSON object.
-* timestamp (string) - The time in ISO 8601 combined date and time format in UTC when the state transition occurred.
-* sub_alarms ([JSON object]) - The state of the sub-alarms when the alarm state transition occurred.
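-
-A minimal sketch of retrieving and printing these transitions, assuming the Python `requests` library and the illustrative endpoint and token from these examples:
-
-```python
-import requests
-
-MONASCA_URL = "http://192.168.10.4:8070"  # illustrative
-HEADERS = {"X-Auth-Token": "2b8882ba2ec44295bf300aecb2caa4f7"}  # illustrative
-
-history = requests.get(
-    MONASCA_URL + "/v2.0/alarms/state-history",
-    params={"dimensions": "hostname:devstack",
-            "start_time": "2015-02-20T00:00:00Z"},
-    headers=HEADERS).json()["elements"]
-for transition in history:
-    print("%s: %s -> %s (%s)" % (transition["timestamp"],
-                                 transition["old_state"],
-                                 transition["new_state"],
-                                 transition["reason"]))
-```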
- -#### Response Examples -``` -{ - "links": [ - { - "rel": "self", - "href": "http://192.168.10.4:8070/v2.0/alarms/state-history?dimensions=hostname%3Adevstack" - }, - { - "rel": "next", - "href": "http://192.168.10.4:8070/v2.0/alarms/state-history?offset=1424451007004&dimensions=hostname%3Adevstack" - } - ], - "elements": [ - { - "id": "1424451007002", - "alarm_id": "bc7f388d-3522-47bd-b4ae-41567090ab72", - "metrics": [ - { - "id": null, - "name": "cpu.system_perc", - "dimensions": { - "hostname": "devstack" - } - } - ], - "old_state": "UNDETERMINED", - "new_state": "OK", - "reason": "The alarm threshold(s) have not been exceeded for the sub-alarms: avg(cpu.system_perc{hostname=devstack}) > 15.0 with the values: [1.5]", - "reason_data": "{}", - "timestamp": "2015-02-20T16:50:07.000Z", - "sub_alarms": [ - { - "sub_alarm_expression": { - "function": "AVG", - "metric_name": "cpu.system_perc", - "dimensions": { - "hostname": "devstack" - }, - "operator": "GT", - "threshold": 15, - "period": 60, - "periods": 1, - "deterministic": false - }, - "sub_alarm_state": "OK", - "current_values": [ - 1.5 - ] - } - ] - }, - { - "id": "1424451007003", - "alarm_id": "5ec51b06-193b-49f7-bcf7-b80d11010137", - "metrics": [ - { - "id": null, - "name": "mysql.performance.slow_queries", - "dimensions": { - "component": "mysql", - "service": "mysql", - "hostname": "devstack" - } - } - ], - "old_state": "ALARM", - "new_state": "OK", - "reason": "The alarm threshold(s) have not been exceeded for the sub-alarms: avg(mysql.performance.slow_queries) > 10.0 times 3 with the values: [29.23069852941176, 20.146139705882355, 7.536764705882352]", - "reason_data": "{}", - "timestamp": "2015-02-20T16:12:07.000Z", - "sub_alarms": [ - { - "sub_alarm_expression": { - "function": "AVG", - "metric_name": "mysql.performance.slow_queries", - "dimensions": {}, - "operator": "GT", - "threshold": 10, - "period": 60, - "periods": 3, - "deterministic": false - }, - "sub_alarm_state": "OK", - "current_values": [ - 29.23069852941176, - 20.146139705882355, - 7.536764705882352 - ] - } - ] - }, - { - "id": "1424451007004", - "alarm_id": "5ec51b06-193b-49f7-bcf7-b80d11010137", - "metrics": [ - { - "id": null, - "name": "mysql.performance.slow_queries", - "dimensions": { - "component": "mysql", - "service": "mysql", - "hostname": "devstack" - } - } - ], - "old_state": "OK", - "new_state": "ALARM", - "reason": "Thresholds were exceeded for the sub-alarms: avg(mysql.performance.slow_queries) > 10.0 times 3 with the values: [36.32720588235294, 29.23069852941176, 20.146139705882355]", - "reason_data": "{}", - "timestamp": "2015-02-20T16:11:07.000Z", - "sub_alarms": [ - { - "sub_alarm_expression": { - "function": "AVG", - "metric_name": "mysql.performance.slow_queries", - "dimensions": {}, - "operator": "GT", - "threshold": 10, - "period": 60, - "periods": 3, - "deterministic": false - }, - "sub_alarm_state": "ALARM", - "current_values": [ - 36.32720588235294, - 29.23069852941176, - 20.146139705882355 - ] - } - ] - } - ] -} - -``` -___ - -## Get Alarm -Get the specified alarm. - -### GET /v2.0/alarms/{alarm_id} - -#### Headers -* X-Auth-Token (string, required) - Keystone auth token -* Accept (string) - application/json - -#### Path Parameters -* alarm_id (string, required) - Alarm ID - -#### Query Parameters -None. - -#### Request Body -None. - -### Response -#### Status Code -* 200 - OK - -#### Response Body -Returns a JSON alarm object with the following fields: - -* id (string) - ID of alarm. -* links ([link]) - Links to alarm. 
-* alarm_definition (JSON object) - Summary of alarm definition.
-* metrics ({string, string(255): string(255)}) - The metrics associated with the alarm.
-* state (string) - State of alarm, either `OK`, `ALARM` or `UNDETERMINED`.
-* lifecycle_state (string) - Lifecycle state of alarm.
-* link (string) - Link to an external resource related to the alarm.
-* state_updated_timestamp - Timestamp in ISO 8601 combined date and time format in UTC when the state was last updated.
-* updated_timestamp - Timestamp in ISO 8601 combined date and time format in UTC when any field was last updated.
-* created_timestamp - Timestamp in ISO 8601 combined date and time format in UTC when the alarm was created.
-
-#### Response Examples
-```
-{
-  "id":"f9935bcc-9641-4cbf-8224-0993a947ea83",
-  "links":[
-    {
-      "rel":"self",
-      "href":"http://192.168.10.4:8070/v2.0/alarms/f9935bcc-9641-4cbf-8224-0993a947ea83"
-    },
-    {
-      "rel":"state-history",
-      "href":"http://192.168.10.4:8070/v2.0/alarms/f9935bcc-9641-4cbf-8224-0993a947ea83/state-history"
-    }
-  ],
-  "alarm_definition":
-  {
-    "id":"ad837fca-5564-4cbf-523-0117f7dac6ad",
-    "name":"Average CPU percent greater than 10",
-    "severity":"LOW",
-    "links":[
-      {
-        "rel":"self",
-        "href":"http://192.168.10.4:8070/v2.0/alarm-definitions/ad837fca-5564-4cbf-523-0117f7dac6ad"
-      }
-    ]
-  },
-  "metrics":[{
-    "name":"cpu.system_perc",
-    "dimensions":{
-      "hostname":"devstack"
-    }
-  }],
-  "state":"OK",
-  "lifecycle_state":"OPEN",
-  "link":"http://somesite.com/this-alarm-info",
-  "state_updated_timestamp": "2015-03-20T21:04:49.000Z",
-  "updated_timestamp": "2015-03-20T21:04:49.000Z",
-  "created_timestamp": "2015-03-20T21:03:34.000Z"
-}
-```
-___
-
-## Update Alarm
-Update/Replace the entire state of the specified alarm.
-
-### PUT /v2.0/alarms/{alarm_id}
-
-#### Headers
-* X-Auth-Token (string, required) - Keystone auth token
-* Content-Type (string, required) - application/json
-* Accept (string) - application/json
-
-#### Path Parameters
-* alarm_id (string, required)
-
-#### Query Parameters
-None.
-
-#### Request Body
-Consists of an alarm. An alarm has the following mutable properties:
-
-* state (string) - State of alarm, either `OK`, `ALARM` or `UNDETERMINED`.
-* lifecycle_state (string(50)) - Lifecycle state of alarm.
-* link (string(512)) - Link to an external resource related to the alarm.
-
-#### Request Examples
-```
-PUT /v2.0/alarms/f9935bcc-9641-4cbf-8224-0993a947ea83 HTTP/1.1
-Host: 192.168.10.4:8070
-X-Auth-Token: 2b8882ba2ec44295bf300aecb2caa4f7
-Content-Type: application/json
-Cache-Control: no-cache
-
-{
-  "state":"OK",
-  "lifecycle_state":"OPEN",
-  "link":"http://pagerduty.com/"
-}
-```
-
-### Response
-#### Status Code
-* 200 - OK
-
-#### Response Body
-Returns a JSON alarm object with the following parameters:
-
-* id (string) - ID of alarm.
-* links ([link]) - Links to alarm.
-* alarm_definition_id (string) - ID of the alarm definition.
-* metrics ({string, string(255): string(255)}) - The metrics associated with the alarm.
-* state (string) - State of alarm, either `OK`, `ALARM` or `UNDETERMINED`.
-* lifecycle_state (string) - Lifecycle state of alarm.
-* link (string) - Link to an external resource related to the alarm.
-* state_updated_timestamp - Timestamp in ISO 8601 combined date and time format in UTC when the state was last updated.
-* updated_timestamp - Timestamp in ISO 8601 combined date and time format in UTC when any field was last updated.
-___
-
-## Update Alarm
-Update/replace the entire mutable state of the specified alarm.
-
-### PUT /v2.0/alarms/{alarm_id}
-
-#### Headers
-* X-Auth-Token (string, required) - Keystone auth token
-* Content-Type (string, required) - application/json
-* Accept (string) - application/json
-
-#### Path Parameters
-* alarm_id (string, required) - Alarm ID
-
-#### Query Parameters
-None.
-
-#### Request Body
-Consists of the full mutable state of an alarm. An alarm has the following mutable properties:
-
-* state (string) - State of alarm, either `OK`, `ALARM` or `UNDETERMINED`.
-* lifecycle_state (string(50)) - Lifecycle state of alarm.
-* link (string(512)) - Link to an external resource related to the alarm.
-
-#### Request Examples
-```
-PUT /v2.0/alarms/f9935bcc-9641-4cbf-8224-0993a947ea83 HTTP/1.1
-Host: 192.168.10.4:8070
-X-Auth-Token: 2b8882ba2ec44295bf300aecb2caa4f7
-Content-Type: application/json
-Cache-Control: no-cache
-
-{
-   "state":"OK",
-   "lifecycle_state":"OPEN",
-   "link":"http://pagerduty.com/"
-}
-```
-
-### Response
-#### Status Code
-* 200 - OK
-
-#### Response Body
-Returns a JSON alarm object with the following fields:
-
-* id (string) - ID of alarm.
-* links ([link]) - Links to alarm.
-* alarm_definition_id (string) - ID of the alarm definition the alarm was created from.
-* metrics ({string, string(255): string(255)}) - The metrics associated with the alarm.
-* state (string) - State of alarm, either `OK`, `ALARM` or `UNDETERMINED`.
-* lifecycle_state (string) - Lifecycle state of alarm.
-* link (string) - Link to an external resource related to the alarm.
-* state_updated_timestamp - Timestamp in ISO 8601 combined date and time format in UTC when the state was last updated.
-* updated_timestamp - Timestamp in ISO 8601 combined date and time format in UTC when any field was last updated.
-* created_timestamp - Timestamp in ISO 8601 combined date and time format in UTC when the alarm was created.
-
-#### Response Examples
-```
-{
-   "id":"f9935bcc-9641-4cbf-8224-0993a947ea83",
-   "links":[
-      {
-         "rel":"self",
-         "href":"http://192.168.10.4:8070/v2.0/alarms/f9935bcc-9641-4cbf-8224-0993a947ea83"
-      },
-      {
-         "rel":"state-history",
-         "href":"http://192.168.10.4:8070/v2.0/alarms/f9935bcc-9641-4cbf-8224-0993a947ea83/state-history"
-      }
-   ],
-   "alarm_definition_id":"ad837fca-5564-4cbf-523-0117f7dac6ad",
-   "metrics":[{
-      "name":"cpu.system_perc",
-      "dimensions":{
-         "hostname":"devstack"
-      }
-   }],
-   "state":"OK",
-   "lifecycle_state":"OPEN",
-   "link":"http://somesite.com/this-alarm-info",
-   "state_updated_timestamp": "2015-03-20T21:04:49.000Z",
-   "updated_timestamp": "2015-03-20T21:04:49.000Z",
-   "created_timestamp": "2015-03-20T21:03:34.000Z"
-}
-```
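-
-A minimal sketch of a full update with the Python `requests` library (assumed to be installed), reusing the illustrative endpoint and token from the request example above. Because PUT replaces the entire mutable state, all three properties are supplied on every call:
-
-```
-import requests
-
-# PUT replaces the complete mutable state of the alarm, so state,
-# lifecycle_state, and link must all be present in the body.
-resp = requests.put(
-    "http://192.168.10.4:8070/v2.0/alarms/f9935bcc-9641-4cbf-8224-0993a947ea83",
-    headers={"X-Auth-Token": "2b8882ba2ec44295bf300aecb2caa4f7",
-             "Content-Type": "application/json"},
-    json={"state": "OK", "lifecycle_state": "OPEN",
-          "link": "http://pagerduty.com/"},
-)
-resp.raise_for_status()
-print(resp.json()["lifecycle_state"])
-```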
-___
-
-## Patch Alarm
-Update selected parameters of the specified alarm, such as its state, lifecycle state, or link. To set the lifecycle_state or link field to `null`, use an Update Alarm (PUT) request with `"lifecycle_state":null` and/or `"link":null`.
-
-### PATCH /v2.0/alarms/{alarm_id}
-
-#### Headers
-* X-Auth-Token (string, required) - Keystone auth token
-* Content-Type (string, required) - application/json
-* Accept (string) - application/json
-
-#### Path Parameters
-* alarm_id (string, required) - Alarm ID
-
-#### Query Parameters
-None.
-
-#### Request Body
-Consists of an alarm with the following mutable properties:
-
-* state (string, optional) - State of alarm, either `OK`, `ALARM` or `UNDETERMINED`.
-* lifecycle_state (string(50), optional) - Lifecycle state of alarm.
-* link (string(512), optional) - Link to an external resource related to the alarm.
-
-#### Request Examples
-```
-PATCH /v2.0/alarms/f9935bcc-9641-4cbf-8224-0993a947ea83 HTTP/1.1
-Host: 192.168.10.4:8070
-X-Auth-Token: 2b8882ba2ec44295bf300aecb2caa4f7
-Content-Type: application/json
-Cache-Control: no-cache
-
-{
-   "lifecycle_state":"OPEN",
-   "link":"http://somesite.com/this-alarm-info"
-}
-```
-
-### Response
-#### Status Code
-* 200 - OK
-
-#### Response Body
-Returns a JSON alarm object with the following fields:
-
-* id (string) - ID of alarm.
-* links ([link]) - Links to alarm.
-* alarm_definition_id (string) - ID of the alarm definition the alarm was created from.
-* metrics ({string, string(255): string(255)}) - The metrics associated with the alarm.
-* state (string) - State of alarm, either `OK`, `ALARM` or `UNDETERMINED`.
-* lifecycle_state (string) - Lifecycle state of the alarm.
-* link (string) - Link to an external resource related to the alarm.
-* state_updated_timestamp - Timestamp in ISO 8601 combined date and time format in UTC when the state was last updated.
-* updated_timestamp - Timestamp in ISO 8601 combined date and time format in UTC when any field was last updated.
-* created_timestamp - Timestamp in ISO 8601 combined date and time format in UTC when the alarm was created.
-
-#### Response Examples
-```
-{
-  "id": "f9935bcc-9641-4cbf-8224-0993a947ea83",
-  "links": [
-    {
-      "rel": "self",
-      "href": "http://192.168.10.4:8070/v2.0/alarms/f9935bcc-9641-4cbf-8224-0993a947ea83"
-    },
-    {
-      "rel": "state-history",
-      "href": "http://192.168.10.4:8070/v2.0/alarms/f9935bcc-9641-4cbf-8224-0993a947ea83/state-history"
-    }
-  ],
-  "alarm_definition_id": "ad837fca-5564-4cbf-523-0117f7dac6ad",
-  "metrics": [
-    {
-      "name": "cpu.system_perc",
-      "dimensions": {
-        "hostname": "devstack"
-      }
-    }
-  ],
-  "state": "OK",
-  "lifecycle_state": "OPEN",
-  "link": "http://somesite.com/this-alarm-info",
-  "state_updated_timestamp": "2015-03-20T21:04:49.000Z",
-  "updated_timestamp": "2015-03-20T21:04:49.000Z",
-  "created_timestamp": "2015-03-20T21:03:34.000Z"
-}
-```
-___
-
-## Delete Alarm
-Delete the specified alarm.
-
-### DELETE /v2.0/alarms/{alarm_id}
-
-#### Headers
-* X-Auth-Token (string, required) - Keystone auth token
-
-#### Path Parameters
-* alarm_id (string, required) - Alarm ID
-
-#### Query Parameters
-None.
-
-#### Request Body
-None.
-
-#### Request Examples
-```
-DELETE /v2.0/alarms/b461d659-577b-4d63-9782-a99194d4a472 HTTP/1.1
-Host: 192.168.10.4:8070
-X-Auth-Token: 2b8882ba2ec44295bf300aecb2caa4f7
-Cache-Control: no-cache
-```
-
-### Response
-#### Status Code
-* 204 - No content
-
-#### Response Body
-None.
-___
-
-## List Alarm State History
-List the alarm state history for the specified alarm.
-
-### GET /v2.0/alarms/{alarm_id}/state-history
-
-#### Headers
-* X-Auth-Token (string, required) - Keystone auth token
-* Accept (string) - application/json
-
-#### Path Parameters
-* alarm_id (string, required) - Alarm ID
-
-#### Query Parameters
-* offset (timestamp, optional) - The offset in ISO 8601 combined date and time format in UTC.
-* limit (integer, optional) - The maximum number of state transitions to return.
-
-#### Request Body
-None.
-
-#### Request Examples
-```
-GET /v2.0/alarms/37d1ddf0-d7e3-4fc0-979b-25ac3779d9e0/state-history HTTP/1.1
-Host: 192.168.10.4:8070
-X-Auth-Token: 2b8882ba2ec44295bf300aecb2caa4f7
-Cache-Control: no-cache
-```
-
-### Response
-#### Status Code
-* 200 - OK
-
-#### Response Body
-Returns a JSON object with a `links` array of links and an `elements` array of alarm state transition objects with the following fields:
-
-* id (string) - Alarm state transition ID.
-* alarm_id (string) - Alarm ID.
-* metrics ({string, string, string(255): string(255)}) - The metrics associated with the alarm state transition.
-* old_state (string) - The old state of the alarm. Either `OK`, `ALARM` or `UNDETERMINED`.
-* new_state (string) - The new state of the alarm. Either `OK`, `ALARM` or `UNDETERMINED`.
-* reason (string) - The reason for the state transition.
-* reason_data (string) - The reason for the state transition as a JSON object.
-* timestamp (string) - The time in ISO 8601 combined date and time format in UTC when the state transition occurred.
-* sub_alarms ([JSON object]) - The sub-alarm expressions, their states, and their current values at the time the alarm state transition occurred.
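-
-A full response example follows below. First, a minimal sketch of paging through the history with the Python `requests` library (assumed to be installed), following the `next` link returned in `links`; the endpoint, token, and alarm ID are the illustrative values from the request example above:
-
-```
-import requests
-
-url = ("http://192.168.10.4:8070/v2.0/alarms/"
-       "37d1ddf0-d7e3-4fc0-979b-25ac3779d9e0/state-history")
-headers = {"X-Auth-Token": "2b8882ba2ec44295bf300aecb2caa4f7"}
-
-while url:
-    body = requests.get(url, headers=headers).json()
-    if not body["elements"]:
-        break  # an empty page means the history is exhausted
-    for t in body["elements"]:
-        print(t["timestamp"], t["old_state"], "->", t["new_state"])
-    # Follow the "next" link, if the API returned one.
-    url = next((link["href"] for link in body["links"]
-                if link["rel"] == "next"), None)
-```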
- -#### Response Examples -``` -{ - "links": [ - { - "rel": "self", - "href": "http://192.168.10.4:8070/v2.0/alarms/37d1ddf0-d7e3-4fc0-979b-25ac3779d9e0/state-history" - }, - { - "rel": "next", - "href": "http://192.168.10.4:8070/v2.0/alarms/37d1ddf0-d7e3-4fc0-979b-25ac3779d9e0/state-history?offset=1424452147006" - } - ], - "elements": [ - { - "id": "1424452147003", - "alarm_id": "37d1ddf0-d7e3-4fc0-979b-25ac3779d9e0", - "metrics": [ - { - "id": null, - "name": "cpu.idle_perc", - "dimensions": { - "hostname": "devstack" - } - } - ], - "old_state": "OK", - "new_state": "ALARM", - "reason": "Thresholds were exceeded for the sub-alarms: avg(cpu.idle_perc) < 10.0 times 3 with the values: [0.0, 0.0, 0.0]", - "reason_data": "{}", - "timestamp": "2015-02-20T17:09:07.000Z", - "sub_alarms": [ - { - "sub_alarm_expression": { - "function": "AVG", - "metric_name": "cpu.idle_perc", - "dimensions": {}, - "operator": "LT", - "threshold": 10, - "period": 60, - "periods": 3, - "deterministic": false - }, - "sub_alarm_state": "ALARM", - "current_values": [ - 0, - 0, - 0 - ] - } - ] - }, - { - "id": "1424452147004", - "alarm_id": "37d1ddf0-d7e3-4fc0-979b-25ac3779d9e0", - "metrics": [ - { - "id": null, - "name": "cpu.idle_perc", - "dimensions": { - "hostname": "devstack" - } - } - ], - "old_state": "ALARM", - "new_state": "OK", - "reason": "The alarm threshold(s) have not been exceeded for the sub-alarms: avg(cpu.idle_perc) < 10.0 times 3 with the values: [0.0, 0.0, 72.475]", - "reason_data": "{}", - "timestamp": "2015-02-20T17:02:07.000Z", - "sub_alarms": [ - { - "sub_alarm_expression": { - "function": "AVG", - "metric_name": "cpu.idle_perc", - "dimensions": {}, - "operator": "LT", - "threshold": 10, - "period": 60, - "periods": 3, - "deterministic": false - }, - "sub_alarm_state": "OK", - "current_values": [ - 0, - 0, - 72.475 - ] - } - ] - }, - { - "id": "1424452147005", - "alarm_id": "37d1ddf0-d7e3-4fc0-979b-25ac3779d9e0", - "metrics": [ - { - "id": null, - "name": "cpu.idle_perc", - "dimensions": { - "hostname": "devstack" - } - } - ], - "old_state": "OK", - "new_state": "ALARM", - "reason": "Thresholds were exceeded for the sub-alarms: avg(cpu.idle_perc) < 10.0 times 3 with the values: [0.0, 0.0, 0.0]", - "reason_data": "{}", - "timestamp": "2015-02-20T16:56:07.000Z", - "sub_alarms": [ - { - "sub_alarm_expression": { - "function": "AVG", - "metric_name": "cpu.idle_perc", - "dimensions": {}, - "operator": "LT", - "threshold": 10, - "period": 60, - "periods": 3, - "deterministic": false - }, - "sub_alarm_state": "ALARM", - "current_values": [ - 0, - 0, - 0 - ] - } - ] - }, - { - "id": "1424452147006", - "alarm_id": "37d1ddf0-d7e3-4fc0-979b-25ac3779d9e0", - "metrics": [ - { - "id": null, - "name": "cpu.idle_perc", - "dimensions": { - "hostname": "devstack" - } - } - ], - "old_state": "UNDETERMINED", - "new_state": "OK", - "reason": "The alarm threshold(s) have not been exceeded", - "reason_data": "{}", - "timestamp": "2015-02-20T15:02:30.000Z", - "sub_alarms": [] - } - ] -} - -``` -___ - -# Logs -The logs resource allows logs to be created and queried. - -## Create Logs -Create logs. - -### POST /v2.0/logs - -#### Headers -* X-Auth-Token (string, required) - Keystone auth token -* Content-Type (string, required) - application/json - -#### Path Parameters -None. - -#### Query Parameters -* tenant_id (string, optional, restricted) - Tenant ID (project ID) to create - log on behalf of. Usage of this query parameter requires the role specified - in the configuration option `delegate_roles` . 
-
-#### Request Body
-A JSON object with a maximum size of 5 MB, consisting of optional global
-dimensions and an array of logs. Each individual log message, together with
-its resulting envelope, can have a maximum size of 1 MB.
-Dimensions is a dictionary of key-value pairs and should be consistent with
-metric dimensions.
-
-Logs is an array of JSON objects describing the log entries. Every log object
-can carry its own set of dimensions, which take precedence over the global
-ones. Dimensions in each individual log record are also optional.
-
-    If both global (root-level) and local (log-entry-level) dimensions are
-    present, they are merged into one dictionary. Local dimensions are
-    logically more specific, so in case of a conflict (the same key appearing
-    in both the global and the local dimensions) the local value takes
-    precedence. For example, with global dimensions
-    `{"hostname":"mini-mon","service":"monitoring"}` and local dimensions
-    `{"service":"mysql"}`, the log entry is stored with
-    `{"hostname":"mini-mon","service":"mysql"}`.
-
-#### Request Examples
-
-POST logs
-
-```
-POST /v2.0/logs HTTP/1.1
-Host: 192.168.10.4:5607
-Content-Type: application/json
-X-Auth-Token: 27feed73a0ce4138934e30d619b415b0
-Cache-Control: no-cache
-
-{
-   "dimensions":{
-      "hostname":"mini-mon",
-      "service":"monitoring"
-   },
-   "logs":[
-      {
-         "message":"msg1",
-         "dimensions":{
-            "component":"mysql",
-            "path":"/var/log/mysql.log"
-         }
-      },
-      {
-         "message":"msg2",
-         "dimensions":{
-            "component":"monasca-api",
-            "path":"/var/log/monasca/monasca-api.log"
-         }
-      }
-   ]
-}
-```
-
-### Response
-#### Status Code
-* 204 - No content
-
-#### Response Body
-This request does not return a response body.
-___
-
-# License
-(C) Copyright 2014-2016 Hewlett Packard Enterprise Development LP
-(C) Copyright 2019 FUJITSU LIMITED
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-implied.
-See the License for the specific language governing permissions and
-limitations under the License.
diff --git a/etc/api-config.ini b/etc/api-config.ini
deleted file mode 100644
index e8444c194..000000000
--- a/etc/api-config.ini
+++ /dev/null
@@ -1,27 +0,0 @@
-[DEFAULT]
-name = monasca_api
-
-[pipeline:main]
-pipeline = request_id auth api
-
-[app:api]
-paste.app_factory = monasca_api.api.server:launch
-
-[filter:auth]
-paste.filter_factory = monasca_api.healthcheck.keystone_protocol:filter_factory
-
-[filter:request_id]
-paste.filter_factory = oslo_middleware.request_id:RequestId.factory
-
-[server:main]
-use = egg:gunicorn#main
-host = 127.0.0.1
-port = 8070
-workers = 9
-worker-connections = 2000
-worker-class = eventlet
-timeout = 30
-backlog = 2048
-keepalive = 2
-proc_name = monasca-api
-loglevel = DEBUG
diff --git a/etc/api-logging.conf b/etc/api-logging.conf
deleted file mode 100644
index 41d547773..000000000
--- a/etc/api-logging.conf
+++ /dev/null
@@ -1,49 +0,0 @@
-[loggers]
-keys = root, sqlalchemy, kafka, kafkalib
-
-[handlers]
-keys = console, file
-
-[formatters]
-keys = context
-
-[logger_root]
-level = DEBUG
-handlers = console, file
-
-[logger_sqlalchemy]
-qualname = sqlalchemy.engine
-# "level = INFO" logs SQL queries.
-# "level = DEBUG" logs SQL queries and results.
-# "level = WARN" logs neither. (Recommended for production systems.)
-level = DEBUG -handlers = console, file -propagate=0 - -[logger_kafka] -qualname = kafka -level = DEBUG -handlers = console, file -propagate = 0 - -[logger_kafkalib] -qualname = monasca_common.kafka_lib -level = INFO -handlers = console, file -propagate = 0 - -[handler_console] -class = logging.StreamHandler -args = (sys.stderr,) -level = DEBUG -formatter = context - -[handler_file] -class = logging.handlers.RotatingFileHandler -level = DEBUG -formatter = context -# store up to 5*100MB of logs -args = ('/var/log/monasca/api/monasca-api.log', 'a', 104857600, 5) - -[formatter_context] -class = oslo_log.formatters.ContextFormatter \ No newline at end of file diff --git a/etc/api-uwsgi.ini b/etc/api-uwsgi.ini deleted file mode 100644 index 74e0dc2c7..000000000 --- a/etc/api-uwsgi.ini +++ /dev/null @@ -1,25 +0,0 @@ -[uwsgi] -wsgi-file = /usr/local/bin/monasca-api-wsgi - -# Versions of mod_proxy_uwsgi>=2.0.6 should use a UNIX socket, see -# http://uwsgi-docs.readthedocs.org/en/latest/Apache.html#mod-proxy-uwsgi -uwsgi-socket = 127.0.0.1:8070 - -# Override the default size for headers from the 4k default. -buffer-size = 65535 - -# This is running standalone -master = true - -enable-threads = true - -# Tune this to your environment. -processes = 4 - -# uwsgi recommends this to prevent thundering herd on accept. -thunder-lock = true - -plugins = python - -# This ensures that file descriptors aren't shared between keystone processes. -lazy-apps = true \ No newline at end of file diff --git a/java/pom.xml b/java/pom.xml deleted file mode 100644 index ee6b8b0e7..000000000 --- a/java/pom.xml +++ /dev/null @@ -1,435 +0,0 @@ - - 4.0.0 - - monasca-api - monasca-api - 1.2.1-SNAPSHOT - http://github.com/openstack/monasca-api - jar - - - 3.0 - - - - - ${maven.build.timestamp} - yyyy-MM-dd'T'HH:mm:ss - ${project.version}-${timestamp}-${gitRevision} - ${project.artifactId}-${computedVersion} - 1.3.0-SNAPSHOT - 0.7.0 - 2.3.9 - - false - UTF-8 - UTF-8 - ${project.artifactId}-${project.version}-shaded - - - - - scm:git:git@git.openstack.org:openstack/monasca-api - scm:git:git.openstack.org:openstack/monasca-api - - - - - release-deploy-url-override - - - BUILD_NUM - - - - ${versionNumber}.${BUILD_NUM} - - - - - - - monasca-common - monasca-common-influxdb - ${mon.common.version} - - - monasca-common - monasca-common-model - ${mon.common.version} - - - monasca-common - monasca-common-persistence - ${mon.common.version} - - - monasca-common - monasca-common-util - ${mon.common.version} - - - monasca-common - monasca-common-hibernate - ${mon.common.version} - - - commons-validator - commons-validator - 1.4.0 - - - monasca-common - monasca-common-kafka - ${mon.common.version} - - - monasca-common - monasca-common-middleware - ${mon.common.version} - - - io.dropwizard - dropwizard-core - ${dropwizard.version} - - - io.dropwizard - dropwizard-db - ${dropwizard.version} - - - io.dropwizard - dropwizard-jdbi - ${dropwizard.version} - - - io.dropwizard - dropwizard-assets - ${dropwizard.version} - - - io.dropwizard - dropwizard-jersey - ${dropwizard.version} - - - mysql - mysql-connector-java - 5.1.36 - - - org.drizzle.jdbc - drizzle-jdbc - 1.4 - - - com.google.code.findbugs - jsr305 - 2.0.0 - - - org.apache.curator - curator-recipes - 2.2.0-incubating - - - org.slf4j - slf4j-log4j12 - - - - - org.apache.kafka - kafka_2.11 - 0.8.2.2 - - - com.sun.jdmk - jmxtools - - - com.sun.jmx - jmxri - - - org.slf4j - slf4j-simple - - - - - org.influxdb - influxdb-java - 1.0 - - - postgresql - postgresql - 9.1-901.jdbc4 - - - 
com.zaxxer - HikariCP-java6 - ${hikaricp.version} - compile - - - - - monasca-common - monasca-common-testing - ${mon.common.version} - test - - - monasca-common - monasca-common-dropwizard - ${mon.common.version} - test-jar - test - - - io.dropwizard - dropwizard-testing - ${dropwizard.version} - test - - - org.mockito - mockito-all - 1.9.5 - test - - - com.github.docker-java - docker-java - 0.9.1 - test - - - com.jayway.restassured - rest-assured - 2.3.2 - - - commons-io - commons-io - 2.4 - - - org.testng - testng - 6.8.8 - test - - - org.apache.httpcomponents - httpclient - 4.4 - - - - - - - maven-clean-plugin - 2.5 - - - - ${project.basedir}/debs - - - - - - org.codehaus.mojo - buildnumber-maven-plugin - 1.1 - - - validate - - create - - - - - false - 6 - - - - org.apache.maven.plugins - maven-compiler-plugin - 3.1 - - 1.7 - 1.7 - - - - org.apache.maven.plugins - maven-surefire-plugin - 2.17 - - - org.apache.maven.surefire - surefire-testng - 2.17 - - - - performance,functional,integration,database,slow - - - - - org.apache.maven.plugins - maven-failsafe-plugin - 2.17 - - performance,functional,integration,database,slow,orm - ${skipITs} - methods - 4 - - - - - - org.apache.maven.plugins - maven-shade-plugin - 1.2 - - ${computedName} - true - - - - org.eclipse.jetty.orbit:javax.servlet - - META-INF/*.SF - META-INF/*.DSA - META-INF/*.RSA - - - - - - org.hamcrest:hamcrest-core - org.hamcrest:hamcrest-library - - - - - - package - - shade - - - - - - monasca.api.MonApiApplication - - - true - - - - - - org.apache.maven.plugins - maven-jar-plugin - 2.4 - - - - monasca.api - - - ${project.artifactId}-${computedVersion} - - - - - - maven-assembly-plugin - 2.4.1 - - - src/assembly/tar.xml - - ${artifactNamedVersion} - - - - make-assembly - package - - single - - - - - - jdeb - org.vafer - 1.0 - - - package - - jdeb - - - ${project.basedir}/debs/binaries/${computedName}.deb - - - file - ${project.build.directory}/${shadedJarName}.jar - /opt/monasca/monasca-api.jar - - - file - ${project.basedir}/src/deb/etc/api-config.yml-sample - - /etc/monasca/api-config.yml-sample - - - - - - - - org.apache.maven.plugins - maven-source-plugin - 2.3 - - - attach-sources - - jar - - - - - - org.apache.maven.plugins - maven-scm-plugin - 1.9.2 - - ${project.version} - - - - - - diff --git a/java/src/assembly/tar.xml b/java/src/assembly/tar.xml deleted file mode 100644 index 2d55927e3..000000000 --- a/java/src/assembly/tar.xml +++ /dev/null @@ -1,29 +0,0 @@ - - tar - - tar.gz - - - - ${project.basedir} - / - - README* - LICENSE* - - - - - - ${project.build.directory}/${shadedJarName}.jar - / - monasca-api.jar - - - ${project.basedir}/src/deb/etc/api-config.yml-sample - examples - - - diff --git a/java/src/deb/control/control b/java/src/deb/control/control deleted file mode 100644 index f0fdbc6ad..000000000 --- a/java/src/deb/control/control +++ /dev/null @@ -1,9 +0,0 @@ -Package: [[name]] -Section: misc -Priority: optional -Architecture: all -Depends: openjdk-7-jre-headless | openjdk-7-jre -Version: [[version]]-[[timestamp]]-[[buildNumber]] -Maintainer: Monasca Team -Description: Monasca-API - RESTful API for all Monasca data. 
diff --git a/java/src/deb/control/prerm b/java/src/deb/control/prerm deleted file mode 100644 index 412293690..000000000 --- a/java/src/deb/control/prerm +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/sh - -case "$1" in - remove) - stop monasca-api - ;; -esac - -exit 0 diff --git a/java/src/deb/etc/api-config.yml-sample b/java/src/deb/etc/api-config.yml-sample deleted file mode 100755 index 996e2ef39..000000000 --- a/java/src/deb/etc/api-config.yml-sample +++ /dev/null @@ -1,148 +0,0 @@ -# The region for which all metrics passing through this server will be persisted -region: region-a - -# Maximum rows (Mysql) or points (Influxdb) to return when listing elements -maxQueryLimit: 10000 - -# Whether this server is running on a secure port -accessedViaHttps: false - -# Topic for publishing metrics to -metricsTopic: metrics - -# Topic for publishing domain events to -eventsTopic: events - -kafka: - brokerUris: - - 192.168.10.4:9092 - zookeeperUris: - - 192.168.10.4:2181 - healthCheckTopic: healthcheck - -mysql: - driverClass: com.mysql.jdbc.Driver - url: jdbc:mysql://192.168.10.4:3306/mon?connectTimeout=5000&autoReconnect=true&useLegacyDatetimeCode=false - user: monapi - password: password - maxWaitForConnection: 1s - validationQuery: "/* MyService Health Check */ SELECT 1" - minSize: 8 - maxSize: 32 - checkConnectionWhileIdle: false - checkConnectionOnBorrow: true - -databaseConfiguration: - databaseType: influxdb - -influxDB: - version: V9 - maxHttpConnections: 100 - # Retention policy may be left blank to indicate default policy. - retentionPolicy: - name: mon - url: http://192.168.10.4:8086 - user: mon_api - password: password - -vertica: - driverClass: com.vertica.jdbc.Driver - url: jdbc:vertica://192.168.10.8/mon - user: dbadmin - password: password - maxWaitForConnection: 1s - validationQuery: "/* MyService Health Check */ SELECT 1" - minSize: 4 - maxSize: 32 - checkConnectionWhileIdle: false - # - # vertica database hint to be added to SELECT - # statements. For example, the hint below is used - # to tell vertica that the query can be satisfied - # locally (replicated projection). - # - # dbHint: "/*+KV(01)*/" - dbHint: "" - -middleware: - enabled: true - serverVIP: identity.example.com - serverPort: 9543 - useHttps: true - truststore: etc/monasca/truststore.jks - truststorePass: changeit - connTimeout: 500 - connSSLClientAuth: true - keystore: etc/monasca/keystore.jks - keystorePass: changeit - connPoolMaxActive: 3 - connPoolMaxIdle: 3 - connPoolEvictPeriod: 600000 - connPoolMinIdleTime: 600000 - connRetryTimes: 2 - connRetryInterval: 50 - defaultAuthorizedRoles: [user, domainuser, domainadmin, monasca-user] - readOnlyAuthorizedRoles: [monasca-read-only-user] - agentAuthorizedRoles: [monasca-agent] - adminAuthMethod: password - adminUser: admin - adminPassword: admin - adminProjectId: - adminProjectName: - adminUserDomainId: - adminUserDomainName: - adminProjectDomainId: - adminProjectDomainName: - adminToken: - timeToCacheToken: 600 - maxTokenCacheSize: 1048576 - -server: - applicationConnectors: - - type: http - port: 8070 - maxRequestHeaderSize: 16KiB # Allow large headers used by keystone tokens - requestLog: - timeZone: UTC - appenders: - - type: file - currentLogFilename: /var/log/monasca/api/request.log - threshold: ALL - archive: true - archivedLogFilenamePattern: /var/log/monasca/api/request-%d.log.gz - archivedFileCount: 5 - -# Logging settings. -logging: - - # The default level of all loggers. Can be OFF, ERROR, WARN, INFO, DEBUG, TRACE, or ALL. 
- level: WARN - - # Logger-specific levels. - loggers: - - # Sets the level for 'com.example.app' to DEBUG. - com.example.app: DEBUG - - appenders: - - type: console - threshold: ALL - timeZone: UTC - target: stdout - logFormat: # TODO - - - type: file - currentLogFilename: /var/log/monasca/api/monasca-api.log - threshold: ALL - archive: true - archivedLogFilenamePattern: /var/log/monasca/api/monasca-api-%d.log.gz - archivedFileCount: 5 - timeZone: UTC - logFormat: # TODO - - - type: syslog - host: localhost - port: 514 - facility: local0 - threshold: ALL - logFormat: # TODO diff --git a/java/src/main/java/monasca/api/ApiConfig.java b/java/src/main/java/monasca/api/ApiConfig.java deleted file mode 100644 index f7b5127bf..000000000 --- a/java/src/main/java/monasca/api/ApiConfig.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api; - -import com.fasterxml.jackson.annotation.JsonProperty; -import monasca.common.hibernate.configuration.HibernateDbConfiguration; -import monasca.common.messaging.kafka.KafkaConfiguration; -import monasca.api.infrastructure.middleware.MiddlewareConfiguration; -import monasca.api.infrastructure.persistence.vertica.VerticaDataSourceFactory; -import monasca.common.configuration.DatabaseConfiguration; - -import monasca.common.configuration.InfluxDbConfiguration; -import io.dropwizard.Configuration; -import io.dropwizard.db.DataSourceFactory; - -import org.hibernate.validator.constraints.NotEmpty; - -import java.util.List; - -import javax.validation.Valid; -import javax.validation.constraints.NotNull; - - -public class ApiConfig extends Configuration { - @NotEmpty - public String region; - @NotNull - public Boolean accessedViaHttps; - @NotEmpty - public String metricsTopic = "metrics"; - @NotEmpty - public String eventsTopic = "events"; - @NotNull - public int maxQueryLimit; - @NotEmpty - public String alarmStateTransitionsTopic = "alarm-state-transitions"; - @NotEmpty - public List validNotificationPeriods; - @Valid - @NotNull - public DataSourceFactory mysql; - @Valid - @NotNull - public VerticaDataSourceFactory vertica; - @Valid - @NotNull - public KafkaConfiguration kafka; - @Valid - @NotNull - public MiddlewareConfiguration middleware; - @Valid - public InfluxDbConfiguration influxDB; - @Valid - @JsonProperty - public DatabaseConfiguration databaseConfiguration; - @Valid - public HibernateDbConfiguration hibernate; -} diff --git a/java/src/main/java/monasca/api/MonApiApplication.java b/java/src/main/java/monasca/api/MonApiApplication.java deleted file mode 100644 index 64cc7333a..000000000 --- a/java/src/main/java/monasca/api/MonApiApplication.java +++ /dev/null @@ -1,255 +0,0 @@ -/* - * (C) Copyright 2014,2016 Hewlett Packard Enterprise Development LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api; - -import com.fasterxml.jackson.databind.DeserializationFeature; -import com.fasterxml.jackson.databind.PropertyNamingStrategy; -import com.fasterxml.jackson.databind.SerializationFeature; -import com.fasterxml.jackson.databind.module.SimpleModule; - -import org.eclipse.jetty.servlets.CrossOriginFilter; - -import java.util.HashMap; -import java.util.Iterator; -import java.util.Map; -import java.util.Set; - -import javax.servlet.FilterRegistration.Dynamic; -import javax.ws.rs.ext.ExceptionMapper; - -import io.dropwizard.Application; -import io.dropwizard.jdbi.bundles.DBIExceptionsBundle; -import io.dropwizard.setup.Bootstrap; -import io.dropwizard.setup.Environment; -import monasca.api.infrastructure.servlet.MockAuthenticationFilter; -import monasca.api.infrastructure.servlet.PostAuthenticationFilter; -import monasca.api.infrastructure.servlet.PreAuthenticationFilter; -import monasca.api.infrastructure.servlet.RoleAuthorizationFilter; -import monasca.api.resource.AlarmDefinitionResource; -import monasca.api.resource.AlarmResource; -import monasca.api.resource.DimensionResource; -import monasca.api.resource.MeasurementResource; -import monasca.api.resource.MetricResource; -import monasca.api.resource.NotificationMethodResource; -import monasca.api.resource.NotificationMethodTypesResource; -import monasca.api.resource.StatisticResource; -import monasca.api.resource.VersionResource; -import monasca.api.resource.exception.ConstraintViolationExceptionMapper; -import monasca.api.resource.exception.EntityExistsExceptionMapper; -import monasca.api.resource.exception.EntityNotFoundExceptionMapper; -import monasca.api.resource.exception.IllegalArgumentExceptionMapper; -import monasca.api.resource.exception.InvalidEntityExceptionMapper; -import monasca.api.resource.exception.JsonMappingExceptionManager; -import monasca.api.resource.exception.JsonProcessingExceptionMapper; -import monasca.api.resource.exception.MultipleMetricsExceptionMapper; -import monasca.api.resource.exception.ThrowableExceptionMapper; -import monasca.api.resource.serialization.SubAlarmExpressionSerializer; -import monasca.common.middleware.AuthConstants; -import monasca.common.middleware.TokenAuth; -import monasca.common.util.Injector; - -/** - * Monitoring API application. 
- */ -public class MonApiApplication extends Application { - public static void main(String[] args) throws Exception { - /* - * This should allow command line options to show the current version - * java -jar monasca-api.jar --version - * java -jar monasca-api.jar -version - * java -jar monasca-api.jar version - * Really anything with the word version in it will show the - * version as long as there is only one argument - * */ - if (args.length == 1 && args[0].toLowerCase().contains("version")) { - showVersion(); - System.exit(0); - } - - new MonApiApplication().run(args); - } - - private static void showVersion() { - Package pkg; - pkg = Package.getPackage("monasca.api"); - - System.out.println("-------- Version Information --------"); - System.out.println(pkg.getImplementationVersion()); - } - - @Override - public void initialize(Bootstrap bootstrap) { - /** Configure bundles */ - bootstrap.addBundle(new DBIExceptionsBundle()); - } - - @Override - public String getName() { - return "HP Cloud Monitoring"; - } - - @Override - @SuppressWarnings("unchecked") - public void run(ApiConfig config, Environment environment) throws Exception { - /** Wire services */ - Injector.registerModules(new MonApiModule(environment, config)); - - /** Configure resources */ - environment.jersey().register(Injector.getInstance(VersionResource.class)); - environment.jersey().register(Injector.getInstance(AlarmDefinitionResource.class)); - environment.jersey().register(Injector.getInstance(AlarmResource.class)); - environment.jersey().register(Injector.getInstance(DimensionResource.class)); - environment.jersey().register(Injector.getInstance(MetricResource.class)); - environment.jersey().register(Injector.getInstance(MeasurementResource.class)); - environment.jersey().register(Injector.getInstance(StatisticResource.class)); - environment.jersey().register(Injector.getInstance(NotificationMethodResource.class)); - environment.jersey().register(Injector.getInstance(NotificationMethodTypesResource.class)); - - /** Configure providers */ - removeExceptionMappers(environment.jersey().getResourceConfig().getSingletons()); - environment.jersey().register(new EntityExistsExceptionMapper()); - environment.jersey().register(new EntityNotFoundExceptionMapper()); - environment.jersey().register(new IllegalArgumentExceptionMapper()); - environment.jersey().register(new InvalidEntityExceptionMapper()); - environment.jersey().register(new JsonProcessingExceptionMapper()); - environment.jersey().register(new JsonMappingExceptionManager()); - environment.jersey().register(new ConstraintViolationExceptionMapper()); - environment.jersey().register(new ThrowableExceptionMapper() {}); - environment.jersey().register(new MultipleMetricsExceptionMapper()); - - /** Configure Jackson */ - environment.getObjectMapper().setPropertyNamingStrategy( - PropertyNamingStrategy.CAMEL_CASE_TO_LOWER_CASE_WITH_UNDERSCORES); - environment.getObjectMapper().enable(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY); - environment.getObjectMapper().disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS); - environment.getObjectMapper().disable(DeserializationFeature.WRAP_EXCEPTIONS); - SimpleModule module = new SimpleModule("SerializationModule"); - module.addSerializer(new SubAlarmExpressionSerializer()); - environment.getObjectMapper().registerModule(module); - - - /** Configure CORS filter */ - Dynamic corsFilter = environment.servlets().addFilter("cors", CrossOriginFilter.class); - corsFilter.addMappingForUrlPatterns(null, true, "/*"); - 
corsFilter.setInitParameter("allowedOrigins", "*"); - corsFilter.setInitParameter("allowedHeaders", - "X-Requested-With,Content-Type,Accept,Origin,X-Auth-Token"); - corsFilter.setInitParameter("allowedMethods", "OPTIONS,GET,HEAD"); - - if (config.middleware.enabled) { - ensureHasValue(config.middleware.serverVIP, "serverVIP", "enabled", "true"); - ensureHasValue(config.middleware.serverPort, "serverPort", "enabled", "true"); - ensureHasValue(config.middleware.adminAuthMethod, "adminAuthMethod", "enabled", "true"); - if ("password".equalsIgnoreCase(config.middleware.adminAuthMethod)) { - ensureHasValue(config.middleware.adminUser, "adminUser", "adminAuthMethod", "password"); - ensureHasValue(config.middleware.adminPassword, "adminPassword", "adminAuthMethod", "password"); - } else if ("token".equalsIgnoreCase(config.middleware.adminAuthMethod)) { - ensureHasValue(config.middleware.adminToken, "adminToken", "adminAuthMethod", "token"); - } else { - throw new Exception(String.format( - "Invalid value '%s' for adminAuthMethod. Must be either password or token", - config.middleware.adminAuthMethod)); - } - if (config.middleware.defaultAuthorizedRoles == null || config.middleware.defaultAuthorizedRoles.isEmpty()) { - ensureHasValue(null, "defaultAuthorizedRoles", "enabled", "true"); - } - if (config.middleware.connSSLClientAuth) { - ensureHasValue(config.middleware.keystore, "keystore", "connSSLClientAuth", "true"); - ensureHasValue(config.middleware.keystorePassword, "keystorePassword", "connSSLClientAuth", "true"); - } - Map authInitParams = new HashMap(); - authInitParams.put("ServerVIP", config.middleware.serverVIP); - authInitParams.put("ServerPort", config.middleware.serverPort); - authInitParams.put(AuthConstants.USE_HTTPS, String.valueOf(config.middleware.useHttps)); - authInitParams.put("ConnTimeout", config.middleware.connTimeout); - authInitParams.put("ConnSSLClientAuth", String.valueOf(config.middleware.connSSLClientAuth)); - authInitParams.put("ConnPoolMaxActive", config.middleware.connPoolMaxActive); - authInitParams.put("ConnPoolMaxIdle", config.middleware.connPoolMaxActive); - authInitParams.put("ConnPoolEvictPeriod", config.middleware.connPoolEvictPeriod); - authInitParams.put("ConnPoolMinIdleTime", config.middleware.connPoolMinIdleTime); - authInitParams.put("ConnRetryTimes", config.middleware.connRetryTimes); - authInitParams.put("ConnRetryInterval", config.middleware.connRetryInterval); - authInitParams.put("AdminToken", config.middleware.adminToken); - authInitParams.put("TimeToCacheToken", config.middleware.timeToCacheToken); - authInitParams.put("AdminAuthMethod", config.middleware.adminAuthMethod); - authInitParams.put("AdminUser", config.middleware.adminUser); - authInitParams.put("AdminPassword", config.middleware.adminPassword); - authInitParams.put(AuthConstants.ADMIN_PROJECT_ID, config.middleware.adminProjectId); - authInitParams.put(AuthConstants.ADMIN_PROJECT_NAME, config.middleware.adminProjectName); - authInitParams.put(AuthConstants.ADMIN_USER_DOMAIN_ID, config.middleware.adminUserDomainId); - authInitParams.put(AuthConstants.ADMIN_USER_DOMAIN_NAME, config.middleware.adminUserDomainName); - authInitParams.put(AuthConstants.ADMIN_PROJECT_DOMAIN_ID, config.middleware.adminProjectDomainId); - authInitParams.put(AuthConstants.ADMIN_PROJECT_DOMAIN_NAME, config.middleware.adminProjectDomainName); - authInitParams.put("MaxTokenCacheSize", config.middleware.maxTokenCacheSize); - setIfNotNull(authInitParams, AuthConstants.TRUSTSTORE, config.middleware.truststore); - 
setIfNotNull(authInitParams, AuthConstants.TRUSTSTORE_PASS, config.middleware.truststorePassword); - setIfNotNull(authInitParams, AuthConstants.KEYSTORE, config.middleware.keystore); - setIfNotNull(authInitParams, AuthConstants.KEYSTORE_PASS, config.middleware.keystorePassword); - - /** Configure auth filters */ - Dynamic preAuthenticationFilter = - environment.servlets().addFilter("pre-auth", new PreAuthenticationFilter()); - preAuthenticationFilter.addMappingForUrlPatterns(null, true, "/"); - preAuthenticationFilter.addMappingForUrlPatterns(null, true, "/v2.0/*"); - - Dynamic tokenAuthFilter = environment.servlets().addFilter("token-auth", new TokenAuth()); - tokenAuthFilter.addMappingForUrlPatterns(null, true, "/"); - tokenAuthFilter.addMappingForUrlPatterns(null, true, "/v2.0/*"); - tokenAuthFilter.setInitParameters(authInitParams); - - Dynamic postAuthenticationFilter = - environment.servlets().addFilter( - "post-auth", - new PostAuthenticationFilter(config.middleware.defaultAuthorizedRoles, - config.middleware.agentAuthorizedRoles, - config.middleware.readOnlyAuthorizedRoles)); - postAuthenticationFilter.addMappingForUrlPatterns(null, true, "/"); - postAuthenticationFilter.addMappingForUrlPatterns(null, true, "/v2.0/*"); - - environment.jersey().getResourceConfig().getContainerRequestFilters() - .add(new RoleAuthorizationFilter()); - } else { - Dynamic mockAuthenticationFilter = - environment.servlets().addFilter("mock-auth", new MockAuthenticationFilter()); - mockAuthenticationFilter.addMappingForUrlPatterns(null, true, "/"); - mockAuthenticationFilter.addMappingForUrlPatterns(null, true, "/v2.0/*"); - } - } - - private void ensureHasValue(final String value, final String what, final String control, - final String controlValue) throws Exception { - if (value == null || value.isEmpty()) { - final String message = - String - .format( - "Since %s in middleware section of configuration file is set to %s, %s must have a value", - control, controlValue, what); - throw new Exception(message); - } - } - - private void setIfNotNull(Map authInitParams, String name, String value) { - if (value != null) { - authInitParams.put(name, value); - } - } - - private void removeExceptionMappers(Set items) { - for (Iterator i = items.iterator(); i.hasNext();) { - Object o = i.next(); - if (o instanceof ExceptionMapper) - i.remove(); - } - } -} diff --git a/java/src/main/java/monasca/api/MonApiModule.java b/java/src/main/java/monasca/api/MonApiModule.java deleted file mode 100644 index b6e03ff8f..000000000 --- a/java/src/main/java/monasca/api/MonApiModule.java +++ /dev/null @@ -1,222 +0,0 @@ -/* - * (C) Copyright 2014,2016 Hewlett Packard Enterprise Development LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
- */ -package monasca.api; - -import ch.qos.logback.classic.Level; -import io.dropwizard.db.DataSourceFactory; -import io.dropwizard.jdbi.DBIFactory; -import io.dropwizard.setup.Environment; - -import java.util.Arrays; -import java.util.Properties; - -import javax.inject.Named; -import javax.inject.Singleton; - -import kafka.javaapi.producer.Producer; -import kafka.producer.ProducerConfig; - -import org.hibernate.SessionFactory; -import org.hibernate.boot.registry.StandardServiceRegistryBuilder; -import org.hibernate.cfg.Configuration; -import org.hibernate.service.ServiceRegistry; -import org.skife.jdbi.v2.DBI; - -import com.codahale.metrics.MetricRegistry; -import com.google.common.base.Joiner; -import com.google.inject.AbstractModule; -import com.google.inject.Provides; -import com.google.inject.ProvisionException; -import com.google.inject.name.Names; - -import monasca.api.app.ApplicationModule; -import monasca.api.domain.DomainModule; -import monasca.api.infrastructure.InfrastructureModule; -import monasca.common.hibernate.db.AlarmActionDb; -import monasca.common.hibernate.db.AlarmActionId; -import monasca.common.hibernate.db.AlarmDb; -import monasca.common.hibernate.db.AlarmDefinitionDb; -import monasca.common.hibernate.db.AlarmMetricDb; -import monasca.common.hibernate.db.AlarmMetricId; -import monasca.common.hibernate.db.MetricDefinitionDb; -import monasca.common.hibernate.db.MetricDefinitionDimensionsDb; -import monasca.common.hibernate.db.MetricDimensionDb; -import monasca.common.hibernate.db.NotificationMethodDb; -import monasca.common.hibernate.db.NotificationMethodTypesDb; -import monasca.common.hibernate.db.SubAlarmDb; -import monasca.common.hibernate.db.SubAlarmDefinitionDb; -import monasca.common.hibernate.db.SubAlarmDefinitionDimensionDb; - -/** - * Monitoring API server bindings. 
- */ -public class MonApiModule - extends AbstractModule { - /** - * PostgresSQL {@link javax.sql.DataSource} class name - */ - private static final String POSTGRES_DS_CLASS = "org.postgresql.ds.PGPoolingDataSource"; - /** - * MySQL {@link javax.sql.DataSource} class name - */ - private static final String MYSQL_DS_CLASS = "com.mysql.jdbc.jdbc2.optional.MysqlDataSource"; - private final ApiConfig config; - private final Environment environment; - - public MonApiModule(Environment environment, ApiConfig config) { - this.environment = environment; - this.config = config; - } - - @Override - protected void configure() { - bind(ApiConfig.class).toInstance(config); - bind(MetricRegistry.class).toInstance(environment.metrics()); - if (!this.isHibernateEnabled()) { - bind(DataSourceFactory.class).annotatedWith(Names.named("mysql")).toInstance(config.mysql); - } - bind(DataSourceFactory.class).annotatedWith(Names.named("vertica")).toInstance(config.vertica); - - install(new ApplicationModule()); - install(new DomainModule()); - install(new InfrastructureModule(this.config)); - } - - @Provides - @Singleton - @Named("orm") - public SessionFactory getSessionFactory() { - - if (config.hibernate == null) { - throw new ProvisionException("Unable to provision ORM DBI, couldn't locate hibernate configuration"); - } - - try { - Configuration configuration = new Configuration(); - - configuration.addAnnotatedClass(AlarmDb.class); - configuration.addAnnotatedClass(AlarmActionDb.class); - configuration.addAnnotatedClass(AlarmActionId.class); - configuration.addAnnotatedClass(AlarmDefinitionDb.class); - configuration.addAnnotatedClass(AlarmMetricDb.class); - configuration.addAnnotatedClass(AlarmMetricId.class); - configuration.addAnnotatedClass(MetricDefinitionDb.class); - configuration.addAnnotatedClass(MetricDefinitionDimensionsDb.class); - configuration.addAnnotatedClass(MetricDimensionDb.class); - configuration.addAnnotatedClass(SubAlarmDefinitionDb.class); - configuration.addAnnotatedClass(SubAlarmDefinitionDimensionDb.class); - configuration.addAnnotatedClass(SubAlarmDb.class); - configuration.addAnnotatedClass(NotificationMethodDb.class); - configuration.addAnnotatedClass(NotificationMethodTypesDb.class); - - configuration.setProperties(this.getORMProperties(this.config.hibernate.getDataSourceClassName())); - ServiceRegistry serviceRegistry = new StandardServiceRegistryBuilder().applySettings(configuration.getProperties()).build(); - - // builds a session factory from the service registry - return configuration.buildSessionFactory(serviceRegistry); - } catch (Throwable ex) { - throw new ProvisionException("Failed to provision ORM DBI", ex); - } - } - - @Provides - @Singleton - @Named("mysql") - public DBI getMySqlDBI() { - try { - return new DBIFactory().build(environment, config.mysql, "mysql"); - } catch (ClassNotFoundException e) { - throw new ProvisionException("Failed to provision MySQL DBI", e); - } - } - - @Provides - @Singleton - @Named("vertica") - public DBI getVerticaDBI() { - try { - return new DBIFactory().build(environment, config.vertica, "vertica"); - } catch (ClassNotFoundException e) { - throw new ProvisionException("Failed to provision Vertica DBI", e); - } - } - - @Provides - @Singleton - public Producer getProducer() { - Properties props = new Properties(); - props.put("metadata.broker.list", Joiner.on(',').join(config.kafka.brokerUris)); - props.put("serializer.class", "kafka.serializer.StringEncoder"); - props.put("request.required.acks", "1"); - ProducerConfig config = new 
ProducerConfig(props); - return new Producer(config); - } - - private Properties getORMProperties(final String dataSourceClassName) { - final Properties properties = new Properties(); - - // different drivers requires different sets of properties - switch (dataSourceClassName) { - case POSTGRES_DS_CLASS: - this.handlePostgresORMProperties(properties); - break; - case MYSQL_DS_CLASS: - this.handleMySQLORMProperties(properties); - break; - default: - throw new ProvisionException( - String.format( - "%s is not supported, valid data sources are %s", - dataSourceClassName, - Arrays.asList(POSTGRES_DS_CLASS, MYSQL_DS_CLASS) - ) - ); - } - // different drivers requires different sets of properties - - // driver agnostic properties - this.handleCommonORMProperties(properties); - // driver agnostic properties - - return properties; - } - - private void handleCommonORMProperties(final Properties properties) { - properties.put("hibernate.connection.provider_class", this.config.hibernate.getProviderClass()); - properties.put("hibernate.hbm2ddl.auto", this.config.hibernate.getAutoConfig()); - properties.put("show_sql", this.config.getLoggingFactory().getLevel().equals(Level.DEBUG)); - properties.put("hibernate.hikari.dataSource.user", this.config.hibernate.getUser()); - properties.put("hibernate.hikari.dataSource.password", this.config.hibernate.getPassword()); - properties.put("hibernate.hikari.dataSourceClassName", this.config.hibernate.getDataSourceClassName()); - } - - private void handleMySQLORMProperties(final Properties properties) { - properties.put("hibernate.hikari.dataSource.url", this.config.hibernate.getDataSourceUrl()); - } - - private void handlePostgresORMProperties(final Properties properties) { - properties.put("hibernate.hikari.dataSource.serverName", this.config.hibernate.getServerName()); - properties.put("hibernate.hikari.dataSource.portNumber", this.config.hibernate.getPortNumber()); - properties.put("hibernate.hikari.dataSource.databaseName", this.config.hibernate.getDatabaseName()); - properties.put("hibernate.hikari.dataSource.initialConnections", this.config.hibernate.getInitialConnections()); - properties.put("hibernate.hikari.dataSource.maxConnections", this.config.hibernate.getMaxConnections()); - properties.put("hibernate.hikari.connectionTestQuery", "SELECT 1"); - properties.put("hibernate.hikari.connectionTimeout", "5000"); - properties.put("hibernate.hikari.initializationFailFast", "false"); - } - - private boolean isHibernateEnabled() { - return this.config.hibernate != null && this.config.hibernate.getSupportEnabled(); - } -} diff --git a/java/src/main/java/monasca/api/app/AlarmDefinitionService.java b/java/src/main/java/monasca/api/app/AlarmDefinitionService.java deleted file mode 100644 index 37d2d2642..000000000 --- a/java/src/main/java/monasca/api/app/AlarmDefinitionService.java +++ /dev/null @@ -1,417 +0,0 @@ -/* - * (C) Copyright 2014,2016 Hewlett Packard Enterprise Development Company LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
- */ -package monasca.api.app; - -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.UUID; - -import javax.annotation.Nullable; -import javax.inject.Inject; - -import kafka.javaapi.producer.Producer; -import kafka.producer.KeyedMessage; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.google.common.collect.BiMap; -import com.google.common.collect.HashBiMap; -import com.google.common.collect.Sets; - -import monasca.api.ApiConfig; -import monasca.api.app.command.UpdateAlarmDefinitionCommand; -import monasca.api.app.validation.DimensionValidation; -import monasca.common.model.event.AlarmDefinitionCreatedEvent; -import monasca.common.model.event.AlarmDefinitionDeletedEvent; -import monasca.common.model.event.AlarmDefinitionUpdatedEvent; -import monasca.common.model.event.AlarmDeletedEvent; -import monasca.common.model.alarm.AlarmExpression; -import monasca.common.model.alarm.AlarmSubExpression; -import monasca.common.model.metric.MetricDefinition; -import monasca.api.domain.exception.EntityExistsException; -import monasca.api.domain.exception.EntityNotFoundException; -import monasca.api.domain.exception.InvalidEntityException; -import monasca.api.domain.model.alarm.Alarm; -import monasca.api.domain.model.alarm.AlarmRepo; -import monasca.api.domain.model.alarmdefinition.AlarmDefinition; -import monasca.api.domain.model.alarmdefinition.AlarmDefinitionRepo; -import monasca.api.domain.model.notificationmethod.NotificationMethodRepo; -import monasca.common.util.Exceptions; -import monasca.common.util.Serialization; - -/** - * Services alarm definition related requests. - */ -public class AlarmDefinitionService { - private static final Logger LOG = LoggerFactory.getLogger(AlarmService.class); - - private final ApiConfig config; - private final Producer producer; - private final AlarmDefinitionRepo repo; - private final AlarmRepo alarmRepo; - private final NotificationMethodRepo notificationMethodRepo; - long eventCount; - - @Inject - public AlarmDefinitionService(ApiConfig config, Producer producer, - AlarmDefinitionRepo repo, AlarmRepo alarmRepo, - NotificationMethodRepo notificationMethodRepo) { - this.config = config; - this.producer = producer; - this.repo = repo; - this.alarmRepo = alarmRepo; - this.notificationMethodRepo = notificationMethodRepo; - } - - static class SubExpressions { - /** Sub expressions which have been removed from an updated alarm expression. */ - Map oldAlarmSubExpressions; - /** Sub expressions which have had their operator or threshold changed. */ - Map changedSubExpressions; - /** Sub expressions which have not changed. */ - Map unchangedSubExpressions; - /** Sub expressions which have been added to an updated alarm expression. */ - Map newAlarmSubExpressions; - } - - /** - * Creates an alarm definition and publishes an AlarmDefinitionCreatedEvent. Note, the event is - * published first since chances of failure are higher. 
- * - * @throws EntityExistsException if an alarm already exists for the name - * @throws InvalidEntityException if one of the actions cannot be found - */ - public AlarmDefinition create(String tenantId, String name, @Nullable String description, - String severity, String expression, AlarmExpression alarmExpression, List matchBy, - List alarmActions, @Nullable List okActions, - @Nullable List undeterminedActions) { - // Assert no alarm exists by the name - String alarmDefID=repo.exists(tenantId, name); - if (alarmDefID!=null) { - throw new EntityExistsException( - "An alarm definition already exists for project / tenant: %s named: %s", tenantId, name); - } - DimensionValidation.validateNames(matchBy); - assertActionsExist(tenantId, alarmActions, okActions, undeterminedActions); - - Map subAlarms = new HashMap(); - for (AlarmSubExpression subExpression : alarmExpression.getSubExpressions()) - subAlarms.put(UUID.randomUUID().toString(), subExpression); - - String alarmDefId = UUID.randomUUID().toString(); - AlarmDefinition alarm = null; - - try { - LOG.debug("Creating alarm definition {} for tenant {}", name, tenantId); - alarm = - repo.create(tenantId, alarmDefId, name, description, severity, expression, subAlarms, - matchBy, alarmActions, okActions, undeterminedActions); - - // Notify interested parties of new alarm - String event = - Serialization.toJson(new AlarmDefinitionCreatedEvent(tenantId, alarmDefId, name, - description, expression, subAlarms, matchBy)); - producer.send(new KeyedMessage<>(config.eventsTopic, String.valueOf(eventCount++), event)); - - return alarm; - } catch (Exception e) { - if (alarm != null) - try { - repo.deleteById(tenantId, alarm.getId()); - } catch (Exception ignore) { - } - throw Exceptions.uncheck(e, "Error creating alarm definition for project / tenant %s", - tenantId); - } - } - - /** - * Deletes the alarm definition identified by the {@code alarmDefId}. - * - * @throws EntityNotFoundException if the alarm cannot be found - */ - public void delete(String tenantId, String alarmDefId) { - Map subAlarmMetricDefs = - repo.findSubAlarmMetricDefinitions(alarmDefId); - - // Have to get information about the Alarms before they are deleted. They will be deleted - // by the database as a cascade delete from the Alarm Definition delete - final List alarms = alarmRepo.find(tenantId, alarmDefId, null, null, null, null, null, null, null, null, null, 1, false); - final Map> alarmSubExpressions = - alarmRepo.findAlarmSubExpressionsForAlarmDefinition(alarmDefId); - - repo.deleteById(tenantId, alarmDefId); - - // Notify interested parties of alarm definition deletion - String event = - Serialization.toJson(new AlarmDefinitionDeletedEvent(alarmDefId, subAlarmMetricDefs)); - producer.send(new KeyedMessage<>(config.eventsTopic, String.valueOf(eventCount++), event)); - - // Notify about the Deletion of the Alarms second because that is the order that thresh - // wants it so Alarms don't get recreated - for (final Alarm alarm : alarms) { - String alarmDeletedEvent = - Serialization.toJson(new AlarmDeletedEvent(tenantId, alarm.getId(), alarm.getMetrics(), - alarmDefId, alarmSubExpressions.get(alarm.getId()))); - producer.send(new KeyedMessage<>(config.eventsTopic, String.valueOf(eventCount++), alarmDeletedEvent)); - } - } - - /** - * Updates the alarm definition for the {@code tenantId} and - * {@code alarmDefId} to the state of the {@code command}. 
- * - * @throws EntityNotFoundException - * if the alarm cannot be found - * @throws InvalidEntityException - * if one of the actions cannot be found - */ - public AlarmDefinition update(String tenantId, String alarmDefId, - AlarmExpression alarmExpression, - UpdateAlarmDefinitionCommand command) { - final AlarmDefinition oldAlarmDefinition = assertAlarmDefinitionExists( - tenantId, alarmDefId, command.alarmActions, command.okActions, - command.undeterminedActions); - final SubExpressions subExpressions = subExpressionsFor( - repo.findSubExpressions(alarmDefId), alarmExpression); - String alarmID = repo.exists(tenantId, command.name); - if (alarmID != null && !alarmID.equalsIgnoreCase(alarmDefId)) { - throw new EntityExistsException( - "An alarm definition with the same name already exists for project / tenant: %s named: %s", - tenantId, command.name); - } - validateChangesAllowed(command.matchBy, oldAlarmDefinition, - subExpressions); - updateInternal(tenantId, alarmDefId, false, command.name, - command.description, command.expression, command.matchBy, - command.severity, alarmExpression, command.actionsEnabled, - command.alarmActions, command.okActions, - command.undeterminedActions, subExpressions); - return new AlarmDefinition(alarmDefId, command.name, - command.description, command.severity, command.expression, - command.matchBy, command.actionsEnabled, command.alarmActions, - command.okActions, command.undeterminedActions); - } - - /** - * Don't allow changes that would cause existing Alarms for this AlarmDefinition to be invalidated. - * - * matchBy can't change and the expression can't change the metrics used or number of subexpressions - */ - private void validateChangesAllowed(final List newMatchBy, - final AlarmDefinition oldAlarmDefinition, final SubExpressions subExpressions) { - final boolean matchBySame; - if (oldAlarmDefinition.getMatchBy() == null || oldAlarmDefinition.getMatchBy().isEmpty()) { - matchBySame = newMatchBy == null || newMatchBy.isEmpty(); - } - else { - matchBySame = oldAlarmDefinition.getMatchBy().equals(newMatchBy); - } - if (!matchBySame) { - throw monasca.api.resource.exception.Exceptions.unprocessableEntity("match_by must not change"); - } - if (!subExpressions.oldAlarmSubExpressions.isEmpty() || !subExpressions.newAlarmSubExpressions.isEmpty()) { - final int newCount = subExpressions.newAlarmSubExpressions.size() + - subExpressions.changedSubExpressions.size() + - subExpressions.unchangedSubExpressions.size(); - if (newCount != AlarmExpression.of(oldAlarmDefinition.getExpression()).getSubExpressions().size()) { - throw monasca.api.resource.exception.Exceptions.unprocessableEntity("number of subexpressions must not change"); - } - else { - throw monasca.api.resource.exception.Exceptions.unprocessableEntity("metrics in subexpression must not change"); - } - } - } - - /** - * Patches the alarm definition for the {@code tenantId} and {@code alarmDefId} to the state of - * the {@code fields}. 
- * - * @throws EntityNotFoundException if the alarm cannot be found - * @throws InvalidEntityException if one of the actions cannot be found - */ - public AlarmDefinition patch(String tenantId, String alarmDefId, - String name, String description, String severity, - String expression, AlarmExpression alarmExpression, - List matchBy, Boolean enabled, List alarmActions, - List okActions, List undeterminedActions) { - AlarmDefinition oldAlarmDefinition = assertAlarmDefinitionExists( - tenantId, alarmDefId, alarmActions, okActions, - undeterminedActions); - name = name == null ? oldAlarmDefinition.getName() : name; - String alarmID = repo.exists(tenantId, name); - if (alarmID != null && !alarmID.equalsIgnoreCase(alarmDefId)) { - throw new EntityExistsException( - "An alarm definition with the same name already exists for project / tenant: %s named: %s", - tenantId, name); - } - description = description == null ? oldAlarmDefinition.getDescription() - : description; - expression = expression == null ? oldAlarmDefinition.getExpression() - : expression; - severity = severity == null ? oldAlarmDefinition.getSeverity() - : severity; - alarmExpression = alarmExpression == null ? AlarmExpression - .of(expression) : alarmExpression; - enabled = enabled == null ? oldAlarmDefinition.isActionsEnabled() - : enabled; - matchBy = matchBy == null ? oldAlarmDefinition.getMatchBy() : matchBy; - - final SubExpressions subExpressions = subExpressionsFor( - repo.findSubExpressions(alarmDefId), alarmExpression); - validateChangesAllowed(matchBy, oldAlarmDefinition, subExpressions); - updateInternal(tenantId, alarmDefId, true, name, description, - expression, matchBy, severity, alarmExpression, enabled, - alarmActions, okActions, undeterminedActions, subExpressions); - - return new AlarmDefinition(alarmDefId, name, description, severity, - expression, matchBy, enabled, - alarmActions == null ? oldAlarmDefinition.getAlarmActions() - : alarmActions, - okActions == null ? oldAlarmDefinition.getOkActions() - : okActions, - undeterminedActions == null ? 
oldAlarmDefinition
-            .getUndeterminedActions() : undeterminedActions);
-  }
-
-  private void updateInternal(String tenantId, String alarmDefId,
-      boolean patch, String name, String description, String expression,
-      List<String> matchBy, String severity,
-      AlarmExpression alarmExpression, Boolean enabled,
-      List<String> alarmActions, List<String> okActions,
-      List<String> undeterminedActions, SubExpressions subExpressions) {
-
-    try {
-      LOG.debug("Updating alarm definition {} for tenant {}", name,
-          tenantId);
-      repo.update(tenantId, alarmDefId, patch, name, description,
-          expression, matchBy, severity, enabled,
-          subExpressions.oldAlarmSubExpressions.keySet(),
-          subExpressions.changedSubExpressions,
-          subExpressions.newAlarmSubExpressions, alarmActions,
-          okActions, undeterminedActions);
-
-      // Notify interested parties of updated alarm
-      String event = Serialization
-          .toJson(new AlarmDefinitionUpdatedEvent(tenantId,
-              alarmDefId, name, description, expression, matchBy,
-              enabled, severity,
-              subExpressions.oldAlarmSubExpressions,
-              subExpressions.changedSubExpressions,
-              subExpressions.unchangedSubExpressions,
-              subExpressions.newAlarmSubExpressions));
-      producer.send(new KeyedMessage<>(config.eventsTopic, String
-          .valueOf(eventCount++), event));
-    } catch (Exception e) {
-      throw Exceptions.uncheck(e,
-          "Error updating alarm definition for project / tenant %s",
-          tenantId);
-    }
-  }
-
-  /**
-   * Returns an entry containing Maps of old, changed, and new sub expressions by comparing the
-   * {@code alarmExpression} to the existing sub expressions for the {@code alarmDefId}.
-   */
-  SubExpressions subExpressionsFor(final Map<String, AlarmSubExpression> initialSubExpressions,
-      AlarmExpression alarmExpression) {
-    BiMap<String, AlarmSubExpression> oldExpressions =
-        HashBiMap.create(initialSubExpressions);
-    Set<AlarmSubExpression> oldSet = oldExpressions.inverse().keySet();
-    Set<AlarmSubExpression> newSet = new HashSet<>(alarmExpression.getSubExpressions());
-
-    // Identify old or changed expressions
-    Set<AlarmSubExpression> oldOrChangedExpressions =
-        new HashSet<>(Sets.difference(oldSet, newSet));
-
-    // Identify new or changed expressions
-    Set<AlarmSubExpression> newOrChangedExpressions =
-        new HashSet<>(Sets.difference(newSet, oldSet));
-
-    // Find changed expressions
-    Map<String, AlarmSubExpression> changedExpressions = new HashMap<>();
-    for (Iterator<AlarmSubExpression> oldIt = oldOrChangedExpressions.iterator(); oldIt.hasNext();) {
-      AlarmSubExpression oldExpr = oldIt.next();
-      for (Iterator<AlarmSubExpression> newIt = newOrChangedExpressions.iterator(); newIt.hasNext();) {
-        AlarmSubExpression newExpr = newIt.next();
-        if (sameKeyFields(oldExpr, newExpr)) {
-          oldIt.remove();
-          newIt.remove();
-          changedExpressions.put(oldExpressions.inverse().get(oldExpr), newExpr);
-        }
-      }
-    }
-
-    // Create the list of unchanged expressions
-    BiMap<String, AlarmSubExpression> unchangedExpressions = HashBiMap.create(oldExpressions);
-    unchangedExpressions.values().removeAll(oldOrChangedExpressions);
-    unchangedExpressions.keySet().removeAll(changedExpressions.keySet());
-
-    // Remove old sub expressions
-    oldExpressions.values().retainAll(oldOrChangedExpressions);
-
-    // Create IDs for new expressions
-    Map<String, AlarmSubExpression> newExpressions = new HashMap<>();
-    for (AlarmSubExpression expression : newOrChangedExpressions)
-      newExpressions.put(UUID.randomUUID().toString(), expression);
-
-    SubExpressions subExpressions = new SubExpressions();
-    subExpressions.oldAlarmSubExpressions = oldExpressions;
-    subExpressions.changedSubExpressions = changedExpressions;
-    subExpressions.unchangedSubExpressions = unchangedExpressions;
-    subExpressions.newAlarmSubExpressions = newExpressions;
-    return subExpressions;
-  }
-
-  /**
-   * Returns whether all of the metrics of {@code a} and {@code
b} are the same. The Threshold - * Engine can handle any other type of change to the expression - */ - private boolean sameKeyFields(AlarmSubExpression a, AlarmSubExpression b) { - return a.getMetricDefinition().equals(b.getMetricDefinition()); - } - - /** - * Asserts an alarm definition exists for the {@code alarmDefId} as well as the actions. - * - * @throws EntityNotFoundException if the alarm cannot be found - */ - private AlarmDefinition assertAlarmDefinitionExists(String tenantId, String alarmDefId, - List alarmActions, List okActions, List undeterminedActions) { - AlarmDefinition alarm = repo.findById(tenantId, alarmDefId); - assertActionsExist(tenantId, alarmActions, okActions, undeterminedActions); - return alarm; - } - - private void assertActionsExist(String tenantId, List alarmActions, - List okActions, List undeterminedActions) { - Set actions = new HashSet<>(); - if (alarmActions != null) - actions.addAll(alarmActions); - if (okActions != null) - actions.addAll(okActions); - if (undeterminedActions != null) - actions.addAll(undeterminedActions); - if (!actions.isEmpty()) - for (String action : actions) - if (!notificationMethodRepo.exists(tenantId, action)) - throw monasca.api.resource.exception.Exceptions.unprocessableEntity( - "No notification method exists for action %s", action); - } -} diff --git a/java/src/main/java/monasca/api/app/AlarmService.java b/java/src/main/java/monasca/api/app/AlarmService.java deleted file mode 100644 index 00776242c..000000000 --- a/java/src/main/java/monasca/api/app/AlarmService.java +++ /dev/null @@ -1,152 +0,0 @@ -/* - * Copyright (c) 2014-2016 Hewlett Packard Enterprise Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.app; - -import java.util.Map; - -import javax.inject.Inject; - -import kafka.javaapi.producer.Producer; -import kafka.producer.KeyedMessage; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import monasca.api.ApiConfig; -import monasca.api.app.command.UpdateAlarmCommand; -import monasca.common.model.event.AlarmDeletedEvent; -import monasca.common.model.event.AlarmStateTransitionedEvent; -import monasca.common.model.event.AlarmUpdatedEvent; -import monasca.common.model.alarm.AlarmState; -import monasca.common.model.alarm.AlarmSubExpression; -import monasca.api.domain.exception.EntityNotFoundException; -import monasca.api.domain.exception.InvalidEntityException; -import monasca.api.domain.model.alarm.Alarm; -import monasca.api.domain.model.alarm.AlarmRepo; -import monasca.api.domain.model.alarmdefinition.AlarmDefinition; -import monasca.api.domain.model.alarmdefinition.AlarmDefinitionRepo; -import monasca.common.util.Exceptions; -import monasca.common.util.Serialization; - -/** - * Services alarmed metric related requests. 
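
The heart of the update path is subExpressionsFor() above, which partitions sub-expressions into unchanged, changed, and new buckets. A simplified, self-contained sketch of that partitioning, with plain strings standing in for AlarmSubExpression (the real code additionally pairs "changed" expressions by metric definition via sameKeyFields(), which this sketch omits):

    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    class SubExpressionDiffSketch {
      public static void main(String[] args) {
        Map<String, String> existing = new HashMap<>();  // id -> expression
        existing.put("id-1", "avg(cpu) > 90");
        existing.put("id-2", "avg(mem) > 80");

        Set<String> incoming = new HashSet<>();
        incoming.add("avg(cpu) > 95");                   // same metric, new threshold
        incoming.add("avg(mem) > 80");                   // identical

        Map<String, String> unchanged = new HashMap<>();
        Map<String, String> oldOrChanged = new HashMap<>();
        for (Map.Entry<String, String> e : existing.entrySet()) {
          if (incoming.remove(e.getValue())) {
            unchanged.put(e.getKey(), e.getValue());     // literal match keeps its id
          } else {
            oldOrChanged.put(e.getKey(), e.getValue());  // candidate for key-field matching
          }
        }
        // incoming now holds the new-or-changed expressions, e.g. "avg(cpu) > 95"
        System.out.println("unchanged=" + unchanged);
        System.out.println("oldOrChanged=" + oldOrChanged);
        System.out.println("newOrChanged=" + incoming);
      }
    }
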
- */ -public class AlarmService { - private static final Logger LOG = LoggerFactory.getLogger(AlarmService.class); - - private final ApiConfig config; - private final Producer producer; - private final AlarmRepo repo; - private final AlarmDefinitionRepo alarmDefRepo; - private long messageCount = 0; - - @Inject - public AlarmService(ApiConfig config, Producer producer, - AlarmRepo repo, AlarmDefinitionRepo alarmDefRepo) { - this.config = config; - this.producer = producer; - this.repo = repo; - this.alarmDefRepo = alarmDefRepo; - } - - /** - * Deletes the alarm identified by the {@code alarmId - * }. - * - * @throws EntityNotFoundException if the alarm cannot be found - */ - public void delete(String tenantId, String alarmId) { - Alarm alarm = repo.findById(tenantId, alarmId); - Map subAlarmMetricDefs = repo.findAlarmSubExpressions(alarmId); - repo.deleteById(tenantId, alarmId); - - // Notify interested parties of alarm deletion - String event = - Serialization.toJson(new AlarmDeletedEvent(tenantId, alarmId, alarm.getMetrics(), alarm - .getAlarmDefinition().getId(), subAlarmMetricDefs)); - producer.send(new KeyedMessage<>(config.eventsTopic, String.valueOf(messageCount++), event)); - } - - /** - * Patches the alarm for the {@code tenantId} and {@code alarmId} to the state of the - * {@code fields}. - * - * @throws EntityNotFoundException if the alarm cannot be found - * @throws InvalidEntityException if one of the actions cannot be found - */ - public Alarm patch(String tenantId, String alarmId, AlarmState state, String lifecycleState, - String link) { - Alarm oldAlarm = repo.findById(tenantId, alarmId); - - if (state == null && lifecycleState == null && link == null) { - return oldAlarm; - } - - state = (state == null) ? oldAlarm.getState() : state; - lifecycleState = (lifecycleState == null) ? oldAlarm.getLifecycleState() : lifecycleState; - link = (link == null) ? oldAlarm.getLink() : link; - - Alarm alarm = updateInternal(tenantId, alarmId, state, lifecycleState, link); - return alarm; - } - - /** - * Updates the alarmed metric for the {@code tenantId} and {@code alarmedMetricId} to the state of - * the {@code command}. 
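
The patch() method above implements standard PATCH semantics: any field left null in the request keeps its stored value. A tiny self-contained sketch of the fallback rule, with hypothetical values:

    final class PatchCoalesceSketch {
      static String coalesce(String requested, String current) {
        return requested == null ? current : requested;  // null means "leave unchanged"
      }

      public static void main(String[] args) {
        String storedLink = "http://wiki.example/runbook";
        System.out.println(coalesce(null, storedLink));                  // kept
        System.out.println(coalesce("http://new.example", storedLink));  // overridden
      }
    }
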
- * - * @throws EntityNotFoundException if the alarmed metric cannot be found - */ - public Alarm update(String tenantId, String alarmId, UpdateAlarmCommand command) { - Alarm alarm = updateInternal(tenantId, alarmId, command.state, command.lifecycleState, command.link); - return alarm; - } - - private String stateChangeReasonFor(AlarmState oldState, AlarmState newState) { - return "Alarm state updated via API"; - } - - private Alarm updateInternal(String tenantId, String alarmId, AlarmState newState, - String newLifecycleState, String newLink) { - try { - LOG.debug("Updating alarm {} for tenant {}", alarmId, tenantId); - final Alarm alarm = repo.update(tenantId, alarmId, newState, newLifecycleState, newLink); - final AlarmState oldState = alarm.getState(); - // Notify interested parties of updated alarm - AlarmDefinition alarmDef = alarmDefRepo.findById(tenantId, alarm.getAlarmDefinition().getId()); - Map subAlarms = repo.findAlarmSubExpressions(alarmId); - String event = - Serialization.toJson(new AlarmUpdatedEvent(alarmId, alarmDef.getId(), - tenantId, alarm.getMetrics(), subAlarms, newState, oldState, newLink, newLifecycleState)); - producer.send(new KeyedMessage<>(config.eventsTopic, String.valueOf(messageCount++), event)); - - // Notify interested parties of transitioned alarm state - if (!oldState.equals(newState)) { - event = - Serialization.toJson(new AlarmStateTransitionedEvent(tenantId, alarmId, alarmDef - .getId(), alarm.getMetrics(), alarmDef.getName(), alarmDef.getDescription(), - oldState, newState, alarmDef.getSeverity(), newLink, newLifecycleState, alarmDef.isActionsEnabled(), - stateChangeReasonFor(oldState, newState), null, System.currentTimeMillis())); - producer.send(new KeyedMessage<>(config.alarmStateTransitionsTopic, String.valueOf(messageCount++), event)); - } - alarm.setState(newState); - alarm.setLifecycleState(newLifecycleState); - alarm.setLink(newLink); - return alarm; - } catch (EntityNotFoundException e) { - throw e; - } catch (Exception e) { - throw Exceptions.uncheck(e, "Error updating alarm for project / tenant %s", tenantId); - } - } -} diff --git a/java/src/main/java/monasca/api/app/ApplicationModule.java b/java/src/main/java/monasca/api/app/ApplicationModule.java deleted file mode 100644 index 200013f2b..000000000 --- a/java/src/main/java/monasca/api/app/ApplicationModule.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.app; - -import javax.inject.Singleton; - -import com.google.inject.AbstractModule; - -/** - * Application layer bindings. 
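
updateInternal() above publishes on two topics: every update produces an AlarmUpdatedEvent on the events topic, and a genuine state change additionally produces an AlarmStateTransitionedEvent on the state-transitions topic. A self-contained sketch of that branching; the topic names are illustrative stand-ins for config.eventsTopic and config.alarmStateTransitionsTopic:

    final class TransitionPublishSketch {
      enum AlarmState { OK, ALARM, UNDETERMINED }

      interface Bus { void send(String topic, String payload); }

      static void publish(Bus bus, AlarmState oldState, AlarmState newState) {
        bus.send("events", "alarm-updated:" + newState);   // always
        if (!oldState.equals(newState)) {                  // only on a real transition
          bus.send("alarm-state-transitions", oldState + "->" + newState);
        }
      }

      public static void main(String[] args) {
        Bus bus = (topic, payload) -> System.out.println(topic + ": " + payload);
        publish(bus, AlarmState.OK, AlarmState.ALARM);     // two events
        publish(bus, AlarmState.ALARM, AlarmState.ALARM);  // one event
      }
    }
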
- */
-public class ApplicationModule extends AbstractModule {
-  @Override
-  protected void configure() {
-    bind(MetricService.class).in(Singleton.class);
-    bind(AlarmDefinitionService.class).in(Singleton.class);
-    bind(AlarmService.class).in(Singleton.class);
-  }
-}
diff --git a/java/src/main/java/monasca/api/app/MetricService.java b/java/src/main/java/monasca/api/app/MetricService.java
deleted file mode 100644
index 73784bd5f..000000000
--- a/java/src/main/java/monasca/api/app/MetricService.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package monasca.api.app;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import javax.annotation.Nullable;
-import javax.inject.Inject;
-
-import kafka.javaapi.producer.Producer;
-import kafka.producer.KeyedMessage;
-
-import com.codahale.metrics.Meter;
-import com.codahale.metrics.MetricRegistry;
-import com.google.common.base.Strings;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.ImmutableMap.Builder;
-
-import monasca.api.ApiConfig;
-import monasca.common.model.metric.Metric;
-import monasca.common.model.metric.MetricEnvelope;
-import monasca.common.model.metric.MetricEnvelopes;
-
-/**
- * Metric service implementation.
- */
-public class MetricService {
-  private final ApiConfig config;
-  private final Producer<String, String> producer;
-  private final Meter metricMeter;
-  private final Meter batchMeter;
-
-  @Inject
-  public MetricService(ApiConfig config, Producer<String, String> producer,
-      MetricRegistry metricRegistry) {
-    this.config = config;
-    this.producer = producer;
-    metricMeter =
-        metricRegistry.meter(MetricRegistry.name(MetricService.class, "metrics.published"));
-    batchMeter =
-        metricRegistry.meter(MetricRegistry.name(MetricService.class, "batches.published"));
-  }
-
-  public void create(List<Metric> metrics, String tenantId, @Nullable String crossTenantId) {
-    String metricTenantId = Strings.isNullOrEmpty(crossTenantId) ? tenantId : crossTenantId;
-    Builder<String, Object> metaBuilder =
-        new ImmutableMap.Builder<String, Object>().put("tenantId", metricTenantId).put("region",
-            config.region);
-    ImmutableMap<String, Object> meta = metaBuilder.build();
-
-    List<KeyedMessage<String, String>> keyedMessages = new ArrayList<>(metrics.size());
-    for (Metric metric : metrics) {
-      MetricEnvelope envelope = new MetricEnvelope(metric, meta);
-      keyedMessages.add(new KeyedMessage<>(config.metricsTopic, buildKey(metricTenantId, metric),
-          MetricEnvelopes.toJson(envelope)));
-      metricMeter.mark();
-    }
-
-    producer.send(keyedMessages);
-    batchMeter.mark();
-  }
-
-  private String buildKey(String metricTenantId, Metric metric) {
-    final StringBuilder key = new StringBuilder(metricTenantId);
-    key.append(metric.name);
-
-    // Dimensions are optional.
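
Why the sort in the step below matters: buildKey() output is the Kafka partition key, so the same metric must always serialize to the same byte sequence. A worked illustration with hypothetical values:

    // tenant "t1", metric "cpu.idle_perc", dimensions {service=monitoring, hostname=devstack}
    // key = "t1" + "cpu.idle_perc" + "hostname" + "devstack" + "service" + "monitoring"
    //     = "t1cpu.idle_perchostnamedevstackservicemonitoring"
    // Without sorting, HashMap iteration order could vary between requests and scatter
    // one series across partitions.
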
-    if (metric.dimensions != null && !metric.dimensions.isEmpty()) {
-
-      // Key must be the same for the same metric so sort the dimensions so they will be
-      // in a known order
-      for (final Map.Entry<String, String> dim : buildSortedDimSet(metric.dimensions)) {
-        key.append(dim.getKey());
-        key.append(dim.getValue());
-      }
-    }
-    String keyValue = key.toString();
-    return keyValue;
-  }
-
-  private List<Map.Entry<String, String>> buildSortedDimSet(final Map<String, String> dimMap) {
-    final List<Map.Entry<String, String>> dims = new ArrayList<>(dimMap.entrySet());
-    Collections.sort(dims, new Comparator<Map.Entry<String, String>>() {
-      @Override
-      public int compare(Entry<String, String> o1, Entry<String, String> o2) {
-        int nameCmp = o1.getKey().compareTo(o2.getKey());
-        return (nameCmp != 0 ? nameCmp : o1.getValue().compareTo(o2.getValue()));
-      }
-    });
-    return dims;
-  }
-}
diff --git a/java/src/main/java/monasca/api/app/command/CreateAlarmDefinitionCommand.java b/java/src/main/java/monasca/api/app/command/CreateAlarmDefinitionCommand.java
deleted file mode 100644
index ff899614f..000000000
--- a/java/src/main/java/monasca/api/app/command/CreateAlarmDefinitionCommand.java
+++ /dev/null
@@ -1,124 +0,0 @@
-/*
- * Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package monasca.api.app.command;
-
-import java.util.List;
-
-import javax.annotation.Nullable;
-
-import org.hibernate.validator.constraints.NotEmpty;
-
-import monasca.api.app.validation.AlarmValidation;
-
-public class CreateAlarmDefinitionCommand {
-  @NotEmpty
-  public String name;
-  public String description;
-  @NotEmpty
-  public String expression;
-  public List<String> matchBy;
-  public String severity;
-  public List<String> alarmActions;
-  public List<String> okActions;
-  public List<String> undeterminedActions;
-
-  public CreateAlarmDefinitionCommand() {
-    this.severity = "LOW";
-  }
-
-  public CreateAlarmDefinitionCommand(String name, @Nullable String description, String expression,
-      List<String> matchBy, String severity, List<String> alarmActions, List<String> okActions,
-      List<String> undeterminedActions) {
-    this.name = name;
-    this.description = description;
-    this.expression = expression;
-    this.matchBy = matchBy;
-    this.alarmActions = alarmActions;
-    this.okActions = okActions;
-    this.undeterminedActions = undeterminedActions;
-    this.severity = severity == null ?
"LOW" : severity; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (!(obj instanceof CreateAlarmDefinitionCommand)) - return false; - CreateAlarmDefinitionCommand other = (CreateAlarmDefinitionCommand) obj; - if (alarmActions == null) { - if (other.alarmActions != null) - return false; - } else if (!alarmActions.equals(other.alarmActions)) - return false; - if (description == null) { - if (other.description != null) - return false; - } else if (!description.equals(other.description)) - return false; - if (expression == null) { - if (other.expression != null) - return false; - } else if (!expression.equals(other.expression)) - return false; - if (matchBy == null) { - if (other.matchBy != null) - return false; - } else if (!matchBy.equals(other.matchBy)) - return false; - if (name == null) { - if (other.name != null) - return false; - } else if (!name.equals(other.name)) - return false; - if (okActions == null) { - if (other.okActions != null) - return false; - } else if (!okActions.equals(other.okActions)) - return false; - if (severity == null) { - if (other.severity != null) - return false; - } else if (!severity.equals(other.severity)) - return false; - if (undeterminedActions == null) { - if (other.undeterminedActions != null) - return false; - } else if (!undeterminedActions.equals(other.undeterminedActions)) - return false; - return true; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((alarmActions == null) ? 0 : alarmActions.hashCode()); - result = prime * result + ((description == null) ? 0 : description.hashCode()); - result = prime * result + ((expression == null) ? 0 : expression.hashCode()); - result = prime * result + ((matchBy == null) ? 0 : matchBy.hashCode()); - result = prime * result + ((name == null) ? 0 : name.hashCode()); - result = prime * result + ((okActions == null) ? 0 : okActions.hashCode()); - result = prime * result + ((severity == null) ? 0 : severity.hashCode()); - result = prime * result + ((undeterminedActions == null) ? 0 : undeterminedActions.hashCode()); - return result; - } - - public void validate() { - AlarmValidation.validate(name, description, severity, alarmActions, okActions, - undeterminedActions); - } -} diff --git a/java/src/main/java/monasca/api/app/command/CreateMetricCommand.java b/java/src/main/java/monasca/api/app/command/CreateMetricCommand.java deleted file mode 100644 index 4fc76b95e..000000000 --- a/java/src/main/java/monasca/api/app/command/CreateMetricCommand.java +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
- */ -package monasca.api.app.command; - -import java.util.Map; - -import javax.annotation.Nullable; -import javax.validation.constraints.NotNull; -import javax.validation.constraints.Size; - -import org.hibernate.validator.constraints.NotEmpty; - -import com.fasterxml.jackson.annotation.JsonProperty; - -import monasca.api.app.validation.DimensionValidation; -import monasca.api.app.validation.MetricNameValidation; -import monasca.api.app.validation.ValueMetaValidation; -import monasca.common.model.metric.Metric; -import monasca.api.resource.exception.Exceptions; - -public class CreateMetricCommand { - private static final long TIME_2MIN_MILLIS = 120*1000; - private static final long TIME_2WEEKS_MILLIS = 1209600*1000; - public static final int MAX_NAME_LENGTH = 255; - - @NotEmpty - @Size(min = 1, max = MAX_NAME_LENGTH) - public String name; - public Map dimensions; - @NotNull - public Long timestamp; - @NotNull - public Double value; - public Map valueMeta; - - public CreateMetricCommand() {} - - public CreateMetricCommand(String name, @Nullable Map dimensions, - long timestamp, double value, @Nullable Map valueMeta) { - setName(name); - setDimensions(dimensions); - this.timestamp = timestamp; - setValueMeta(valueMeta); - this.value = value; - } - - private static void validateTimestamp(long timestamp) { - long time = System.currentTimeMillis(); - if (timestamp > time + TIME_2MIN_MILLIS || timestamp < time - TIME_2WEEKS_MILLIS) - throw Exceptions.unprocessableEntity("Timestamp %s is out of legal range", timestamp); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - CreateMetricCommand other = (CreateMetricCommand) obj; - if (dimensions == null) { - if (other.dimensions != null) - return false; - } else if (!dimensions.equals(other.dimensions)) - return false; - if (name == null) { - if (other.name != null) - return false; - } else if (!name.equals(other.name)) - return false; - if (timestamp != other.timestamp) - return false; - if (Double.doubleToLongBits(value) != Double.doubleToLongBits(other.value)) - return false; - return true; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((dimensions == null) ? 0 : dimensions.hashCode()); - result = prime * result + ((name == null) ? 0 : name.hashCode()); - result = prime * result + ((timestamp == null) ? 0: (int) (timestamp ^ (timestamp >>> 32))); - long temp; - temp = (value == null) ? 0 : Double.doubleToLongBits(value); - result = prime * result + (int) (temp ^ (temp >>> 32)); - return result; - } - - @JsonProperty - public void setDimensions(Map dimensions) { - this.dimensions = - dimensions == null || dimensions.isEmpty() ? 
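
validateTimestamp() above accepts metric timestamps at most two minutes in the future and two weeks in the past, both bounds in milliseconds. A self-contained sketch of the same window check:

    final class TimestampWindowSketch {
      static final long TWO_MIN_MILLIS = 120 * 1000L;        // 120 s
      static final long TWO_WEEKS_MILLIS = 1209600 * 1000L;  // 14 * 24 * 3600 s

      static boolean inRange(long timestamp, long now) {
        return timestamp <= now + TWO_MIN_MILLIS && timestamp >= now - TWO_WEEKS_MILLIS;
      }

      public static void main(String[] args) {
        long now = System.currentTimeMillis();
        System.out.println(inRange(now, now));                          // true
        System.out.println(inRange(now + 10 * 60 * 1000L, now));        // false: too far ahead
        System.out.println(inRange(now - 15L * 24 * 3600 * 1000, now)); // false: older than 2 weeks
      }
    }
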
null : DimensionValidation - .normalize(dimensions); - } - - @JsonProperty - public void setValueMeta(Map valueMeta) { - this.valueMeta = ValueMetaValidation.normalize(valueMeta); - } - - @JsonProperty - public void setName(String name) { - this.name = MetricNameValidation.normalize(name); - } - - public Metric toMetric() { - return new Metric(name, dimensions, timestamp, value, valueMeta); - } - - public void validate(boolean validateTimestamp) { - // Validate name and dimensions - MetricNameValidation.validate(name, true); - if (dimensions != null) { - DimensionValidation.validate(dimensions); - } - if (valueMeta != null) { - ValueMetaValidation.validate(valueMeta); - } - - // Validate times and values - if (validateTimestamp) { - validateTimestamp(timestamp); - } - } -} diff --git a/java/src/main/java/monasca/api/app/command/CreateNotificationMethodCommand.java b/java/src/main/java/monasca/api/app/command/CreateNotificationMethodCommand.java deleted file mode 100644 index 822d034ab..000000000 --- a/java/src/main/java/monasca/api/app/command/CreateNotificationMethodCommand.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * (C) Copyright 2014-2016 Hewlett Packard Enterprise Development LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.app.command; - - -import javax.validation.constraints.NotNull; -import javax.validation.constraints.Size; - -import org.hibernate.validator.constraints.NotEmpty; - -import java.util.List; - -import monasca.api.app.validation.NotificationMethodValidation; -import monasca.api.app.validation.Validation; - -public class CreateNotificationMethodCommand { - @NotEmpty - @Size(min = 1, max = 250) - public String name; - @NotNull - public String type; - @NotEmpty - @Size(min = 1, max = 512) - public String address; - public String period; - private int convertedPeriod = 0; - - public CreateNotificationMethodCommand() {this.period = "0";} - - public CreateNotificationMethodCommand(String name, String notificationMethodType, String address, String period) { - this.name = name; - this.type = notificationMethodType; - this.address = address; - period = period == null ? 
"0" : period; - this.setPeriod(period); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - CreateNotificationMethodCommand other = (CreateNotificationMethodCommand) obj; - if (address == null) { - if (other.address != null) - return false; - } else if (!address.equals(other.address)) - return false; - if (name == null) { - if (other.name != null) - return false; - } else if (!name.equals(other.name)) - return false; - if (period == null) { - if (other.period != null) - return false; - } else if (!period.equals(other.period)) - return false; - if (type == null) { - if (other.type != null) - return false; - } else if (!type.equalsIgnoreCase(other.type)) - return false; - if (convertedPeriod != other.convertedPeriod) - return false; - return true; - } - - public void validate(List validPeriods) { - NotificationMethodValidation.validate(type, address, convertedPeriod, validPeriods); - } - - public void setPeriod(String period){ - this.period = period; - this.convertedPeriod = Validation.parseAndValidateNumber(period, "period"); - } - - public void setType(String type){ - this.type = type == null ? null : type.toUpperCase(); - } - - public int getConvertedPeriod(){ - return this.convertedPeriod; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((name == null) ? 0 : name.hashCode()); - result = prime * result + ((type == null) ? 0 : type.hashCode()); - result = prime * result + ((address == null) ? 0 : address.hashCode()); - result = prime * result + ((period == null) ? 0 : period.hashCode()); - result = prime * result + convertedPeriod; - return result; - } -} diff --git a/java/src/main/java/monasca/api/app/command/PatchAlarmDefinitionCommand.java b/java/src/main/java/monasca/api/app/command/PatchAlarmDefinitionCommand.java deleted file mode 100644 index d3542f38f..000000000 --- a/java/src/main/java/monasca/api/app/command/PatchAlarmDefinitionCommand.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright (c) 2015 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
- */ - -package monasca.api.app.command; - -import java.util.List; - -import monasca.api.app.validation.AlarmValidation; - -public class PatchAlarmDefinitionCommand { - public String name; - public String description; - public String severity; - public String expression; - public List matchBy; - public Boolean actionsEnabled; - public List alarmActions; - public List okActions; - public List undeterminedActions; - - public PatchAlarmDefinitionCommand() {} - - public void validate() { - AlarmValidation.validate(name, description, severity, alarmActions, okActions, - undeterminedActions); - } - - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (!(obj instanceof PatchAlarmDefinitionCommand)) - return false; - PatchAlarmDefinitionCommand other = (PatchAlarmDefinitionCommand) obj; - if (alarmActions == null) { - if (other.alarmActions != null) - return false; - } else if (!alarmActions.equals(other.alarmActions)) - return false; - if (description == null) { - if (other.description != null) - return false; - } else if (!description.equals(other.description)) - return false; - if (expression == null) { - if (other.expression != null) - return false; - } else if (!expression.equals(other.expression)) - return false; - if (matchBy == null) { - if (other.matchBy != null) - return false; - } else if (!matchBy.equals(other.matchBy)) - return false; - if (name == null) { - if (other.name != null) - return false; - } else if (!name.equals(other.name)) - return false; - if (okActions == null) { - if (other.okActions != null) - return false; - } else if (!okActions.equals(other.okActions)) - return false; - if (severity == null) { - if (other.severity != null) - return false; - } else if (!severity.equals(other.severity)) - return false; - if (undeterminedActions == null) { - if (other.undeterminedActions != null) - return false; - } else if (!undeterminedActions.equals(other.undeterminedActions)) - return false; - return true; - } - - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((alarmActions == null) ? 0 : alarmActions.hashCode()); - result = prime * result + ((description == null) ? 0 : description.hashCode()); - result = prime * result + ((expression == null) ? 0 : expression.hashCode()); - result = prime * result + ((matchBy == null) ? 0 : matchBy.hashCode()); - result = prime * result + ((name == null) ? 0 : name.hashCode()); - result = prime * result + ((okActions == null) ? 0 : okActions.hashCode()); - result = prime * result + ((severity == null) ? 0 : severity.hashCode()); - result = prime * result + ((undeterminedActions == null) ? 0 : undeterminedActions.hashCode()); - return result; - } -} diff --git a/java/src/main/java/monasca/api/app/command/PatchNotificationMethodCommand.java b/java/src/main/java/monasca/api/app/command/PatchNotificationMethodCommand.java deleted file mode 100644 index 5ed4475fa..000000000 --- a/java/src/main/java/monasca/api/app/command/PatchNotificationMethodCommand.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * (C) Copyright 2016 Hewlett Packard Enterprise Development LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.app.command; - -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; - -import java.util.List; - -import javax.validation.constraints.Size; - -import monasca.api.app.validation.NotificationMethodValidation; -import monasca.api.app.validation.Validation; - -public class PatchNotificationMethodCommand { - @Size(min = 1, max = 250) - public String name; - public String type; - @Size(min = 1, max = 512) - public String address; - public String period; - private int convertedPeriod = 0; - - public PatchNotificationMethodCommand() {} - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - PatchNotificationMethodCommand other = (PatchNotificationMethodCommand) obj; - if (address == null) { - if (other.address != null) - return false; - } else if (!address.equals(other.address)) - return false; - if (name == null) { - if (other.name != null) - return false; - } else if (!name.equals(other.name)) - return false; - if (period == null) { - if (other.period != null) - return false; - } else if (!period.equals(other.period)) - return false; - if (type == null) { - if (other.type != null) - return false; - } else if (!type.equalsIgnoreCase(other.type)) - return false; - if (convertedPeriod != other.convertedPeriod) - return false; - return true; - } - - public void validate(List validPeriods){ - NotificationMethodValidation.validate(type, address, convertedPeriod, validPeriods); - } - - @JsonProperty("period") - public void setPeriod(String period){ - this.period = period; - this.convertedPeriod = Validation.parseAndValidateNumber(period, "period"); - } - - @JsonIgnore - public void setPeriod(int period){ - this.convertedPeriod = period; - } - - public void setType(String type){ - this.type = type == null ? null : type.toUpperCase(); - } - - public int getConvertedPeriod(){ - return this.convertedPeriod; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((name == null) ? 0 : name.hashCode()); - result = prime * result + ((type == null) ? 0 : type.hashCode()); - result = prime * result + ((address == null) ? 0 : address.hashCode()); - result = prime * result + ((period == null) ? 0 : period.hashCode()); - result = prime * result + convertedPeriod; - return result; - } -} diff --git a/java/src/main/java/monasca/api/app/command/UpdateAlarmCommand.java b/java/src/main/java/monasca/api/app/command/UpdateAlarmCommand.java deleted file mode 100644 index f485bf127..000000000 --- a/java/src/main/java/monasca/api/app/command/UpdateAlarmCommand.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.app.command; - -import javax.validation.constraints.NotNull; - -import monasca.common.model.alarm.AlarmState; - -public class UpdateAlarmCommand { - - @NotNull - public AlarmState state; - @NotNull - public String lifecycleState; - @NotNull - public String link; - - public UpdateAlarmCommand() {} - - public UpdateAlarmCommand(AlarmState state, String lifecycleState, String link) { - this.state = state; - this.lifecycleState = lifecycleState; - this.link = link; - } -} diff --git a/java/src/main/java/monasca/api/app/command/UpdateAlarmDefinitionCommand.java b/java/src/main/java/monasca/api/app/command/UpdateAlarmDefinitionCommand.java deleted file mode 100644 index a288239d3..000000000 --- a/java/src/main/java/monasca/api/app/command/UpdateAlarmDefinitionCommand.java +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
- */ -package monasca.api.app.command; - -import org.hibernate.validator.constraints.NotEmpty; - -import java.util.List; - -import javax.validation.constraints.NotNull; - -import monasca.api.app.validation.AlarmValidation; - -public class UpdateAlarmDefinitionCommand { - - @NotNull - public Boolean actionsEnabled; - @NotEmpty - public String name; - @NotNull - public String description; - @NotEmpty - public String expression; - @NotNull - public List matchBy; - @NotNull - public String severity; - @NotNull - public List alarmActions; - @NotNull - public List okActions; - @NotNull - public List undeterminedActions; - - public UpdateAlarmDefinitionCommand() { - } - - public UpdateAlarmDefinitionCommand(String name, String description, String expression, - List matchBy, String severity, boolean enabled, - List alarmActions, - List okActions, List undeterminedActions) { - this.name = name; - this.description = description; - this.expression = expression; - this.matchBy = matchBy; - this.alarmActions = alarmActions; - this.okActions = okActions; - this.undeterminedActions = undeterminedActions; - this.actionsEnabled = enabled; - this.severity = severity; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (!(obj instanceof UpdateAlarmDefinitionCommand)) - return false; - UpdateAlarmDefinitionCommand other = (UpdateAlarmDefinitionCommand) obj; - if (alarmActions == null) { - if (other.alarmActions != null) - return false; - } else if (!alarmActions.equals(other.alarmActions)) - return false; - if (description == null) { - if (other.description != null) - return false; - } else if (!description.equals(other.description)) - return false; - if (expression == null) { - if (other.expression != null) - return false; - } else if (!expression.equals(other.expression)) - return false; - if (matchBy == null) { - if (other.matchBy != null) - return false; - } else if (!matchBy.equals(other.matchBy)) - return false; - if (name == null) { - if (other.name != null) - return false; - } else if (!name.equals(other.name)) - return false; - if (okActions == null) { - if (other.okActions != null) - return false; - } else if (!okActions.equals(other.okActions)) - return false; - if (severity == null) { - if (other.severity != null) - return false; - } else if (!severity.equals(other.severity)) - return false; - if (undeterminedActions == null) { - if (other.undeterminedActions != null) - return false; - } else if (!undeterminedActions.equals(other.undeterminedActions)) - return false; - if (actionsEnabled == null) { - if (other.actionsEnabled != null) - return false; - } else if (!actionsEnabled.equals(other.actionsEnabled)) - return false; - return true; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((alarmActions == null) ? 0 : alarmActions.hashCode()); - result = prime * result + ((description == null) ? 0 : description.hashCode()); - result = prime * result + ((expression == null) ? 0 : expression.hashCode()); - result = prime * result + ((matchBy == null) ? 0 : matchBy.hashCode()); - result = prime * result + ((name == null) ? 0 : name.hashCode()); - result = prime * result + ((okActions == null) ? 0 : okActions.hashCode()); - result = prime * result + ((severity == null) ? 0 : severity.hashCode()); - result = prime * result + ((undeterminedActions == null) ? 0 : undeterminedActions.hashCode()); - result = prime * result + ((actionsEnabled == null) ? 
0 : actionsEnabled.hashCode()); - return result; - } - - public void validate() { - AlarmValidation.validate(name, description, severity, alarmActions, okActions, - undeterminedActions); - } -} diff --git a/java/src/main/java/monasca/api/app/command/UpdateNotificationMethodCommand.java b/java/src/main/java/monasca/api/app/command/UpdateNotificationMethodCommand.java deleted file mode 100644 index 88b4bf0fd..000000000 --- a/java/src/main/java/monasca/api/app/command/UpdateNotificationMethodCommand.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * (C) Copyright 2016 Hewlett Packard Enterprise Development LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.app.command; - -import javax.validation.constraints.NotNull; -import javax.validation.constraints.Size; - -import org.hibernate.validator.constraints.NotEmpty; - -import java.util.List; - -import monasca.api.app.validation.NotificationMethodValidation; -import monasca.api.app.validation.Validation; - -public class UpdateNotificationMethodCommand { - @NotEmpty - @Size(min = 1, max = 250) - public String name; - @NotNull - public String type; - @NotEmpty - @Size(min = 1, max = 512) - public String address; - @NotNull - public String period; - private int convertedPeriod = 0; - - public UpdateNotificationMethodCommand() {} - - public UpdateNotificationMethodCommand(String name, String type, String address, String period) { - this.name = name; - this.type = type; - this.address = address; - this.setPeriod(period); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - UpdateNotificationMethodCommand other = (UpdateNotificationMethodCommand) obj; - if (address == null) { - if (other.address != null) - return false; - } else if (!address.equals(other.address)) - return false; - if (name == null) { - if (other.name != null) - return false; - } else if (!name.equals(other.name)) - return false; - if (period == null) { - if (other.period != null) - return false; - } else if (!period.equals(other.period)) - return false; - if (type == null) { - if (other.type != null) - return false; - } else if (!type.equalsIgnoreCase(other.type)) - return false; - if (convertedPeriod != other.convertedPeriod) - return false; - return true; - } - - public void validate(List validPeriods) { - NotificationMethodValidation.validate(type, address, convertedPeriod, validPeriods); - } - - public void setPeriod(String period){ - this.period = period; - this.convertedPeriod = Validation.parseAndValidateNumber(period, "period"); - } - - public void setType(String type){ - this.type = type == null ? null : type.toUpperCase(); - } - - public int getConvertedPeriod(){ - return this.convertedPeriod; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((name == null) ? 0 : name.hashCode()); - result = prime * result + ((type == null) ? 
0 : type.hashCode()); - result = prime * result + ((address == null) ? 0 : address.hashCode()); - result = prime * result + ((period == null) ? 0 : period.hashCode()); - result = prime * result + convertedPeriod; - return result; - } -} diff --git a/java/src/main/java/monasca/api/app/package-info.java b/java/src/main/java/monasca/api/app/package-info.java deleted file mode 100644 index a86a97623..000000000 --- a/java/src/main/java/monasca/api/app/package-info.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ - -/** - * Houses the application/service layer. - * - * @see http://martinfowler.com/eaaCatalog/serviceLayer.html - */ -package monasca.api.app; diff --git a/java/src/main/java/monasca/api/app/validation/AlarmValidation.java b/java/src/main/java/monasca/api/app/validation/AlarmValidation.java deleted file mode 100644 index 4b1686c22..000000000 --- a/java/src/main/java/monasca/api/app/validation/AlarmValidation.java +++ /dev/null @@ -1,143 +0,0 @@ -/* - * (C) Copyright 2014-2016 Hewlett Packard Enterprise Development Company LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.app.validation; - -import java.util.Arrays; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import javax.ws.rs.WebApplicationException; - -import monasca.common.model.alarm.AlarmExpression; -import monasca.common.model.alarm.AlarmSubExpression; -import monasca.common.model.metric.MetricDefinition; -import monasca.api.resource.exception.Exceptions; - -/** - * Utilities for validating AlarmExpressions. 
- */
-public final class AlarmValidation {
-
-  private static final List<String> VALID_ALARM_SEVERITY = Arrays.asList("low", "medium", "high",
-      "critical");
-
-  private AlarmValidation() {}
-
-  /**
-   * @throws WebApplicationException if validation fails
-   */
-  public static void validate(String name, String description, String severity,
-      List<String> alarmActions, List<String> okActions, List<String> undeterminedActions) {
-    if (name != null && name.length() > 255)
-      throw Exceptions.unprocessableEntity("Name %s must be 255 characters or less", name);
-    if (description != null && description.length() > 255)
-      throw Exceptions.unprocessableEntity("Description %s must be 255 characters or less",
-          description);
-    if (alarmActions != null) {
-      for (String action : alarmActions)
-        if (action.length() > 50)
-          throw Exceptions.unprocessableEntity("Alarm action %s must be 50 characters or less",
-              action);
-      if (checkForDuplicateNotificationMethodsInAlarmDef(alarmActions)) {
-        throw Exceptions
-            .unprocessableEntity("Alarm definition cannot have duplicate alarm notification methods");
-      }
-    }
-    if (okActions != null) {
-      for (String action : okActions)
-        if (action.length() > 50)
-          throw Exceptions
-              .unprocessableEntity("Ok action %s must be 50 characters or less", action);
-      if (checkForDuplicateNotificationMethodsInAlarmDef(okActions)) {
-        throw Exceptions
-            .unprocessableEntity("Alarm definition cannot have duplicate OK notification methods");
-      }
-    }
-    if (undeterminedActions != null) {
-      for (String action : undeterminedActions)
-        if (action.length() > 50)
-          throw Exceptions.unprocessableEntity(
-              "Undetermined action %s must be 50 characters or less", action);
-      if (checkForDuplicateNotificationMethodsInAlarmDef(undeterminedActions)) {
-        throw Exceptions
-            .unprocessableEntity("Alarm definition cannot have duplicate undetermined notification methods");
-      }
-    }
-    if (severity != null && !VALID_ALARM_SEVERITY.contains(severity.toLowerCase())) {
-      throw Exceptions.unprocessableEntity("%s is not a valid severity", severity);
-    }
-  }
-
-  /**
-   * Validates, normalizes and gets an AlarmExpression for the {@code expression}.
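
The severity check above is a case-insensitive membership test against a fixed list; a minimal self-contained sketch:

    import java.util.Arrays;
    import java.util.List;

    class SeverityCheckSketch {
      static final List<String> VALID = Arrays.asList("low", "medium", "high", "critical");

      public static void main(String[] args) {
        System.out.println(VALID.contains("HIGH".toLowerCase()));  // true
        System.out.println(VALID.contains("fatal".toLowerCase())); // false -> 422 unprocessable
      }
    }
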
- * - * @throws WebApplicationException if validation fails - */ - public static AlarmExpression validateNormalizeAndGet(String expression) { - AlarmExpression alarmExpression = null; - - try { - alarmExpression = AlarmExpression.of(expression); - } catch (IllegalArgumentException e) { - throw Exceptions.unprocessableEntityDetails("The alarm expression is invalid", - e.getMessage(), e); - } - - for (AlarmSubExpression subExpression : alarmExpression.getSubExpressions()) { - MetricDefinition metricDef = subExpression.getMetricDefinition(); - - // Normalize and validate namespace - metricDef.name = MetricNameValidation.normalize(metricDef.name); - MetricNameValidation.validate(metricDef.name, true); - - // Normalize and validate dimensions - if (metricDef.dimensions != null) { - metricDef.setDimensions(DimensionValidation.normalize(metricDef.dimensions)); - DimensionValidation.validate(metricDef.dimensions); - } - - // Validate period - if (subExpression.getPeriod() == 0) - throw Exceptions.unprocessableEntity("Period must not be 0"); - if (subExpression.getPeriod() % 60 != 0) - throw Exceptions.unprocessableEntity("Period %s must be a multiple of 60", - subExpression.getPeriod()); - - // Validate periods - if (subExpression.getPeriods() < 1) - throw Exceptions.unprocessableEntity("Periods %s must be greater than or equal to 1", - subExpression.getPeriods()); - if (subExpression.getPeriod() * subExpression.getPeriods() > 1209600) - throw Exceptions.unprocessableEntity( - "Period %s times %s must total less than 2 weeks in seconds (1209600)", - subExpression.getPeriod(), subExpression.getPeriods()); - } - - return alarmExpression; - } - - /** - * Method checks for duplicate alarm actions - */ - @SuppressWarnings("unchecked") - private static boolean checkForDuplicateNotificationMethodsInAlarmDef(List alarmActions) { - @SuppressWarnings("rawtypes") - Set inputSet = new HashSet(alarmActions); - if (inputSet.size() < alarmActions.size()) { - return true; - } - return false; - } -} diff --git a/java/src/main/java/monasca/api/app/validation/DimensionValidation.java b/java/src/main/java/monasca/api/app/validation/DimensionValidation.java deleted file mode 100644 index 330c4f68c..000000000 --- a/java/src/main/java/monasca/api/app/validation/DimensionValidation.java +++ /dev/null @@ -1,203 +0,0 @@ -/* - * Copyright (c) 2014,2016 Hewlett Packard Enterprise Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.app.validation; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.regex.Pattern; - -import javax.ws.rs.WebApplicationException; - -import com.google.common.base.CharMatcher; -import com.google.common.base.Strings; -import com.google.common.primitives.Ints; -import monasca.common.model.Services; -import monasca.api.resource.exception.Exceptions; - -/** - * Utilities for validating dimensions. 
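
checkForDuplicateNotificationMethodsInAlarmDef() above reduces to a set-size comparison; a self-contained equivalent:

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.List;

    class DuplicateActionSketch {
      static boolean hasDuplicates(List<String> actions) {
        // A set drops duplicates, so a smaller set means the list repeated an entry.
        return new HashSet<>(actions).size() < actions.size();
      }

      public static void main(String[] args) {
        System.out.println(hasDuplicates(Arrays.asList("notify-1", "notify-1"))); // true
        System.out.println(hasDuplicates(Arrays.asList("notify-1", "notify-2"))); // false
      }
    }
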
- */
-public final class DimensionValidation {
-  private static final Map<String, DimensionValidator> VALIDATORS;
-  private static final Pattern UUID_PATTERN = Pattern
-      .compile("\\w{8}-\\w{4}-\\w{4}-\\w{4}-\\w{12}");
-  private static final Pattern VALID_DIMENSION_NAME = Pattern.compile("[^><={}(),\"\\\\;&\\|]+$");
-  private static final String INVALID_CHAR_STRING = "> < = { } ( ) \" \\ , ; & |";
-
-  private DimensionValidation() {}
-
-  interface DimensionValidator {
-    boolean isValidDimension(String name, String value);
-  }
-
-  static {
-    VALIDATORS = new HashMap<String, DimensionValidator>();
-
-    // Compute validator
-    VALIDATORS.put(Services.COMPUTE_SERVICE, new DimensionValidator() {
-      @Override
-      public boolean isValidDimension(String name, String value) {
-        if ("instance_id".equals(name))
-          return value.length() != 36 || UUID_PATTERN.matcher(value).matches();
-        if ("az".equals(name))
-          return Ints.tryParse(value) != null;
-        return true;
-      }
-    });
-
-    // Objectstore validator
-    VALIDATORS.put(Services.OBJECT_STORE_SERVICE, new DimensionValidator() {
-      @Override
-      public boolean isValidDimension(String name, String value) {
-        if ("container".equals(name))
-          return value.length() < 256 || !value.contains("/");
-        return true;
-      }
-    });
-
-    // Volume validator
-    VALIDATORS.put(Services.VOLUME_SERVICE, new DimensionValidator() {
-      @Override
-      public boolean isValidDimension(String name, String value) {
-        if ("instance_id".equals(name))
-          return value.length() != 36 || UUID_PATTERN.matcher(value).matches();
-        if ("az".equals(name))
-          return Ints.tryParse(value) != null;
-        return true;
-      }
-    });
-  }
-
-  /**
-   * Normalizes dimensions by stripping whitespace.
-   */
-  public static Map<String, String> normalize(Map<String, String> dimensions) {
-    if (dimensions == null)
-      return null;
-    Map<String, String> result = new HashMap<>();
-    for (Map.Entry<String, String> dimension : dimensions.entrySet()) {
-      String dimensionKey = null;
-      if (dimension.getKey() != null) {
-        dimensionKey = CharMatcher.WHITESPACE.trimFrom(dimension.getKey());
-        if (dimensionKey.isEmpty())
-          dimensionKey = null;
-      }
-      String dimensionValue = null;
-      if (dimension.getValue() != null) {
-        dimensionValue = CharMatcher.WHITESPACE.trimFrom(dimension.getValue());
-        if (dimensionValue.isEmpty())
-          dimensionValue = null;
-      }
-      result.put(dimensionKey, dimensionValue);
-    }
-
-    return result;
-  }
-
-  /**
-   * Validates that the given {@code dimensions} are valid.
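
normalize() above trims whitespace from dimension keys and values and maps strings that trim to empty down to null, so the later validation pass can reject them. A sketch of the same rule, using String.trim() as a stand-in for Guava's CharMatcher:

    final class NormalizeSketch {
      static String trimToNull(String s) {
        if (s == null) return null;
        String t = s.trim();
        return t.isEmpty() ? null : t;  // empty after trim -> null -> rejected later
      }

      public static void main(String[] args) {
        System.out.println(trimToNull(" hostname "));  // "hostname"
        System.out.println(trimToNull("   "));         // null
      }
    }
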
- * - * @throws WebApplicationException if validation fails - */ - public static void validate(Map dimensions) { - // Validate dimension names and values - for (Map.Entry dimension : dimensions.entrySet()) { - String name = dimension.getKey(); - String value = dimension.getValue(); - - // General validations - validateDimensionName(name); - validateDimensionValue(value, name, false); - } - } - - /** - * Validates a list of dimension names - * @param names - */ - public static void validateNames(List names) { - if (names != null) { - for (String name : names) { - validateDimensionName(name); - } - } - } - - /** - * Validates a dimension name - * @param name Dimension name - */ - public static void validateName(String name) { - validateDimensionName(name); - } - - /** - * Validates a dimension value - * @param value Dimension value - * @param name Dimension name of the value - */ - public static void validateValue(String value, String name) { - validateDimensionValue(value, name, true); - } - - /** - * Validates a dimension name - * @param name Dimension name - */ - public static void validateDimensionName(String name) { - if (Strings.isNullOrEmpty(name)) { - throw Exceptions.unprocessableEntity("Dimension name cannot be empty"); - } - if (name.length() > 255) { - throw Exceptions.unprocessableEntity("Dimension name '%s' must be 255 characters or less", - name); - } - // Dimension name that start with underscores are reserved for internal use only. - if (name.startsWith("_")) { - throw Exceptions.unprocessableEntity("Dimension name '%s' cannot start with underscore (_)", - name); - } - - if (!VALID_DIMENSION_NAME.matcher(name).matches()) { - throw Exceptions.unprocessableEntity( - "Dimension name '%s' may not contain: %s", name, INVALID_CHAR_STRING); - } - } - - /** - * Validates a dimension value - * @param value Dimension value - * @param name Dimension name of the value - * @param nullValueOk whether or not a null value is valid - */ - public static void validateDimensionValue(String value, String name, boolean nullValueOk) { - if (value == null && nullValueOk) { - return; - } - if (Strings.isNullOrEmpty(value)) { - throw Exceptions.unprocessableEntity("Dimension '%s' cannot have an empty value", name); - } - if (value.length() > 255) { - throw Exceptions.unprocessableEntity("Dimension '%s' value '%s' must be 255 characters or less", - name, value); - } - - if (!VALID_DIMENSION_NAME.matcher(value).matches()) { - throw Exceptions.unprocessableEntity( - "Dimension '%s' value '%s' may not contain: %s", name, value, - INVALID_CHAR_STRING); - } - } -} diff --git a/java/src/main/java/monasca/api/app/validation/MetricNameValidation.java b/java/src/main/java/monasca/api/app/validation/MetricNameValidation.java deleted file mode 100644 index 1a781cbf7..000000000 --- a/java/src/main/java/monasca/api/app/validation/MetricNameValidation.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
- */ -package monasca.api.app.validation; - -import java.util.regex.Pattern; - -import com.google.common.base.CharMatcher; -import com.google.common.base.Strings; -import monasca.api.app.command.CreateMetricCommand; -import monasca.api.resource.exception.Exceptions; -import com.sun.jersey.spi.container.WebApplication; - -/** - * Utilities for validating metric names. - */ -public class MetricNameValidation { - private static final Pattern VALID_METRIC_NAME = Pattern.compile("[^><={}(), \"\\\\;&]+$"); - - private MetricNameValidation() {} - - /** - * Normalizes the {@code metricName} by removing whitespace. - */ - public static String normalize(String metricName) { - return metricName == null ? null : CharMatcher.WHITESPACE.trimFrom(metricName); - } - - /** - * Validates the {@code metricName} for the character constraints. - * - * @throws WebApplication if validation fails - */ - public static void validate(String metricName, boolean nameRequiredFlag) { - - // General validations - - if (Strings.isNullOrEmpty(metricName)) { - if (nameRequiredFlag) { - throw Exceptions.unprocessableEntity("Metric name is required"); - } else { - return; - } - } - - if (metricName.length() > CreateMetricCommand.MAX_NAME_LENGTH) - throw Exceptions.unprocessableEntity("Metric name %s must be %d characters or less", - metricName, CreateMetricCommand.MAX_NAME_LENGTH); - if (!VALID_METRIC_NAME.matcher(metricName).matches()) - throw Exceptions.unprocessableEntity("Metric name %s may not contain: > < = { } ( ) ' \" \\ , ; &", - metricName); - } -} diff --git a/java/src/main/java/monasca/api/app/validation/NotificationMethodValidation.java b/java/src/main/java/monasca/api/app/validation/NotificationMethodValidation.java deleted file mode 100644 index 6819ff259..000000000 --- a/java/src/main/java/monasca/api/app/validation/NotificationMethodValidation.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * (C) Copyright 2016 Hewlett Packard Enterprise Development LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.app.validation; - -import monasca.api.resource.exception.Exceptions; - -import org.apache.commons.validator.routines.EmailValidator; -import org.apache.commons.validator.routines.RegexValidator; -import org.apache.commons.validator.routines.UrlValidator; - -import java.util.List; - -public class NotificationMethodValidation { - private static final String[] SCHEMES = {"http","https"}; - // Allow QA to use the TLD .test. 
This is valid according to RFC-2606 - // The UrlValidator does not take the port off of the authority so have to handle that - private static final RegexValidator TEST_TLD_VALIDATOR = new RegexValidator(".+\\.test(:[0-9]+)?$"); - private static final UrlValidator URL_VALIDATOR = - new UrlValidator(SCHEMES, - TEST_TLD_VALIDATOR, - UrlValidator.ALLOW_LOCAL_URLS | UrlValidator.ALLOW_2_SLASHES); - - public static void validate(String type, String address, int period, - List validPeriods) { - - if (type.equals("EMAIL")) { - if (!EmailValidator.getInstance(true).isValid(address)) - throw Exceptions.unprocessableEntity("Address %s is not of correct format", address); - } - if (type.equals("WEBHOOK")) { - if (!URL_VALIDATOR.isValid(address)) - throw Exceptions.unprocessableEntity("Address %s is not of correct format", address); - if (period != 0 && !validPeriods.contains(period)){ - throw Exceptions.unprocessableEntity("%d is not a valid period", period); - } - } - if (period != 0 && !type.equals("WEBHOOK")){ - throw Exceptions.unprocessableEntity("Period can not be non zero for %s", type); - } - - } - - -} diff --git a/java/src/main/java/monasca/api/app/validation/Validation.java b/java/src/main/java/monasca/api/app/validation/Validation.java deleted file mode 100644 index 26f13545f..000000000 --- a/java/src/main/java/monasca/api/app/validation/Validation.java +++ /dev/null @@ -1,292 +0,0 @@ -/* - * Copyright (c) 2014,2016 Hewlett Packard Enterprise Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.app.validation; - -import com.google.common.base.Joiner; -import com.google.common.base.Splitter; -import com.google.common.base.Strings; -import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; - -import com.fasterxml.jackson.databind.JsonMappingException; - -import org.joda.time.DateTime; -import org.joda.time.format.DateTimeFormatter; -import org.joda.time.format.ISODateTimeFormat; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import javax.ws.rs.WebApplicationException; - -import monasca.api.domain.model.alarm.Alarm; -import monasca.api.resource.exception.Exceptions; -import monasca.common.model.alarm.AlarmSeverity; - -/** - * Validation related utilities. 
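- * <p>Illustrative usage (hypothetical query-parameter values, added for illustration):
- * <pre>{@code
- *   DateTime start = Validation.parseAndValidateDate("2016-01-01T00:00:00Z", "start_time", true);
- *   Map<String, String> dims =
- *       Validation.parseAndValidateDimensions("hostname:devstack,service:monitoring");
- *   Validation.validateAlarmState("ALARM"); // compared case-insensitively against undetermined|ok|alarm
- * }</pre>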
- */
-public final class Validation {
-  private static final Splitter COMMA_SPLITTER = Splitter.on(',').omitEmptyStrings().trimResults();
-  private static final Splitter COLON_SPLITTER = Splitter.on(':').omitEmptyStrings().trimResults().limit(2);
-  private static final Splitter SPACE_SPLITTER = Splitter.on(' ').omitEmptyStrings().trimResults();
-  private static final Splitter VERTICAL_BAR_SPLITTER = Splitter.on('|').omitEmptyStrings().trimResults();
-  private static final Joiner SPACE_JOINER = Joiner.on(' ');
-  private static final DateTimeFormatter ISO_8601_FORMATTER = ISODateTimeFormat
-      .dateOptionalTimeParser().withZoneUTC();
-  private static final List<String> VALID_STATISTICS = Arrays.asList("avg", "min", "max", "sum",
-      "count");
-  private static final List<String> VALID_ALARM_STATE = Arrays
-      .asList("undetermined", "ok", "alarm");
-
-  private Validation() {}
-
-  public static final String DEFAULT_ADMIN_ROLE = "monasca-admin";
-
-  /**
-   * @throws JsonMappingException if the {@code value} is not valid for the {@code type}
-   */
-  public static <T extends Enum<T>> T parseAndValidate(Class<T> type, String value)
-      throws JsonMappingException {
-    for (T constant : type.getEnumConstants())
-      if (constant.name().equalsIgnoreCase(value))
-        return constant;
-    List<String> acceptedValues = new ArrayList<>();
-    for (T constant : type.getEnumConstants())
-      acceptedValues.add(constant.name());
-    throw new JsonMappingException(String.format("%s was not one of %s", value, acceptedValues));
-  }
-
-  /**
-   * @throws WebApplicationException if the {@code date} is invalid or is required and null.
-   */
-  public static DateTime parseAndValidateDate(String date, String parameterName, boolean required) {
-    if (Strings.isNullOrEmpty(date)) {
-      if (required)
-        throw Exceptions.unprocessableEntity("%s is required", parameterName);
-      else
-        return null;
-    }
-
-    try {
-      return ISO_8601_FORMATTER.parseDateTime(date);
-    } catch (Exception e) {
-      throw Exceptions.unprocessableEntity("%s (%s) must be an ISO 8601 formatted time", parameterName, date);
-    }
-  }
-
-  /**
-   * @throws WebApplicationException if the {@code value} is null or empty.
-   */
-  public static Map<String, String> parseAndValidateDimensions(String dimensionsStr) {
-    Validation.validateNotNullOrEmpty(dimensionsStr, "dimensions");
-
-    Map<String, String> dimensions = new HashMap<String, String>();
-    for (String dimensionStr : COMMA_SPLITTER.split(dimensionsStr)) {
-      String[] dimensionArr = Iterables.toArray(COLON_SPLITTER.split(dimensionStr), String.class);
-      if (dimensionArr.length == 1) {
-        DimensionValidation.validateName(dimensionArr[0]);
-        dimensions.put(dimensionArr[0], "");
-      } else if (dimensionArr.length > 1) {
-        DimensionValidation.validateName(dimensionArr[0]);
-        if (dimensionArr[1].contains("|")) {
-          List<String> dimensionValueArr = VERTICAL_BAR_SPLITTER.splitToList(dimensionArr[1]);
-          for (String dimensionValue : dimensionValueArr) {
-            DimensionValidation.validateValue(dimensionValue, dimensionArr[0]);
-          }
-        } else {
-          DimensionValidation.validateValue(dimensionArr[1], dimensionArr[0]);
-        }
-        dimensions.put(dimensionArr[0], dimensionArr[1]);
-      }
-    }
-
-    //DimensionValidation.validate(dimensions);
-    return dimensions;
-  }
-
-  /**
-   * @throws WebApplicationException if the {@code number} is invalid.
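- * <p>(Illustration with hypothetical arguments: {@code parseAndValidateNumber("20", "limit")}
- * returns {@code 20}, while {@code parseAndValidateNumber("abc", "limit")} results in a
- * 422 Unprocessable Entity response.)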
- */ - public static int parseAndValidateNumber(String number, String parameterName) { - try { - return Integer.parseInt(number); - } catch (NumberFormatException e) { - throw Exceptions.unprocessableEntity("%s (%s) must be valid number", parameterName, number); - } - } - - /** - * @throws WebApplicationException if the {@code statistics} empty or invalid. - */ - public static List parseValidateAndNormalizeStatistics(Iterable statistics) { - List validStats = new ArrayList(5); - for (String statistic : statistics) { - String statisticLower = statistic.toLowerCase(); - if (!VALID_STATISTICS.contains(statisticLower)) - throw Exceptions.unprocessableEntity("%s is not a valid statistic", statistic); - validStats.add(statisticLower); - } - - if (validStats.isEmpty()) - throw Exceptions.unprocessableEntity("Statistics are required"); - - return validStats; - } - - /** - * @throws WebApplicationException if the {@code statistics} empty or invalid. - */ - public static void validateAlarmState(String state) { - String stateLower = state.toLowerCase(); - if (!VALID_ALARM_STATE.contains(stateLower)) { - throw Exceptions.unprocessableEntity("%s is not a valid state", state); - } - } - - /** - * @throws WebApplicationException if the {@code value} is null or empty. - */ - public static void validateNotNullOrEmpty(String value, String parameterName) { - if (Strings.isNullOrEmpty(value)) - throw Exceptions.unprocessableEntity("%s is required", parameterName); - } - - /** - * @throws WebApplicationException if the {@code startTime} or {@code endTime} are invalid - */ - public static void validateTimes(DateTime startTime, DateTime endTime) { - if (endTime != null && !startTime.isBefore(endTime)) - throw Exceptions.badRequest("start_time (%s) must be before end_time (%s)", startTime, - endTime); - } - - public static Boolean validateAndParseMergeMetricsFlag(String mergeMetricsFlag) { - - if (mergeMetricsFlag == null) { - - return false; - - } else if (!"true".equalsIgnoreCase(mergeMetricsFlag) - && !"false".equalsIgnoreCase(mergeMetricsFlag)) { - - throw Exceptions.badRequest("merge_metrics must be either 'true' or 'false'"); - - } else { - - return Boolean.parseBoolean(mergeMetricsFlag); - } - } - - public static List parseAndValidateMetricsGroupBy(String groupBy) { - - if (!Strings.isNullOrEmpty(groupBy)) { - return COMMA_SPLITTER.splitToList(groupBy); - } - return new ArrayList<>(); - } - - public static void validateLifecycleState(String lifecycleState) { - if (lifecycleState != null) { - if (lifecycleState.length() > 50) { - throw Exceptions - .unprocessableEntity("Lifecycle state '%s' must be 50 characters or less", - lifecycleState); - } - } - } - - public static void validateLink(String link) { - if (link != null) { - if (link.length() > 512) { - throw Exceptions.unprocessableEntity("Link '%s' must be 512 characters or less", link); - } - } - } - - /** - * Convenience method for checking cross project access - */ - public static String getQueryProject(String roles, - String crossTenantId, - String tenantId, - String admin_role) throws Exception - { - String queryTenantId = tenantId; - - boolean isAdmin = !Strings.isNullOrEmpty(roles) && - COMMA_SPLITTER.splitToList(roles).contains(admin_role); - - if (isCrossProjectRequest(crossTenantId, tenantId)) { - if (isAdmin) { - queryTenantId = crossTenantId; - } else { - throw Exceptions.forbidden("Only users with %s role can GET cross tenant metrics", - admin_role); - } - } - - return queryTenantId; - } - - /** - * Convenience method for determining if 
request is across projects. - */ - public static boolean isCrossProjectRequest(String crossTenantId, String tenantId) { - return !Strings.isNullOrEmpty(crossTenantId) && !crossTenantId.equals(tenantId); - } - - public static List parseAndValidateSeverity(String severityStr) { - List severityList = null; - if (severityStr != null && !severityStr.isEmpty()) { - severityList = new ArrayList<>(); - List severities = Lists.newArrayList(VERTICAL_BAR_SPLITTER.split(severityStr)); - for (String severity : severities) { - AlarmSeverity s = AlarmSeverity.fromString(severity); - if (s != null) { - severityList.add(s); - } else { - throw Exceptions.unprocessableEntity(String.format("Invalid severity %s", - severity)); - } - } - } - return severityList; - } - - public static List parseAndValidateSortBy(String sortBy, final List allowed_sort_by) { - List sortByList = new ArrayList<>(); - if (sortBy != null && !sortBy.isEmpty()) { - List fieldList = COMMA_SPLITTER.omitEmptyStrings().trimResults().splitToList(sortBy); - for (String sortByField: fieldList) { - List field = Lists.newArrayList(SPACE_SPLITTER.split(sortByField.toLowerCase())); - if (field.size() > 2) { - throw Exceptions.unprocessableEntity(String.format("Invalid sort_by format %s", sortByField)); - } - if (!allowed_sort_by.contains(field.get(0))) { - throw Exceptions.unprocessableEntity(String.format("Sort_by field %s must be one of %s", field.get(0), allowed_sort_by)); - } - if (field.size() > 1 && !field.get(1).equals("desc") && !field.get(1).equals("asc")) { - throw Exceptions.unprocessableEntity(String.format("Sort_by order %s must be 'asc' or 'desc'", field.get(1))); - } - sortByList.add(SPACE_JOINER.join(field)); - } - } - return sortByList; - } -} diff --git a/java/src/main/java/monasca/api/app/validation/ValueMetaValidation.java b/java/src/main/java/monasca/api/app/validation/ValueMetaValidation.java deleted file mode 100644 index 69e003626..000000000 --- a/java/src/main/java/monasca/api/app/validation/ValueMetaValidation.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright (c) 2015 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.app.validation; - -import com.google.common.base.CharMatcher; -import com.google.common.base.Strings; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; - -import monasca.api.resource.exception.Exceptions; - -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -import javax.ws.rs.WebApplicationException; - -/** - * Utilities for validating valueMeta. 
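- * <p>Illustrative usage (hypothetical valueMeta entries, added for illustration):
- * <pre>{@code
- *   Map<String, String> meta = new HashMap<>();
- *   meta.put(" msg ", "connection refused");
- *   ValueMetaValidation.validate(meta);         // at most 16 entries, names <= 255 chars,
- *                                               // serialized JSON <= 2048 chars
- *   meta = ValueMetaValidation.normalize(meta); // name trimmed to "msg"
- * }</pre>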
- */ -public final class ValueMetaValidation { - private static final int VALUE_META_MAX_NUMBER = 16; - private static final int VALUE_META_VALUE_MAX_LENGTH = 2048; - private static final int VALUE_META_NAME_MAX_LENGTH = 255; - private static final Map EMPTY_VALUE_META = Collections - .unmodifiableMap(new HashMap()); - - private static final ObjectMapper objectMapper = new ObjectMapper(); - - private ValueMetaValidation() {} - - /** - * Normalizes valueMeta by stripping whitespace from name. validate() must - * already have been called on the valueMeta - */ - public static Map normalize(Map valueMeta) { - if (valueMeta == null || valueMeta.isEmpty()) { - return EMPTY_VALUE_META; - } - final Map result = new HashMap<>(); - for (Map.Entry entry : valueMeta.entrySet()) { - final String key = CharMatcher.WHITESPACE.trimFrom(entry.getKey()); - result.put(key, entry.getValue()); - } - - return result; - } - - /** - * Validates that the given {@code valueMetas} are valid. - * - * @throws WebApplicationException if validation fails - */ - public static void validate(Map valueMetas) { - if (valueMetas.size() > VALUE_META_MAX_NUMBER) { - throw Exceptions.unprocessableEntity("Maximum number of valueMeta key/value pairs is %d", - VALUE_META_MAX_NUMBER); - } - - // Validate valueMeta names and values - for (Map.Entry valueMeta : valueMetas.entrySet()) { - // Have to check for null first because later check is for trimmed name - if (valueMeta.getKey() == null) { - throw Exceptions.unprocessableEntity("valueMeta name cannot be empty"); - } - final String name = CharMatcher.WHITESPACE.trimFrom(valueMeta.getKey()); - String value = valueMeta.getValue(); - if (value == null) { - // Store nulls as empty strings - value = ""; - } - - // General validations - if (Strings.isNullOrEmpty(name)) { - throw Exceptions.unprocessableEntity("valueMeta name cannot be empty"); - } - if (name.length() > VALUE_META_NAME_MAX_LENGTH) { - throw Exceptions.unprocessableEntity("valueMeta name %s must be %d characters or less", - name, VALUE_META_NAME_MAX_LENGTH); - } - } - verifyValueMetaStringLength(valueMetas); - } - - private static void verifyValueMetaStringLength(Map valueMetas) { - - try { - String valueMetaString = objectMapper.writeValueAsString(valueMetas); - - if (valueMetaString.length() > VALUE_META_VALUE_MAX_LENGTH) { - throw Exceptions.unprocessableEntity("valueMeta name value combinations %s must be %d characters or less", - valueMetaString, VALUE_META_VALUE_MAX_LENGTH); - } - } catch (JsonProcessingException e) { - throw Exceptions.unprocessableEntity("Failed to serialize valueMeta combinations %s", valueMetas); - } - } -} - diff --git a/java/src/main/java/monasca/api/domain/DomainModule.java b/java/src/main/java/monasca/api/domain/DomainModule.java deleted file mode 100644 index 70b43cd27..000000000 --- a/java/src/main/java/monasca/api/domain/DomainModule.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
- */ -package monasca.api.domain; - -import javax.inject.Singleton; - -import com.google.inject.AbstractModule; -import monasca.api.domain.model.version.VersionRepo; -import monasca.api.domain.service.impl.VersionRepoImpl; - -/** - * Domain layer bindings. - */ -public class DomainModule extends AbstractModule { - @Override - protected void configure() { - bind(VersionRepo.class).to(VersionRepoImpl.class).in(Singleton.class); - } -} diff --git a/java/src/main/java/monasca/api/domain/exception/EntityExistsException.java b/java/src/main/java/monasca/api/domain/exception/EntityExistsException.java deleted file mode 100644 index 721f7b5c4..000000000 --- a/java/src/main/java/monasca/api/domain/exception/EntityExistsException.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.domain.exception; - -/** - * Indicates that a domain entity already exists. - */ -public class EntityExistsException extends RuntimeException { - private static final long serialVersionUID = 1L; - - public EntityExistsException(Exception ex, String msg) { - super(msg, ex); - } - - public EntityExistsException(Exception ex, String msg, Object... args) { - super(String.format(msg, args), ex); - } - - public EntityExistsException(String msg) { - super(msg); - } - - public EntityExistsException(String msg, Object... args) { - super(String.format(msg, args)); - } -} diff --git a/java/src/main/java/monasca/api/domain/exception/EntityNotFoundException.java b/java/src/main/java/monasca/api/domain/exception/EntityNotFoundException.java deleted file mode 100644 index 9f5fa3192..000000000 --- a/java/src/main/java/monasca/api/domain/exception/EntityNotFoundException.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.domain.exception; - -/** - * Indicates that a domain entity is unknown. - */ -public class EntityNotFoundException extends RuntimeException { - private static final long serialVersionUID = 1L; - - public EntityNotFoundException(Exception ex, String msg) { - super(msg, ex); - } - - public EntityNotFoundException(Exception ex, String msg, Object... args) { - super(String.format(msg, args), ex); - } - - public EntityNotFoundException(String msg) { - super(msg); - } - - public EntityNotFoundException(String msg, Object... 
args) { - super(String.format(msg, args)); - } -} diff --git a/java/src/main/java/monasca/api/domain/exception/InvalidEntityException.java b/java/src/main/java/monasca/api/domain/exception/InvalidEntityException.java deleted file mode 100644 index 38b215d39..000000000 --- a/java/src/main/java/monasca/api/domain/exception/InvalidEntityException.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.domain.exception; - -/** - * Indicates that an entity is invalid. - */ -public class InvalidEntityException extends RuntimeException { - private static final long serialVersionUID = 1L; - - public InvalidEntityException(Exception ex, String msg) { - super(msg, ex); - } - - public InvalidEntityException(Exception ex, String msg, Object... args) { - super(String.format(msg, args), ex); - } - - public InvalidEntityException(String msg) { - super(msg); - } - - public InvalidEntityException(String msg, Object... args) { - super(String.format(msg, args)); - } -} diff --git a/java/src/main/java/monasca/api/domain/exception/MultipleMetricsException.java b/java/src/main/java/monasca/api/domain/exception/MultipleMetricsException.java deleted file mode 100644 index 380d2e556..000000000 --- a/java/src/main/java/monasca/api/domain/exception/MultipleMetricsException.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright (c) 2015 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
- */
-package monasca.api.domain.exception;
-
-import java.util.HashMap;
-import java.util.Map;
-
-public class MultipleMetricsException extends Exception {
-
-  private String metricName;
-  private Map<String, String> dimensions;
-
-  public MultipleMetricsException() {
-    super();
-    init(null, null);
-  }
-
-  public MultipleMetricsException(String metricName, Map<String, String> dimensions) {
-    super();
-    init(metricName, dimensions);
-  }
-
-  public MultipleMetricsException(String metricName, Map<String, String> dimensions,
-                                  String message) {
-    super(message);
-    init(metricName, dimensions);
-  }
-
-  public MultipleMetricsException(String metricName, Map<String, String> dimensions,
-                                  String message, Throwable cause) {
-    super(message, cause);
-    init(metricName, dimensions);
-  }
-
-  public MultipleMetricsException(String metricName, Map<String, String> dimensions,
-                                  Throwable cause) {
-    super(cause);
-    init(metricName, dimensions);
-  }
-
-  public MultipleMetricsException(String metricName, Map<String, String> dimensions,
-                                  String message, Throwable cause, boolean enableSuppression,
-                                  boolean writableStackTrace) {
-    super(message, cause, enableSuppression, writableStackTrace);
-    init(metricName, dimensions);
-
-  }
-
-  private void init(String metricName, Map<String, String> dimensions) {
-    this.metricName = metricName == null ? "" : metricName;
-    this.dimensions = dimensions == null ? new HashMap<String, String>() : dimensions;
-  }
-
-  public String getMetricName() {
-    return metricName;
-  }
-
-  public Map<String, String> getDimensions() {
-    return dimensions;
-  }
-}
diff --git a/java/src/main/java/monasca/api/domain/model/alarm/Alarm.java b/java/src/main/java/monasca/api/domain/model/alarm/Alarm.java
deleted file mode 100644
index 040dd7ab3..000000000
--- a/java/src/main/java/monasca/api/domain/model/alarm/Alarm.java
+++ /dev/null
@@ -1,302 +0,0 @@
-/*
- * Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */ -package monasca.api.domain.model.alarm; - -import org.joda.time.DateTime; -import org.apache.commons.collections4.CollectionUtils; - -import java.util.List; - -import javax.xml.bind.annotation.XmlElement; -import javax.xml.bind.annotation.XmlRootElement; - -import monasca.common.model.alarm.AlarmState; -import monasca.common.model.metric.MetricDefinition; -import monasca.common.model.domain.common.AbstractEntity; -import monasca.api.domain.model.common.Link; -import monasca.api.domain.model.common.Linked; - -@XmlRootElement(name = "Alarm") -public class Alarm extends AbstractEntity implements Linked { - private List links; - private List metrics; - private AlarmState state; - private String lifecycleState; - private String link; - private AlarmDefinitionShort alarmDefinition; - private DateTime stateUpdatedTimestamp; - private DateTime updatedTimestamp; - private DateTime createdTimestamp; - - public Alarm() {} - - public Alarm(String id, String alarmDefinitionId, String alarmDefinitionName, - String alarmDefinitionSeverity, List metrics, AlarmState state, - String lifecycleState, String link, DateTime stateUpdatedTimestamp, - DateTime updatedTimestamp, DateTime createdTimestamp) { - this.id = id; - setMetrics(metrics); - setState(state); - setLifecycleState(lifecycleState); - setLink(link); - setStateUpdatedTimestamp(stateUpdatedTimestamp); - setUpdatedTimestamp(updatedTimestamp); - setCreatedTimestamp(createdTimestamp); - this.alarmDefinition = new AlarmDefinitionShort(alarmDefinitionId, alarmDefinitionName, alarmDefinitionSeverity); - } - - public String getId() { - return id; - } - - public List getLinks() { - return links; - } - - public AlarmState getState() { - return state; - } - - public String getLifecycleState() { - return lifecycleState; - } - - public String getLink() { - return link; - } - - public DateTime getStateUpdatedTimestamp() { - return stateUpdatedTimestamp; - } - - public DateTime getUpdatedTimestamp() { - return updatedTimestamp; - } - - public DateTime getCreatedTimestamp() { - return createdTimestamp; - } - - @XmlElement(name = "id") - public void setId(String id) { - this.id = id; - } - - @Override - public void setLinks(List links) { - this.links = links; - } - - public void setState(AlarmState state) { - this.state = state; - } - - public void setLifecycleState(String lifecycleState) { - this.lifecycleState = lifecycleState; - } - - public void setLink(String link) { - this.link = link; - } - - public void setStateUpdatedTimestamp(DateTime stateUpdatedTimestamp) { - this.stateUpdatedTimestamp = stateUpdatedTimestamp; - } - - public void setUpdatedTimestamp(DateTime updatedTimestamp) { - this.updatedTimestamp = updatedTimestamp; - } - - public void setCreatedTimestamp(DateTime createdTimestamp) { - this.createdTimestamp = createdTimestamp; - } - - public List getMetrics() { - return metrics; - } - - public void setMetrics(List metrics) { - this.metrics = metrics; - } - - public AlarmDefinitionShort getAlarmDefinition() { - return alarmDefinition; - } - - public void setAlarmDefinition(AlarmDefinitionShort alarmDefinition) { - this.alarmDefinition = alarmDefinition; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = super.hashCode(); - result = prime * result + ((alarmDefinition == null) ? 0 : alarmDefinition.hashCode()); - result = prime * result + ((links == null) ? 0 : links.hashCode()); - result = prime * result + ((metrics == null) ? 0 : metrics.hashCode()); - result = prime * result + ((state == null) ? 
0 : state.hashCode()); - result = prime * result + ((lifecycleState == null) ? 0 : lifecycleState.hashCode()); - result = prime * result + ((link == null) ? 0 : link.hashCode()); - result = prime * result + ((stateUpdatedTimestamp == null) ? 0 : stateUpdatedTimestamp.hashCode()); - result = prime * result + ((updatedTimestamp == null) ? 0 : updatedTimestamp.hashCode()); - result = prime * result + ((createdTimestamp == null) ? 0 : createdTimestamp.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (!super.equals(obj)) - return false; - if (getClass() != obj.getClass()) - return false; - Alarm other = (Alarm) obj; - if (alarmDefinition == null) { - if (other.alarmDefinition != null) - return false; - } else if (!alarmDefinition.equals(other.alarmDefinition)) - return false; - if (links == null) { - if (other.links != null) - return false; - } else if (!links.equals(other.links)) - return false; - if (metrics == null) { - if (other.metrics != null) - return false; - } else if (!CollectionUtils.isEqualCollection(metrics, other.metrics)) - // order agnostic collection equality check - return false; - if (state != other.state) - return false; - if (lifecycleState == null) { - if (other.lifecycleState != null) - return false; - } else if (!lifecycleState.equals(other.lifecycleState)) - return false; - if (link == null) { - if (other.link != null) - return false; - } else if (!link.equals(other.link)) - return false; - // Ignore timezones, only check milliseconds since epoch - if (stateUpdatedTimestamp != other.stateUpdatedTimestamp) { - if (stateUpdatedTimestamp == null || other.stateUpdatedTimestamp == null) { - return false; - } else if (stateUpdatedTimestamp.getMillis() != other.stateUpdatedTimestamp.getMillis()) { - return false; - } - } - if (updatedTimestamp != other.updatedTimestamp) { - if (updatedTimestamp == null || other.updatedTimestamp == null) { - return false; - } else if (updatedTimestamp.getMillis() != other.updatedTimestamp.getMillis()) { - return false; - } - } - if (createdTimestamp != other.createdTimestamp) { - if (createdTimestamp == null || other.createdTimestamp == null) { - return false; - } else if (createdTimestamp.getMillis() != other.createdTimestamp.getMillis()) { - return false; - } - } - return true; - } - - /** - * This class holds the parts of AlarmDefinition that are returned through the API with - * an Alarm - * @author craigbr - * - */ - public static class AlarmDefinitionShort extends AbstractEntity implements Linked { - private String name; - private String severity; - private List links; - - public AlarmDefinitionShort() { - } - - public AlarmDefinitionShort(String id, String name, String severity) { - this.id = id; - this.name = name; - this.severity = severity; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getSeverity() { - return severity; - } - - public void setSeverity(String severity) { - this.severity = severity; - } - - @Override - public List getLinks() { - return links; - } - - @Override - public void setLinks(List links) { - this.links = links; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = super.hashCode(); - result = prime * result + ((links == null) ? 0 : links.hashCode()); - result = prime * result + ((name == null) ? 0 : name.hashCode()); - result = prime * result + ((severity == null) ? 
0 : severity.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (!super.equals(obj)) - return false; - if (getClass() != obj.getClass()) - return false; - AlarmDefinitionShort other = (AlarmDefinitionShort) obj; - if (links == null) { - if (other.links != null) - return false; - } else if (!links.equals(other.links)) - return false; - if (name == null) { - if (other.name != null) - return false; - } else if (!name.equals(other.name)) - return false; - if (severity == null) { - if (other.severity != null) - return false; - } else if (!severity.equals(other.severity)) - return false; - return true; - } - } -} diff --git a/java/src/main/java/monasca/api/domain/model/alarm/AlarmCount.java b/java/src/main/java/monasca/api/domain/model/alarm/AlarmCount.java deleted file mode 100644 index 05cbd9661..000000000 --- a/java/src/main/java/monasca/api/domain/model/alarm/AlarmCount.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright (c) 2016 Hewlett Packard Enterprise Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.domain.model.alarm; - -import java.util.ArrayList; -import java.util.List; - -import monasca.api.domain.model.common.Link; -import monasca.api.domain.model.common.Linked; -import monasca.common.model.domain.common.AbstractEntity; - -public class AlarmCount extends AbstractEntity implements Linked { - private List links; - private List columns; - private List> counts; - - public AlarmCount() {} - - public AlarmCount(List columns, List> counts) { - this.columns = new ArrayList<>(); - this.columns.add("count"); - if (columns != null) { - this.columns.addAll(columns); - } - this.counts = new ArrayList<>(); - this.counts.addAll(counts); - } - - public void setColumns(List columns) { - this.columns = columns; - } - - public List getColumns() { - return this.columns; - } - - public void setCounts(List> counts) { - this.counts = counts; - } - - public List> getCounts() { - return this.counts; - } - - public void setLinks(List links) { - this.links = links; - } - - public List getLinks() { - return this.links; - } -} diff --git a/java/src/main/java/monasca/api/domain/model/alarm/AlarmRepo.java b/java/src/main/java/monasca/api/domain/model/alarm/AlarmRepo.java deleted file mode 100644 index a96fef876..000000000 --- a/java/src/main/java/monasca/api/domain/model/alarm/AlarmRepo.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright (c) 2015-2016 Hewlett Packard Enterprise Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. 
See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.domain.model.alarm; - -import org.joda.time.DateTime; - -import java.util.List; -import java.util.Map; - -import monasca.common.model.alarm.AlarmSeverity; -import monasca.common.model.alarm.AlarmState; -import monasca.common.model.alarm.AlarmSubExpression; -import monasca.api.domain.exception.EntityNotFoundException; - -public interface AlarmRepo { - /** - * Deletes all alarms associated with the {@code id}. - */ - void deleteById(String tenantId, String id); - - /** - * Returns alarms for the given criteria. - */ - List find(String tenantId, String alarmDefId, String metricName, Map metricDimensions, AlarmState state, List severities, String lifecycleState, String link, DateTime stateUpdatedStart, - List sort_by, String offset, int limit, boolean enforceLimit); - - /** - * @throws EntityNotFoundException if an alarm cannot be found for the {@code id} - */ - Alarm findById(String tenantId, String id); - - /** - * Updates the state and returns the original alarm for the {@code id}. - * @return the original alarm before any state change - */ - Alarm update(String tenantId, String id, AlarmState state, String lifecycleState, String link); - - /** - * Gets the AlarmSubExpressions mapped by their Ids for an Alarm Id - */ - Map findAlarmSubExpressions(String alarmId); - - /** - * Gets the AlarmSubExpressions mapped by their Ids then mapped by alarm id for an - * Alarm Definition Id - */ - Map> findAlarmSubExpressionsForAlarmDefinition(String alarmDefinitionId); - - /** - * Gets the count(s) of the alarms matching the parameters - * @return 2 dimensional list of the counts with their group tags - */ - AlarmCount getAlarmsCount(String tenantId, String alarmDefId, String metricName, - Map metricDimensions, AlarmState state, - List severities, String lifecycleState, String link, - DateTime stateUpdatedStart, List groupBy, - String offset, int limit); -} diff --git a/java/src/main/java/monasca/api/domain/model/alarmdefinition/AlarmDefinition.java b/java/src/main/java/monasca/api/domain/model/alarmdefinition/AlarmDefinition.java deleted file mode 100644 index 634c73e22..000000000 --- a/java/src/main/java/monasca/api/domain/model/alarmdefinition/AlarmDefinition.java +++ /dev/null @@ -1,258 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
- */ -package monasca.api.domain.model.alarmdefinition; - -import com.fasterxml.jackson.annotation.JsonIgnore; - -import java.util.Collections; -import java.util.List; - -import javax.xml.bind.annotation.XmlElement; -import javax.xml.bind.annotation.XmlRootElement; - -import monasca.api.domain.model.common.Link; -import monasca.api.domain.model.common.Linked; -import monasca.common.model.alarm.AlarmExpression; -import monasca.common.model.domain.common.AbstractEntity; - -@XmlRootElement(name = "Alarm definition") -public class AlarmDefinition extends AbstractEntity implements Linked { - private List links; - private String name; - private String description = ""; - private String expression; - private boolean deterministic; - private Object expressionData; - private List matchBy; - private String severity; - private boolean actionsEnabled; - private List alarmActions; - private List okActions; - private List undeterminedActions; - - public AlarmDefinition() {} - - public AlarmDefinition(String id, String name, String description, String severity, - String expression, List matchBy, boolean actionsEnabled, List alarmActions, - List okActions, List undeterminedActions) { - this.id = id; - this.name = name; - setDescription(description); - setSeverity(severity); - setExpression(expression); - setMatchBy(matchBy); - setActionsEnabled(actionsEnabled); - setAlarmActions(alarmActions); - setOkActions(okActions); - setUndeterminedActions(undeterminedActions); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (!super.equals(obj)) - return false; - if (!(obj instanceof AlarmDefinition)) - return false; - AlarmDefinition other = (AlarmDefinition) obj; - if (actionsEnabled != other.actionsEnabled) - return false; - if (alarmActions == null) { - if (other.alarmActions != null) - return false; - } else if (!alarmActions.equals(other.alarmActions)) - return false; - if (description == null) { - if (other.description != null) - return false; - } else if (!description.equals(other.description)) - return false; - if (expression == null) { - if (other.expression != null) - return false; - } else if (!expression.equals(other.expression)) - return false; - if (expressionData == null) { - if (other.expressionData != null) - return false; - } else if (!expressionData.equals(other.expressionData)) - return false; - if (this.deterministic != other.deterministic) { - return false; - } - if (links == null) { - if (other.links != null) - return false; - } else if (!links.equals(other.links)) - return false; - if (matchBy == null) { - if (other.matchBy != null) - return false; - } else if (!matchBy.equals(other.matchBy)) - return false; - if (name == null) { - if (other.name != null) - return false; - } else if (!name.equals(other.name)) - return false; - if (okActions == null) { - if (other.okActions != null) - return false; - } else if (!okActions.equals(other.okActions)) - return false; - if (severity == null) { - if (other.severity != null) - return false; - } else if (!severity.equals(other.severity)) - return false; - if (undeterminedActions == null) { - if (other.undeterminedActions != null) - return false; - } else if (!undeterminedActions.equals(other.undeterminedActions)) - return false; - return true; - } - - public List getAlarmActions() { - return alarmActions; - } - - public String getDescription() { - return description; - } - - public String getExpression() { - return expression; - } - - public Object getExpressionData() { - return expressionData; - } - - 
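-  /*
-   * Illustrative sketch (hypothetical expression, not part of the original class): setting an
-   * expression also derives the parsed fields, so for
-   *
-   *   AlarmDefinition def = new AlarmDefinition();
-   *   def.setExpression("avg(cpu.user_perc{hostname=devstack}) > 80");
-   *
-   * getExpressionData() returns the parse tree built by AlarmExpression.of(...), and
-   * isDeterministic() is false because no sub-expression uses the deterministic modifier.
-   */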
public String getId() { - return id; - } - - public List getLinks() { - return links; - } - - public List getMatchBy() { - return matchBy; - } - - public String getName() { - return name; - } - - public List getOkActions() { - return okActions; - } - - public String getSeverity() { - return severity; - } - - public List getUndeterminedActions() { - return undeterminedActions; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = super.hashCode(); - result = prime * result + (actionsEnabled ? 1231 : 1237); - result = prime * result + ((alarmActions == null) ? 0 : alarmActions.hashCode()); - result = prime * result + ((description == null) ? 0 : description.hashCode()); - result = prime * result + ((expression == null) ? 0 : expression.hashCode()); - result = prime * result + ((expressionData == null) ? 0 : expressionData.hashCode()); - result = prime * result + Boolean.valueOf(this.deterministic).hashCode(); - result = prime * result + ((links == null) ? 0 : links.hashCode()); - result = prime * result + ((matchBy == null) ? 0 : matchBy.hashCode()); - result = prime * result + ((name == null) ? 0 : name.hashCode()); - result = prime * result + ((okActions == null) ? 0 : okActions.hashCode()); - result = prime * result + ((severity == null) ? 0 : severity.hashCode()); - result = prime * result + ((undeterminedActions == null) ? 0 : undeterminedActions.hashCode()); - return result; - } - - public boolean isActionsEnabled() { - return actionsEnabled; - } - - public void setActionsEnabled(boolean actionsEnabled) { - this.actionsEnabled = actionsEnabled; - } - - public void setAlarmActions(List alarmActions) { - this.alarmActions = alarmActions; - } - - public void setDescription(String description) { - this.description = description == null ? "" : description; - } - - public void setExpression(String expression) { - this.expression = expression; - - final AlarmExpression alarmExpression = AlarmExpression.of(expression); - this.setExpressionData(alarmExpression.getExpressionTree()); - this.deterministic = alarmExpression.isDeterministic(); - } - - @JsonIgnore - public void setExpressionData(Object expressionData) { - this.expressionData = expressionData; - } - - @XmlElement(name = "id") - public void setId(String id) { - this.id = id; - } - - @Override - public void setLinks(List links) { - this.links = links; - } - - public void setMatchBy(List matchBy) { - this.matchBy = matchBy == null ? Collections.emptyList() : matchBy; - } - - public void setName(String name) { - this.name = name; - } - - public void setOkActions(List okActions) { - this.okActions = okActions; - } - - public void setSeverity(String severity) { - this.severity = severity; - } - - public void setUndeterminedActions(List undeterminedActions) { - this.undeterminedActions = undeterminedActions; - } - - public boolean isDeterministic() { - return this.deterministic; - } - - @Override - public String toString() { - return String.format("AlarmDefinition [name=%s]", name); - } -} diff --git a/java/src/main/java/monasca/api/domain/model/alarmdefinition/AlarmDefinitionRepo.java b/java/src/main/java/monasca/api/domain/model/alarmdefinition/AlarmDefinitionRepo.java deleted file mode 100644 index 07b17f733..000000000 --- a/java/src/main/java/monasca/api/domain/model/alarmdefinition/AlarmDefinitionRepo.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright (c) 2014,2016 Hewlett Packard Enterprise Development Company, L.P. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.domain.model.alarmdefinition; - -import java.util.Collection; -import java.util.List; -import java.util.Map; - -import monasca.common.model.alarm.AlarmSeverity; -import monasca.common.model.alarm.AlarmSubExpression; -import monasca.common.model.metric.MetricDefinition; -import monasca.api.domain.exception.EntityNotFoundException; - -/** - * Repository for alarm definitions. - */ -public interface AlarmDefinitionRepo { - /** - * Creates and returns a new alarm definition for the criteria. - */ - AlarmDefinition create(String tenantId, String id, String name, String description, - String severity, String expression, Map subExpressions, - List matchBy, List alarmActions, List okActions, - List undeterminedActions); - - /** - * @throws EntityNotFoundException if an alarm definition cannot be found for the - * {@code alarmDefId} - */ - void deleteById(String tenantId, String alarmDefId); - - /** - * Returns true if an alarm exists for the given criteria, else false. - */ - String exists(String tenantId, String name); - - /** - * Returns alarms for the given criteria. - */ - List find(String tenantId, String name, Map dimensions, - List severities, List sortBy, String offset, - int limit); - - /** - * @throws EntityNotFoundException if an alarm cannot be found for the {@code alarmDefId} - */ - AlarmDefinition findById(String tenantId, String alarmDefId); - - /** - * Returns the sub-alarm Ids for the {@code alarmDefId}. - */ - Map findSubAlarmMetricDefinitions(String alarmDefId); - - /** - * Returns the sub expressions for the {@code alarmDefId}. - */ - Map findSubExpressions(String alarmDefId); - - /** - * Updates and returns an alarm definition for the criteria. - */ - void update(String tenantId, String id, boolean patch, String name, String description, - String expression, List matchBy, String severity, boolean actionsEnabled, - Collection oldSubAlarmIds, Map changedSubAlarms, - Map newSubAlarms, List alarmActions, - List okActions, List undeterminedActions); -} diff --git a/java/src/main/java/monasca/api/domain/model/alarmstatehistory/AlarmStateHistory.java b/java/src/main/java/monasca/api/domain/model/alarmstatehistory/AlarmStateHistory.java deleted file mode 100644 index 6207e810e..000000000 --- a/java/src/main/java/monasca/api/domain/model/alarmstatehistory/AlarmStateHistory.java +++ /dev/null @@ -1,195 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. 
See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.domain.model.alarmstatehistory; - -import java.util.List; - -import monasca.common.model.alarm.AlarmTransitionSubAlarm; -import org.joda.time.DateTime; - -import monasca.common.model.alarm.AlarmState; -import monasca.common.model.domain.common.AbstractEntity; -import monasca.common.model.metric.MetricDefinition; -import monasca.common.util.Conversions; - -public class AlarmStateHistory extends AbstractEntity { - private String alarmId; - private List metrics; - private AlarmState oldState; - private AlarmState newState; - private String reason; - private String reasonData; - private DateTime timestamp; - private List subAlarms; - - public AlarmStateHistory() {} - - public AlarmStateHistory( - String alarmId, - List metrics, - AlarmState oldState, - AlarmState newState, - List subAlarms, - String reason, - String reasonData, - DateTime timestamp) { - this.alarmId = alarmId; - this.setMetrics(metrics); - this.oldState = oldState; - this.newState = newState; - this.subAlarms = subAlarms; - this.reason = reason; - this.reasonData = reasonData; - this.timestamp = Conversions.variantToDateTime(timestamp); - this.id = timestamp.toString(); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (!(obj instanceof AlarmStateHistory)) - return false; - AlarmStateHistory other = (AlarmStateHistory) obj; - if (alarmId == null) { - if (other.alarmId != null) - return false; - } else if (!alarmId.equals(other.alarmId)) - return false; - if (metrics == null) { - if (other.metrics != null) - return false; - } else if (!metrics.equals(other.metrics)) - return false; - if (newState != other.newState) - return false; - if (oldState != other.oldState) - return false; - if (subAlarms == null) { - if (other.subAlarms != null) - return false; - } else if (!subAlarms.equals(other.subAlarms)) - return false; - if (reason == null) { - if (other.reason != null) - return false; - } else if (!reason.equals(other.reason)) - return false; - if (reasonData == null) { - if (other.reasonData != null) - return false; - } else if (!reasonData.equals(other.reasonData)) - return false; - if (timestamp == null) { - if (other.timestamp != null) - return false; - } else if (!timestamp.equals(other.timestamp)) - return false; - return true; - } - - public void setId(String id) { - this.id = id; - } - - public String getAlarmId() { - return alarmId; - } - - public List getMetrics() { - return metrics; - } - - public AlarmState getNewState() { - return newState; - } - - public AlarmState getOldState() { - return oldState; - } - - public String getReason() { - return reason; - } - - public String getReasonData() { - return reasonData; - } - - public DateTime getTimestamp() { - return timestamp; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((alarmId == null) ? 0 : alarmId.hashCode()); - result = prime * result + ((metrics == null) ? 0 : metrics.hashCode()); - result = prime * result + ((newState == null) ? 0 : newState.hashCode()); - result = prime * result + ((oldState == null) ? 0 : oldState.hashCode()); - result = prime * result + ((subAlarms == null) ? 0 : subAlarms.hashCode()); - result = prime * result + ((reason == null) ? 0 : reason.hashCode()); - result = prime * result + ((reasonData == null) ? 
-  @Override
-  public int hashCode() {
-    final int prime = 31;
-    int result = 1;
-    result = prime * result + ((alarmId == null) ? 0 : alarmId.hashCode());
-    result = prime * result + ((metrics == null) ? 0 : metrics.hashCode());
-    result = prime * result + ((newState == null) ? 0 : newState.hashCode());
-    result = prime * result + ((oldState == null) ? 0 : oldState.hashCode());
-    result = prime * result + ((subAlarms == null) ? 0 : subAlarms.hashCode());
-    result = prime * result + ((reason == null) ? 0 : reason.hashCode());
-    result = prime * result + ((reasonData == null) ? 0 : reasonData.hashCode());
-    result = prime * result + ((timestamp == null) ? 0 : timestamp.hashCode());
-    return result;
-  }
-
-  public void setAlarmId(String alarmId) {
-    this.alarmId = alarmId;
-  }
-
-  public void setMetrics(List<MetricDefinition> metrics) {
-    this.metrics = metrics;
-  }
-
-  public void setNewState(AlarmState newState) {
-    this.newState = newState;
-  }
-
-  public void setOldState(AlarmState oldState) {
-    this.oldState = oldState;
-  }
-
-  public void setReason(String reason) {
-    this.reason = reason;
-  }
-
-  public void setReasonData(String reasonData) {
-    this.reasonData = reasonData;
-  }
-
-  public List<AlarmTransitionSubAlarm> getSubAlarms() {
-    return subAlarms;
-  }
-
-  public void setSubAlarms(List<AlarmTransitionSubAlarm> subAlarms) {
-    this.subAlarms = subAlarms;
-  }
-
-  public void setTimestamp(DateTime timestamp) {
-    this.timestamp = Conversions.variantToDateTime(timestamp);
-    // Set the id in the AbstractEntity class.
-    id = timestamp.toString();
-  }
-
-  @Override
-  public String toString() {
-    return "AlarmStateHistory [alarmId=" + alarmId + ", metrics=" + metrics + ", oldState="
-        + oldState + ", newState=" + newState + ", subAlarms=" + subAlarms + ", reason=" + reason + ", reasonData=" + reasonData
-        + ", timestamp=" + timestamp + "]";
-  }
-}
diff --git a/java/src/main/java/monasca/api/domain/model/alarmstatehistory/AlarmStateHistoryRepo.java b/java/src/main/java/monasca/api/domain/model/alarmstatehistory/AlarmStateHistoryRepo.java
deleted file mode 100644
index b6bbfc1b0..000000000
--- a/java/src/main/java/monasca/api/domain/model/alarmstatehistory/AlarmStateHistoryRepo.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package monasca.api.domain.model.alarmstatehistory;
-
-import monasca.api.domain.exception.EntityNotFoundException;
-import org.joda.time.DateTime;
-
-import javax.annotation.Nullable;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Repository for alarm state history.
- */
-public interface AlarmStateHistoryRepo {
-  /**
-   * @throws EntityNotFoundException if an alarm cannot be found for the {@code alarmId}
-   */
-  List<AlarmStateHistory> findById(String tenantId, String alarmId, String offset, int limit) throws Exception;
-
-  /**
-   * Finds AlarmStateHistory for the given criteria.
-   */
-  List<AlarmStateHistory> find(String tenantId, Map<String, String> dimensions,
-      DateTime startTime, @Nullable DateTime endTime, @Nullable String offset, int limit) throws Exception;
-}
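A minimal sketch of a time-window query against this repository; the `repo` instance is assumed, and the 24-hour window and page size are illustrative only:

    import java.util.Collections;
    import java.util.List;

    import org.joda.time.DateTime;

    import monasca.api.domain.model.alarmstatehistory.AlarmStateHistory;
    import monasca.api.domain.model.alarmstatehistory.AlarmStateHistoryRepo;

    public class StateHistorySketch {
      // Fetches up to 50 alarm transitions from the last 24 hours for one tenant,
      // with no dimension filtering and no paging offset.
      static List<AlarmStateHistory> lastDay(AlarmStateHistoryRepo repo, String tenantId)
          throws Exception {
        DateTime now = DateTime.now();
        return repo.find(tenantId, Collections.emptyMap(), now.minusDays(1), now, null, 50);
      }
    }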
diff --git a/java/src/main/java/monasca/api/domain/model/common/Link.java b/java/src/main/java/monasca/api/domain/model/common/Link.java
deleted file mode 100644
index e378a3c41..000000000
--- a/java/src/main/java/monasca/api/domain/model/common/Link.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package monasca.api.domain.model.common;
-
-public class Link {
-  public String rel;
-
-  public String href;
-
-  public Link() {}
-
-  public Link(String rel, String href) {
-    this.rel = rel;
-    this.href = href;
-  }
-
-  @Override
-  public boolean equals(Object obj) {
-    if (this == obj)
-      return true;
-    if (obj == null)
-      return false;
-    if (getClass() != obj.getClass())
-      return false;
-    Link other = (Link) obj;
-    if (href == null) {
-      if (other.href != null)
-        return false;
-    } else if (!href.equals(other.href))
-      return false;
-    if (rel == null) {
-      if (other.rel != null)
-        return false;
-    } else if (!rel.equals(other.rel))
-      return false;
-    return true;
-  }
-
-  @Override
-  public int hashCode() {
-    final int prime = 31;
-    int result = 1;
-    result = prime * result + ((href == null) ? 0 : href.hashCode());
-    result = prime * result + ((rel == null) ? 0 : rel.hashCode());
-    return result;
-  }
-
-  @Override
-  public String toString() {
-    return String.format("Link [rel=%s, href=%s]", rel, href);
-  }
-}
diff --git a/java/src/main/java/monasca/api/domain/model/common/Linked.java b/java/src/main/java/monasca/api/domain/model/common/Linked.java
deleted file mode 100644
index 4d4de9e68..000000000
--- a/java/src/main/java/monasca/api/domain/model/common/Linked.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package monasca.api.domain.model.common;
-
-import java.util.List;
-
-/**
- * Defines a type that can be described via a set of links.
- */
-public interface Linked {
-  List<Link> getLinks();
-
-  void setLinks(List<Link> links);
-}
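A minimal sketch of the rel/href pattern the two types above imply; the resource class and its base-URI scheme are hypothetical:

    import java.util.Arrays;
    import java.util.List;

    import monasca.api.domain.model.common.Link;
    import monasca.api.domain.model.common.Linked;

    // Hypothetical resource carrying a single "self" link.
    public class LinkedSketch implements Linked {
      private List<Link> links;

      public LinkedSketch(String baseUri, String id) {
        this.links = Arrays.asList(new Link("self", baseUri + "/" + id));
      }

      @Override
      public List<Link> getLinks() { return links; }

      @Override
      public void setLinks(List<Link> links) { this.links = links; }
    }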
diff --git a/java/src/main/java/monasca/api/domain/model/common/Paged.java b/java/src/main/java/monasca/api/domain/model/common/Paged.java
deleted file mode 100644
index 19ae3fdb5..000000000
--- a/java/src/main/java/monasca/api/domain/model/common/Paged.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * (C) Copyright 2015-2016 Hewlett Packard Enterprise Development Company LP
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package monasca.api.domain.model.common;
-
-import java.util.ArrayList;
-import java.util.List;
-
-public class Paged {
-
-  public static final int LIMIT = 10000;
-
-  public List<Link> links = new ArrayList<>();
-
-  public List<?> elements;
-
-  @Override
-  public int hashCode() {
-    final int prime = 31;
-    int result = 1;
-    result = prime * result + ((elements == null) ? 0 : elements.hashCode());
-    result = prime * result + ((links == null) ? 0 : links.hashCode());
-    return result;
-  }
-
-  @Override
-  public boolean equals(Object obj) {
-    if (this == obj)
-      return true;
-    if (obj == null)
-      return false;
-    if (getClass() != obj.getClass())
-      return false;
-    Paged other = (Paged) obj;
-    if (elements == null) {
-      if (other.elements != null)
-        return false;
-    } else if (!elements.equals(other.elements))
-      return false;
-    if (links == null) {
-      if (other.links != null)
-        return false;
-    } else if (!links.equals(other.links))
-      return false;
-    return true;
-  }
-
-  @Override
-  public String toString() {
-    return "Paged [links=" + links + ", elements=" + elements + "]";
-  }
-}
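A minimal sketch of how a list endpoint might wrap one page of results in this envelope; the helper, its URI scheme, and the "next"-link convention are assumptions:

    import java.util.Arrays;
    import java.util.List;

    import monasca.api.domain.model.common.Link;
    import monasca.api.domain.model.common.Paged;

    public class PagedSketch {
      // Wraps one page of elements and advertises the next page via a "next"
      // link, mirroring the links-plus-elements shape of the class above.
      static Paged page(List<?> elements, String selfUri, String nextOffset) {
        Paged paged = new Paged();
        paged.elements = elements;
        paged.links = Arrays.asList(
            new Link("self", selfUri),
            new Link("next", selfUri + "?offset=" + nextOffset));
        return paged;
      }
    }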
diff --git a/java/src/main/java/monasca/api/domain/model/dimension/DimensionBase.java b/java/src/main/java/monasca/api/domain/model/dimension/DimensionBase.java
deleted file mode 100644
index ca9a6d49d..000000000
--- a/java/src/main/java/monasca/api/domain/model/dimension/DimensionBase.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * (C) Copyright 2016 Hewlett Packard Enterprise Development LP
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package monasca.api.domain.model.dimension;
-
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import com.fasterxml.jackson.annotation.JsonInclude;
-
-import monasca.common.model.domain.common.AbstractEntity;
-
-/**
- * Base class for DimensionNames and DimensionValues.
- */
-public abstract class DimensionBase extends AbstractEntity {
-  @JsonInclude(JsonInclude.Include.NON_NULL)
-  final private String metricName;
-  final private String id;
-
-  public DimensionBase(String metricName, String id) {
-    this.metricName = metricName;
-    this.id = id;
-  }
-
-  public String getMetricName() {
-    return metricName;
-  }
-
-  @JsonIgnore
-  public String getId() {
-    return id;
-  }
-
-  @Override
-  public boolean equals(Object obj) {
-    if (this == obj)
-      return true;
-    if (obj == null)
-      return false;
-    if (getClass() != obj.getClass())
-      return false;
-    DimensionBase other = (DimensionBase) obj;
-    if (metricName == null) {
-      if (other.getMetricName() != null)
-        return false;
-    } else if (!metricName.equals(other.getMetricName()))
-      return false;
-    if (id == null) {
-      if (other.id != null)
-        return false;
-    } else if (!id.equals(other.id))
-      return false;
-    return true;
-  }
-
-  @Override
-  public int hashCode() {
-    final int prime = 31;
-    int result = 17;
-    result = prime * result + ((metricName == null) ? 0 : metricName.hashCode());
-    result = prime * result + ((id == null) ? 0 : id.hashCode());
-    return result;
-  }
-}
diff --git a/java/src/main/java/monasca/api/domain/model/dimension/DimensionName.java b/java/src/main/java/monasca/api/domain/model/dimension/DimensionName.java
deleted file mode 100644
index 4a0848c20..000000000
--- a/java/src/main/java/monasca/api/domain/model/dimension/DimensionName.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * (C) Copyright 2016 Hewlett Packard Enterprise Development LP
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package monasca.api.domain.model.dimension;
-
-/**
- * Encapsulates a dimension name for an optional metric name.
- */
-public class DimensionName extends DimensionBase {
-  final private String dimensionName;
-
-  public DimensionName(String metricName, String dimensionName) {
-    super(metricName, dimensionName);
-    this.dimensionName = dimensionName;
-  }
-
-  public String getDimensionName() {
-    return dimensionName;
-  }
-
-  @Override
-  public boolean equals(Object obj) {
-    if (this == obj)
-      return true;
-    if (obj == null || getClass() != obj.getClass())
-      return false;
-    DimensionName other = (DimensionName) obj;
-    if (dimensionName == null) {
-      if (other.dimensionName != null)
-        return false;
-    } else if (!dimensionName.equals(other.dimensionName))
-      return false;
-    return super.equals(obj);
-  }
-
-  @Override
-  public int hashCode() {
-    final int prime = 31;
-    int result = super.hashCode();
-    result = prime * result + ((dimensionName == null) ? 0 : dimensionName.hashCode());
-    return result;
-  }
-
-  @Override
-  public String toString() {
-    return String.format("DimensionName: MetricName=%s DimensionName [names=%s]",
-        getMetricName(), dimensionName);
-  }
-}
diff --git a/java/src/main/java/monasca/api/domain/model/dimension/DimensionRepo.java b/java/src/main/java/monasca/api/domain/model/dimension/DimensionRepo.java
deleted file mode 100644
index a99a5b8cb..000000000
--- a/java/src/main/java/monasca/api/domain/model/dimension/DimensionRepo.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * (C) Copyright 2016 Hewlett Packard Enterprise Development LP
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package monasca.api.domain.model.dimension;
-
-import javax.annotation.Nullable;
-
-import java.util.List;
-
-/**
- * Repository for dimensions.
- */
-public interface DimensionRepo {
-  /**
-   * Finds dimension values given a dimension name and
-   * optional metric name.
-   */
-  List<DimensionValue> findValues(String metricName,
-                                  String tenantId,
-                                  String dimensionName,
-                                  @Nullable String offset,
-                                  int limit)
-      throws Exception;
-
-  /**
-   * Finds dimension names given an optional metric name.
-   */
-  List<DimensionName> findNames(String metricName,
-                                String tenantId,
-                                @Nullable String offset,
-                                int limit)
-      throws Exception;
-}
diff --git a/java/src/main/java/monasca/api/domain/model/dimension/DimensionValue.java b/java/src/main/java/monasca/api/domain/model/dimension/DimensionValue.java
deleted file mode 100644
index c25322980..000000000
--- a/java/src/main/java/monasca/api/domain/model/dimension/DimensionValue.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * (C) Copyright 2016 Hewlett Packard Enterprise Development LP
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package monasca.api.domain.model.dimension;
-
-/**
- * Encapsulates a dimension value for a given dimension name
- * (and optional metric name).
- */
-public class DimensionValue extends DimensionBase {
-
-  final private String dimensionName;
-  final private String dimensionValue;
-
-  public DimensionValue(String metricName, String dimensionName, String dimensionValue) {
-    super(metricName, dimensionValue);
-    this.dimensionName = dimensionName;
-    this.dimensionValue = dimensionValue;
-  }
-
-  public String getDimensionValue() {
-    return dimensionValue;
-  }
-
-  @Override
-  public boolean equals(Object obj) {
-    if (this == obj)
-      return true;
-    if (obj == null || getClass() != obj.getClass())
-      return false;
-    DimensionValue other = (DimensionValue) obj;
-    if (dimensionName == null) {
-      if (other.dimensionName != null)
-        return false;
-    } else if (!dimensionName.equals(other.dimensionName))
-      return false;
-    if (dimensionValue == null) {
-      if (other.dimensionValue != null)
-        return false;
-    } else if (!dimensionValue.equals(other.dimensionValue))
-      return false;
-    return super.equals(obj);
-  }
-
-  @Override
-  public int hashCode() {
-    final int prime = 31;
-    int result = super.hashCode();
-    result = prime * result + ((dimensionName == null) ? 0 : dimensionName.hashCode());
-    result = prime * result + ((dimensionValue == null) ? 0 : dimensionValue.hashCode());
-    return result;
-  }
-
-  @Override
-  public String toString() {
-    return String.format("DimensionValue: MetricName=%s DimensionValue [name=%s, values=%s]",
-        getMetricName(), dimensionName, dimensionValue);
-  }
-}
diff --git a/java/src/main/java/monasca/api/domain/model/measurement/MeasurementRepo.java b/java/src/main/java/monasca/api/domain/model/measurement/MeasurementRepo.java
deleted file mode 100644
index a526ebc24..000000000
--- a/java/src/main/java/monasca/api/domain/model/measurement/MeasurementRepo.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright (c) 2014, 2016 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package monasca.api.domain.model.measurement;
-
-import org.joda.time.DateTime;
-
-import javax.annotation.Nullable;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Repository for measurements.
- */
-public interface MeasurementRepo {
-  /**
-   * Finds measurements for the given criteria.
-   */
-  List<Measurements> find(String tenantId, String name, Map<String, String> dimensions,
-      DateTime startTime, @Nullable DateTime endTime, @Nullable String offset,
-      int limit, Boolean mergeMetricsFlag, List<String> groupBy)
-      throws Exception;
-}
diff --git a/java/src/main/java/monasca/api/domain/model/measurement/Measurements.java b/java/src/main/java/monasca/api/domain/model/measurement/Measurements.java
deleted file mode 100644
index 9c4298d81..000000000
--- a/java/src/main/java/monasca/api/domain/model/measurement/Measurements.java
+++ /dev/null
@@ -1,139 +0,0 @@
-/*
- * Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package monasca.api.domain.model.measurement;
-
-import java.util.Arrays;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-
-import monasca.common.model.domain.common.AbstractEntity;
-
-/**
- * Encapsulates a metric's measurements.
- */
-public class Measurements extends AbstractEntity implements Comparable<Measurements> {
-  private static final List<String> COLUMNS = Arrays.asList("timestamp", "value", "value_meta");
-
-  protected String name;
-  protected Map<String, String> dimensions;
-  protected List<String> columns = COLUMNS;
-  protected List<List<Object>> measurements;
-
-  public Measurements() {
-    measurements = new LinkedList<>();
-  }
-
-  public Measurements(String name, Map<String, String> dimensions, List<List<Object>> measurements) {
-    this.name = name;
-    this.dimensions = dimensions;
-    this.measurements = measurements;
-  }
-
-  public Measurements(String name, Map<String, String> dimensions) {
-    this.name = name;
-    this.dimensions = dimensions;
-    this.measurements = new LinkedList<>();
-  }
-
-  public void addMeasurement(List<Object> measurement) {
-    measurements.add(measurement);
-  }
-
-  @Override
-  public boolean equals(Object obj) {
-    if (this == obj)
-      return true;
-    if (obj == null)
-      return false;
-    if (getClass() != obj.getClass())
-      return false;
-    Measurements other = (Measurements) obj;
-    if (dimensions == null) {
-      if (other.dimensions != null)
-        return false;
-    } else if (!dimensions.equals(other.dimensions))
-      return false;
-    if (measurements == null) {
-      if (other.measurements != null)
-        return false;
-    } else if (!measurements.equals(other.measurements))
-      return false;
-    if (name == null) {
-      if (other.name != null)
-        return false;
-    } else if (!name.equals(other.name))
-      return false;
-    if (columns == null) {
-      if (other.columns != null)
-        return false;
-    } else if (!columns.equals(other.columns))
-      return false;
-    return true;
-  }
-
-  public List<String> getColumns() {
-    return columns;
-  }
-
-  public Map<String, String> getDimensions() {
-    return dimensions;
-  }
-
-  public List<List<Object>> getMeasurements() {
-    return measurements;
-  }
-
-  public String getName() {
-    return name;
-  }
-
-  @Override
-  public int hashCode() {
-    final int prime = 31;
-    int result = 1;
-    result = prime * result + ((dimensions == null) ? 0 : dimensions.hashCode());
-    result = prime * result + ((measurements == null) ? 0 : measurements.hashCode());
-    result = prime * result + ((name == null) ? 0 : name.hashCode());
-    result = prime * result + ((columns == null) ? 0 : columns.hashCode());
-    return result;
-  }
-
-  public void setDimensions(Map<String, String> dimensions) {
-    this.dimensions = dimensions;
-  }
-
-  public void setMeasurements(List<List<Object>> measurements) {
-    this.measurements = measurements;
-  }
-
-  public void setName(String name) {
-    this.name = name;
-  }
-
-  public void setId(String id) {
-    this.id = id;
-  }
-
-  @Override
-  public String toString() {
-    return String.format("Measurement [name=%s, dimensions=%s, measurements=%s]", name, dimensions,
-        measurements);
-  }
-
-  @Override
-  public int compareTo(Measurements other) {
-    return this.id.compareTo(other.getId());
-  }
-}
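A minimal sketch of the row format this class declares: each row follows the column order ["timestamp", "value", "value_meta"]. The metric name, dimension, and sample values are illustrative:

    import java.util.Arrays;
    import java.util.Collections;

    import monasca.api.domain.model.measurement.Measurements;

    public class MeasurementsSketch {
      // Builds one series and appends a single [timestamp, value, value_meta] row.
      static Measurements sample() {
        Measurements m = new Measurements("cpu.idle_perc",
            Collections.singletonMap("hostname", "devstack"));
        m.addMeasurement(Arrays.asList("2015-03-01T00:00:00Z", 91.5,
            Collections.emptyMap()));
        return m;
      }
    }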
diff --git a/java/src/main/java/monasca/api/domain/model/metric/MetricDefinitionRepo.java b/java/src/main/java/monasca/api/domain/model/metric/MetricDefinitionRepo.java
deleted file mode 100644
index b9776489f..000000000
--- a/java/src/main/java/monasca/api/domain/model/metric/MetricDefinitionRepo.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package monasca.api.domain.model.metric;
-
-import monasca.common.model.metric.MetricDefinition;
-
-import org.joda.time.DateTime;
-
-import java.util.List;
-import java.util.Map;
-
-/**
- * Repository for metrics.
- */
-public interface MetricDefinitionRepo {
-
-  /**
-   * Finds metrics for the given criteria.
-   */
-  List<MetricDefinition> find(String tenantId, String name, Map<String, String> dimensions,
-      DateTime startTime, DateTime endTime, String offset, int limit)
-      throws Exception;
-
-  List<MetricName> findNames(String tenantId, Map<String, String> dimensions, String offset, int limit) throws Exception;
-}
diff --git a/java/src/main/java/monasca/api/domain/model/metric/MetricName.java b/java/src/main/java/monasca/api/domain/model/metric/MetricName.java
deleted file mode 100644
index 9b0bbef1b..000000000
--- a/java/src/main/java/monasca/api/domain/model/metric/MetricName.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * (C) Copyright 2014, 2016 Hewlett Packard Enterprise Development LP
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package monasca.api.domain.model.metric;
-
-import com.fasterxml.jackson.annotation.JsonIgnore;
-
-import monasca.common.model.domain.common.AbstractEntity;
-
-
-public class MetricName extends AbstractEntity implements Comparable<MetricName> {
-
-  private String id;
-  private String name;
-
-  public MetricName(String name) {
-    this.id = name;
-    this.name = name;
-  }
-
-  @Override
-  public boolean equals(Object obj) {
-    if (this == obj) {
-      return true;
-    }
-    if (obj == null) {
-      return false;
-    }
-    if (getClass() != obj.getClass()) {
-      return false;
-    }
-    MetricName other = (MetricName) obj;
-    if (id == null) {
-      if (other.id != null) {
-        return false;
-      }
-    } else if (!id.equals(other.id)) {
-      return false;
-    }
-    if (name == null) {
-      if (other.name != null) {
-        return false;
-      }
-    } else if (!name.equals(other.name)) {
-      return false;
-    }
-    return true;
-  }
-
-  @JsonIgnore
-  public String getId() {return id;}
-
-  public String getName() {return name;}
-
-  @Override
-  public int hashCode() {
-    final int prime = 31;
-    int result = 17;
-    result = prime * result + ((id == null) ? 0 : id.hashCode());
-    result = prime * result + ((name == null) ? 0 : name.hashCode());
-    return result;
-  }
-
-  public void setName(String name) {this.name = name;}
-
-  @Override
-  public int compareTo(MetricName other) {
-    return this.name.compareTo(other.name);
-  }
-}
diff --git a/java/src/main/java/monasca/api/domain/model/notificationmethod/NotificationMethod.java b/java/src/main/java/monasca/api/domain/model/notificationmethod/NotificationMethod.java
deleted file mode 100644
index 381eb2cda..000000000
--- a/java/src/main/java/monasca/api/domain/model/notificationmethod/NotificationMethod.java
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * (C) Copyright 2014-2016 Hewlett Packard Enterprise Development LP
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package monasca.api.domain.model.notificationmethod;
-
-import java.util.List;
-
-import monasca.common.model.domain.common.AbstractEntity;
-import monasca.api.domain.model.common.Link;
-import monasca.api.domain.model.common.Linked;
-
-public class NotificationMethod extends AbstractEntity implements Linked {
-  private List<Link> links;
-  private String name;
-  private String type;
-  private String address;
-  private int period;
-
-  public NotificationMethod() {}
-
-  public NotificationMethod(String id, String name, String type, String address, int period) {
-    this.id = id;
-    this.name = name;
-    this.type = type.toUpperCase();
-    this.address = address;
-    this.period = period;
-  }
-
-  @Override
-  public boolean equals(Object obj) {
-    if (this == obj)
-      return true;
-    if (!super.equals(obj))
-      return false;
-    if (getClass() != obj.getClass())
-      return false;
-    NotificationMethod other = (NotificationMethod) obj;
-    if (address == null) {
-      if (other.address != null)
-        return false;
-    } else if (!address.equals(other.address))
-      return false;
-    if (name == null) {
-      if (other.name != null)
-        return false;
-    } else if (!name.equals(other.name))
-      return false;
-    if (period != other.period)
-      return false;
-    if (type == null) {
-      if (other.type != null)
-        return false;
-    } else if (!type.equalsIgnoreCase(other.type))
-      return false;
-    return true;
-  }
-
-  public String getAddress() {
-    return address;
-  }
-
-  public String getId() {
-    return id;
-  }
-
-  public List<Link> getLinks() {
-    return links;
-  }
-
-  public String getName() {
-    return name;
-  }
-
-  public String getType() {
-    return type;
-  }
-
-  public int getPeriod() {
-    return period;
-  }
-
-  @Override
-  public int hashCode() {
-    final int prime = 31;
-    int result = super.hashCode();
-    result = prime * result + ((address == null) ? 0 : address.hashCode());
-    result = prime * result + ((name == null) ? 0 : name.hashCode());
-    result = prime * result + ((type == null) ? 0 : type.hashCode());
-    result = prime * result + period;
-    return result;
-  }
-
-  public void setAddress(String address) {
-    this.address = address;
-  }
-
-  public void setId(String id) {
-    this.id = id;
-  }
-
-  public void setLinks(List<Link> links) {
-    this.links = links;
-  }
-
-  public void setName(String name) {
-    this.name = name;
-  }
-
-  public void setType(String type) {
-    this.type = type.toUpperCase();
-  }
-
-  public void setPeriod(int period) {
-    this.period = period;
-  }
-}
diff --git a/java/src/main/java/monasca/api/domain/model/notificationmethod/NotificationMethodRepo.java b/java/src/main/java/monasca/api/domain/model/notificationmethod/NotificationMethodRepo.java
deleted file mode 100644
index 4d1c504c1..000000000
--- a/java/src/main/java/monasca/api/domain/model/notificationmethod/NotificationMethodRepo.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * (C) Copyright 2014-2016 Hewlett Packard Enterprise Development LP
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package monasca.api.domain.model.notificationmethod;
-
-import java.util.List;
-
-import monasca.api.domain.exception.EntityNotFoundException;
-
-/**
- * Repository for notification methods.
- */
-public interface NotificationMethodRepo {
-  NotificationMethod create(String tenantId, String name, String type,
-      String address, int period);
-
-  /**
-   * @throws EntityNotFoundException if a notification method cannot be found for the
-   *         {@code notificationMethodId}
-   */
-  void deleteById(String tenantId, String notificationMethodId);
-
-  /** Returns whether the {@code notificationMethodId} exists for the {@code tenantId}. */
-  boolean exists(String tenantId, String notificationMethodId);
-
-  /**
-   * @throws EntityNotFoundException if a notification method cannot be found for the
-   *         {@code notificationMethodId}
-   */
-  NotificationMethod findById(String tenantId, String notificationMethodId);
-
-  /**
-   * @throws EntityNotFoundException if a notification method cannot be found for the
-   *         {@code notificationMethodId}
-   */
-  NotificationMethod update(String tenantId, String notificationMethodId, String name,
-      String type, String address, int period);
-
-  List<NotificationMethod> find(String tenantId, List<String> sortBy, String offset, int limit);
-}
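A minimal sketch of constructing the model above; the ID, name, and URL are illustrative, and the reading of period as "0 means non-periodic" is an assumption about Monasca's notification semantics:

    import monasca.api.domain.model.notificationmethod.NotificationMethod;

    public class NotificationSketch {
      // Type strings are upper-cased by the model itself, so "webhook" is
      // stored as "WEBHOOK"; the final argument is the period in seconds
      // (assumed: 0 means non-periodic).
      static NotificationMethod webhook() {
        return new NotificationMethod("abc123", "ops-hook", "webhook",
            "http://localhost:8080/alarm", 60);
      }
    }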
diff --git a/java/src/main/java/monasca/api/domain/model/notificationmethod/NotificationMethodType.java b/java/src/main/java/monasca/api/domain/model/notificationmethod/NotificationMethodType.java
deleted file mode 100644
index 22012c2ec..000000000
--- a/java/src/main/java/monasca/api/domain/model/notificationmethod/NotificationMethodType.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * (C) Copyright 2016 Hewlett Packard Enterprise Development LP
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package monasca.api.domain.model.notificationmethod;
-
-import monasca.common.model.domain.common.AbstractEntity;
-
-public class NotificationMethodType extends AbstractEntity {
-
-  private String type;
-
-  public NotificationMethodType() {
-  }
-
-  public NotificationMethodType(String type) {
-    this.type = type.toUpperCase();
-  }
-
-  public String getType() {
-    return type;
-  }
-
-  public void setType(String type) {
-    this.type = type;
-  }
-
-  public String getId() {return type;}
-
-  @Override
-  public int hashCode() {
-    final int prime = 31;
-    int result = super.hashCode();
-    result = prime * result + ((type == null) ? 0 : type.hashCode());
-    return result;
-  }
-
-  @Override
-  public boolean equals(Object obj) {
-    if (this == obj)
-      return true;
-    if (!super.equals(obj))
-      return false;
-    if (getClass() != obj.getClass())
-      return false;
-    NotificationMethodType other = (NotificationMethodType) obj;
-    if (type == null) {
-      if (other.type != null)
-        return false;
-    } else if (!type.equals(other.type))
-      return false;
-    return true;
-  }
-
-}
\ No newline at end of file
diff --git a/java/src/main/java/monasca/api/domain/model/notificationmethod/NotificationMethodTypesRepo.java b/java/src/main/java/monasca/api/domain/model/notificationmethod/NotificationMethodTypesRepo.java
deleted file mode 100644
index ba5eaa6c9..000000000
--- a/java/src/main/java/monasca/api/domain/model/notificationmethod/NotificationMethodTypesRepo.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * (C) Copyright 2014,2016 Hewlett Packard Enterprise Development LP
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package monasca.api.domain.model.notificationmethod;
-
-import java.util.List;
-
-/**
- * Repository for notification method types.
- */
-public interface NotificationMethodTypesRepo {
-
-  List<String> listNotificationMethodTypes();
-}
diff --git a/java/src/main/java/monasca/api/domain/model/statistic/StatisticRepo.java b/java/src/main/java/monasca/api/domain/model/statistic/StatisticRepo.java
deleted file mode 100644
index 4ec29f0dd..000000000
--- a/java/src/main/java/monasca/api/domain/model/statistic/StatisticRepo.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Copyright (c) 2014, 2016 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package monasca.api.domain.model.statistic;
-
-import org.joda.time.DateTime;
-
-import java.util.List;
-import java.util.Map;
-
-import javax.annotation.Nullable;
-
-/**
- * Repository for statistics.
- */
-public interface StatisticRepo {
-
-  /**
-   * Finds statistics for the given criteria.
-   */
-  List<Statistics> find(String tenantId, String name, Map<String, String> dimensions,
-      DateTime startTime, @Nullable DateTime endTime, List<String> statistics,
-      int period, String offset, int limit, Boolean mergeMetricsFlag,
-      List<String> groupBy)
-      throws Exception;
-}
diff --git a/java/src/main/java/monasca/api/domain/model/statistic/Statistics.java b/java/src/main/java/monasca/api/domain/model/statistic/Statistics.java
deleted file mode 100644
index 107113423..000000000
--- a/java/src/main/java/monasca/api/domain/model/statistic/Statistics.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package monasca.api.domain.model.statistic;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
-import monasca.api.domain.model.measurement.Measurements;
-
-/**
- * Encapsulates a metric's statistics.
- */
-public class Statistics extends Measurements {
-
-  public Statistics() {
-    super();
-  }
-
-  public Statistics(String name, Map<String, String> dimensions, List<String> columns) {
-    super(name, dimensions);
-    this.columns = columns;
-  }
-
-  @Override
-  @JsonProperty("statistics")
-  public List<List<Object>> getMeasurements() {
-    return this.measurements;
-  }
-
-  public void setColumns(List<String> columns) {
-    this.columns = columns;
-  }
-
-  @Override
-  public String toString() {
-    return String.format("Statistics [name=%s, dimensions=%s,statistics=%s]", name, dimensions,
-        measurements);
-  }
-}
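A minimal sketch of the column layout the class above implies: ["timestamp", statistic...] with one row per aggregation window. The metric name, dimension, and values are illustrative:

    import java.util.Arrays;
    import java.util.Collections;

    import monasca.api.domain.model.statistic.Statistics;

    public class StatisticsSketch {
      // One "avg" column next to the timestamp; each row carries one window,
      // e.g. a 300-second bucket averaging to 85.0.
      static Statistics sample() {
        Statistics s = new Statistics("cpu.idle_perc",
            Collections.singletonMap("hostname", "devstack"),
            Arrays.asList("timestamp", "avg"));
        s.addMeasurement(Arrays.asList("2015-03-01T00:00:00Z", 85.0));
        return s;
      }
    }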
diff --git a/java/src/main/java/monasca/api/domain/model/version/Version.java b/java/src/main/java/monasca/api/domain/model/version/Version.java
deleted file mode 100644
index 1f6445f08..000000000
--- a/java/src/main/java/monasca/api/domain/model/version/Version.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package monasca.api.domain.model.version;
-
-import java.util.List;
-
-import org.joda.time.DateTime;
-
-import monasca.common.model.domain.common.AbstractEntity;
-import monasca.api.domain.model.common.Link;
-import monasca.api.domain.model.common.Linked;
-
-public class Version extends AbstractEntity implements Linked {
-  private List<Link> links;
-  public VersionStatus status;
-  public DateTime updated;
-
-  public enum VersionStatus {
-    CURRENT, DEPRECATED, OBSOLETE;
-  }
-
-  public Version() {}
-
-  public Version(String id, VersionStatus status, DateTime updated) {
-    this.id = id;
-    this.status = status;
-    this.updated = updated;
-  }
-
-  public String getId() {
-    return id;
-  }
-
-  @Override
-  public List<Link> getLinks() {
-    return links;
-  }
-
-  @Override
-  public void setLinks(List<Link> links) {
-    this.links = links;
-  }
-}
diff --git a/java/src/main/java/monasca/api/domain/model/version/VersionRepo.java b/java/src/main/java/monasca/api/domain/model/version/VersionRepo.java
deleted file mode 100644
index 905c848b9..000000000
--- a/java/src/main/java/monasca/api/domain/model/version/VersionRepo.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package monasca.api.domain.model.version;
-
-import monasca.api.domain.exception.EntityNotFoundException;
-
-import java.util.List;
-
-/**
- * Repository for versions.
- */
-public interface VersionRepo {
-  List<Version> find();
-
-  /**
-   * @throws EntityNotFoundException if a version cannot be found for the {@code versionId}
-   */
-  Version findById(String versionId);
-}
diff --git a/java/src/main/java/monasca/api/domain/package-info.java b/java/src/main/java/monasca/api/domain/package-info.java
deleted file mode 100644
index 8c9d7b62d..000000000
--- a/java/src/main/java/monasca/api/domain/package-info.java
+++ /dev/null
@@ -1,18 +0,0 @@
-/*
- * Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-
-/**
- * Houses the domain layer.
- */
-package monasca.api.domain;
diff --git a/java/src/main/java/monasca/api/domain/service/impl/VersionRepoImpl.java b/java/src/main/java/monasca/api/domain/service/impl/VersionRepoImpl.java
deleted file mode 100644
index 35f165aab..000000000
--- a/java/src/main/java/monasca/api/domain/service/impl/VersionRepoImpl.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package monasca.api.domain.service.impl;
-
-import java.util.Arrays;
-import java.util.List;
-
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
-
-import monasca.api.domain.exception.EntityNotFoundException;
-import monasca.api.domain.model.version.Version;
-import monasca.api.domain.model.version.Version.VersionStatus;
-import monasca.api.domain.model.version.VersionRepo;
-
-/**
- * Version repository implementation.
- */
-public class VersionRepoImpl implements VersionRepo {
-  private static final Version v2_0 = new Version("v2.0", VersionStatus.CURRENT, new DateTime(
-      DateTimeZone.UTC));
-
-  @Override
-  public List<Version> find() {
-    return Arrays.asList(v2_0);
-  }
-
-  @Override
-  public Version findById(String versionId) {
-    if ("v2.0".equals(versionId))
-      return v2_0;
-    throw new EntityNotFoundException("No version exists for %s", versionId);
-  }
-}
diff --git a/java/src/main/java/monasca/api/infrastructure/InfrastructureModule.java b/java/src/main/java/monasca/api/infrastructure/InfrastructureModule.java
deleted file mode 100644
index b510ca1cb..000000000
--- a/java/src/main/java/monasca/api/infrastructure/InfrastructureModule.java
+++ /dev/null
@@ -1,136 +0,0 @@
-/*
- * (C) Copyright 2014,2016 Hewlett Packard Enterprise Development LP
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package monasca.api.infrastructure;
-
-import javax.inject.Singleton;
-
-import com.google.inject.AbstractModule;
-import com.google.inject.ProvisionException;
-
-import monasca.api.ApiConfig;
-import monasca.api.domain.model.alarm.AlarmRepo;
-import monasca.api.domain.model.alarmdefinition.AlarmDefinitionRepo;
-import monasca.api.domain.model.alarmstatehistory.AlarmStateHistoryRepo;
-import monasca.api.domain.model.dimension.DimensionRepo;
-import monasca.api.domain.model.measurement.MeasurementRepo;
-import monasca.api.domain.model.metric.MetricDefinitionRepo;
-import monasca.api.domain.model.notificationmethod.NotificationMethodRepo;
-import monasca.api.domain.model.notificationmethod.NotificationMethodTypesRepo;
-import monasca.api.domain.model.statistic.StatisticRepo;
-import monasca.api.infrastructure.persistence.PersistUtils;
-import monasca.api.infrastructure.persistence.Utils;
-import monasca.api.infrastructure.persistence.hibernate.AlarmDefinitionSqlRepoImpl;
-import monasca.api.infrastructure.persistence.hibernate.AlarmHibernateUtils;
-import monasca.api.infrastructure.persistence.hibernate.AlarmSqlRepoImpl;
-import monasca.api.infrastructure.persistence.hibernate.NotificationMethodSqlRepoImpl;
-import monasca.api.infrastructure.persistence.hibernate.NotificationMethodTypesSqlRepoImpl;
-import monasca.api.infrastructure.persistence.influxdb.InfluxV9AlarmStateHistoryRepo;
-import monasca.api.infrastructure.persistence.influxdb.InfluxV9DimensionRepo;
-import monasca.api.infrastructure.persistence.influxdb.InfluxV9MeasurementRepo;
-import monasca.api.infrastructure.persistence.influxdb.InfluxV9MetricDefinitionRepo;
-import monasca.api.infrastructure.persistence.influxdb.InfluxV9RepoReader;
-import monasca.api.infrastructure.persistence.influxdb.InfluxV9StatisticRepo;
-import monasca.api.infrastructure.persistence.influxdb.InfluxV9Utils;
-import monasca.api.infrastructure.persistence.mysql.AlarmDefinitionMySqlRepoImpl;
-import monasca.api.infrastructure.persistence.mysql.AlarmMySqlRepoImpl;
-import monasca.api.infrastructure.persistence.mysql.MySQLUtils;
-import monasca.api.infrastructure.persistence.mysql.NotificationMethodMySqlRepoImpl;
-import monasca.api.infrastructure.persistence.mysql.NotificationMethodTypesMySqlRepoImpl;
-import monasca.api.infrastructure.persistence.vertica.AlarmStateHistoryVerticaRepoImpl;
-import monasca.api.infrastructure.persistence.vertica.DimensionVerticaRepoImpl;
-import monasca.api.infrastructure.persistence.vertica.MeasurementVerticaRepoImpl;
-import monasca.api.infrastructure.persistence.vertica.MetricDefinitionVerticaRepoImpl;
-import monasca.api.infrastructure.persistence.vertica.StatisticVerticaRepoImpl;
-
-/**
- * Infrastructure layer bindings.
- */
-public class InfrastructureModule extends AbstractModule {
-
-  private ApiConfig config;
-
-  private static final String VERTICA = "vertica";
-  private static final String INFLUXDB = "influxdb";
-  private static final String INFLUXDB_V9 = "v9";
-
-  public InfrastructureModule(ApiConfig config) {
-    this.config = config;
-  }
-
-  @Override
-  protected void configure() {
-    final boolean hibernateEnabled = this.isHibernateEnabled();
-
-    this.bindUtils(hibernateEnabled);
-
-    // Bind repositories
-
-    if (hibernateEnabled) {
-      this.bind(AlarmRepo.class).to(AlarmSqlRepoImpl.class).in(Singleton.class);
-      this.bind(AlarmDefinitionRepo.class).to(AlarmDefinitionSqlRepoImpl.class).in(Singleton.class);
-      this.bind(NotificationMethodRepo.class).to(NotificationMethodSqlRepoImpl.class).in(Singleton.class);
-      this.bind(NotificationMethodTypesRepo.class).to(NotificationMethodTypesSqlRepoImpl.class).in(Singleton.class);
-    } else {
-      bind(AlarmRepo.class).to(AlarmMySqlRepoImpl.class).in(Singleton.class);
-      bind(AlarmDefinitionRepo.class).to(AlarmDefinitionMySqlRepoImpl.class).in(Singleton.class);
-      bind(NotificationMethodRepo.class).to(NotificationMethodMySqlRepoImpl.class).in(Singleton.class);
-      bind(NotificationMethodTypesRepo.class).to(NotificationMethodTypesMySqlRepoImpl.class).in(Singleton.class);
-      bind(PersistUtils.class).in(Singleton.class);
-    }
-
-    if (config.databaseConfiguration.getDatabaseType().trim().equalsIgnoreCase(VERTICA)) {
-
-      bind(AlarmStateHistoryRepo.class).to(AlarmStateHistoryVerticaRepoImpl.class).in(Singleton.class);
-      bind(DimensionRepo.class).to(DimensionVerticaRepoImpl.class).in(Singleton.class);
-      bind(MetricDefinitionRepo.class).to(MetricDefinitionVerticaRepoImpl.class).in(Singleton.class);
-      bind(MeasurementRepo.class).to(MeasurementVerticaRepoImpl.class).in(Singleton.class);
-      bind(StatisticRepo.class).to(StatisticVerticaRepoImpl.class).in(Singleton.class);
-
-    } else if (config.databaseConfiguration.getDatabaseType().trim().equalsIgnoreCase(INFLUXDB)) {
-
-      if (config.influxDB.getVersion() != null && !config.influxDB.getVersion()
-          .equalsIgnoreCase(INFLUXDB_V9)) {
-
-        System.err.println("Found unsupported Influxdb version: " + config.influxDB.getVersion());
-        System.err.println("Supported Influxdb versions are 'v9'");
-        System.err.println("Check your config file");
-        System.exit(1);
-
-      }
-
-      bind(InfluxV9Utils.class).in(Singleton.class);
-      bind(InfluxV9RepoReader.class).in(Singleton.class);
-      bind(AlarmStateHistoryRepo.class).to(InfluxV9AlarmStateHistoryRepo.class).in(Singleton.class);
-      bind(DimensionRepo.class).to(InfluxV9DimensionRepo.class).in(Singleton.class);
-      bind(MetricDefinitionRepo.class).to(InfluxV9MetricDefinitionRepo.class).in(Singleton.class);
-      bind(MeasurementRepo.class).to(InfluxV9MeasurementRepo.class).in(Singleton.class);
-      bind(StatisticRepo.class).to(InfluxV9StatisticRepo.class).in(Singleton.class);
-
-    } else {
-
-      throw new ProvisionException("Failed to detect supported database. Supported databases are "
-          + "'vertica' and 'influxdb'. Check your config file.");
-    }
-  }
-
-  private boolean isHibernateEnabled() {
-    return this.config.hibernate != null && this.config.hibernate.getSupportEnabled();
-  }
-
-  private void bindUtils(final boolean hibernateEnabled) {
-    final Class<? extends Utils> implementation = hibernateEnabled ? AlarmHibernateUtils.class : MySQLUtils.class;
-    this.bind(Utils.class).to(implementation).in(Singleton.class);
-  }
-
-}
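A minimal, self-contained sketch of the configuration-driven binding pattern the module above uses; the Repo/SqlRepo/TsdbRepo names are hypothetical stand-ins for the real repository bindings:

    import com.google.inject.AbstractModule;
    import com.google.inject.Guice;
    import com.google.inject.Injector;

    public class BindingSketch extends AbstractModule {
      private final boolean useSql;

      public BindingSketch(boolean useSql) { this.useSql = useSql; }

      interface Repo {}
      static class SqlRepo implements Repo {}
      static class TsdbRepo implements Repo {}

      @Override
      protected void configure() {
        // Pick the concrete implementation from config at module-construction
        // time, exactly as the module above does per database type.
        bind(Repo.class).to(useSql ? SqlRepo.class : TsdbRepo.class);
      }

      public static void main(String[] args) {
        Injector injector = Guice.createInjector(new BindingSketch(true));
        System.out.println(injector.getInstance(Repo.class).getClass().getSimpleName());
      }
    }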
diff --git a/java/src/main/java/monasca/api/infrastructure/middleware/MiddlewareConfiguration.java b/java/src/main/java/monasca/api/infrastructure/middleware/MiddlewareConfiguration.java
deleted file mode 100644
index 1ba26f125..000000000
--- a/java/src/main/java/monasca/api/infrastructure/middleware/MiddlewareConfiguration.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package monasca.api.infrastructure.middleware;
-
-import java.util.List;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-/**
- * CS Middleware configuration.
- */
-public class MiddlewareConfiguration {
-  public Boolean enabled = false;
-  @JsonProperty
-  public String serverVIP;
-  @JsonProperty
-  public String serverPort;
-  @JsonProperty
-  public Boolean useHttps = Boolean.FALSE;
-  @JsonProperty
-  public String connTimeout = "500";
-  @JsonProperty
-  public Boolean connSSLClientAuth = Boolean.FALSE;
-  @JsonProperty
-  public String connPoolMaxActive = "3";
-  @JsonProperty
-  public String connPoolMaxIdle = "3";
-  @JsonProperty
-  public String connPoolEvictPeriod = "600000";
-  @JsonProperty
-  public String connPoolMinIdleTime = "600000";
-  @JsonProperty
-  public String connRetryTimes = "2";
-  @JsonProperty
-  public String connRetryInterval = "50";
-  @JsonProperty
-  public List<String> defaultAuthorizedRoles;
-  @JsonProperty
-  public List<String> readOnlyAuthorizedRoles;
-  @JsonProperty
-  public List<String> agentAuthorizedRoles;
-  @JsonProperty
-  public String delegateAuthorizedRole;
-  @JsonProperty
-  public String adminRole;
-  @JsonProperty
-  public String timeToCacheToken = "600";
-  @JsonProperty
-  public String adminAuthMethod;
-  @JsonProperty
-  public String adminUser;
-  @JsonProperty
-  public String adminToken;
-  @JsonProperty
-  public String adminPassword;
-  @JsonProperty
-  public String adminProjectId = "";
-  @JsonProperty
-  public String adminProjectName = "";
-  @JsonProperty
-  public String adminUserDomainId = "";
-  @JsonProperty
-  public String adminUserDomainName = "";
-  @JsonProperty
-  public String adminProjectDomainId = "";
-  @JsonProperty
-  public String adminProjectDomainName = "";
-  @JsonProperty
-  public String maxTokenCacheSize = "1048576";
-  @JsonProperty
-  public String truststore;
-  @JsonProperty
-  public String truststorePassword;
-  @JsonProperty
-  public String keystore;
-  @JsonProperty
-  public String keystorePassword;
-}
diff --git a/java/src/main/java/monasca/api/infrastructure/package-info.java b/java/src/main/java/monasca/api/infrastructure/package-info.java
deleted file mode 100644
index e975bf62a..000000000
--- a/java/src/main/java/monasca/api/infrastructure/package-info.java
+++ /dev/null
@@ -1,18 +0,0 @@
-/*
- * Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-
-/**
- * Houses the infrastructure layer.
- */
-package monasca.api.infrastructure;
diff --git a/java/src/main/java/monasca/api/infrastructure/persistence/DimensionQueries.java b/java/src/main/java/monasca/api/infrastructure/persistence/DimensionQueries.java
deleted file mode 100644
index 81f4b64c1..000000000
--- a/java/src/main/java/monasca/api/infrastructure/persistence/DimensionQueries.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright (c) 2014,2016 Hewlett Packard Enterprise Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package monasca.api.infrastructure.persistence;
-
-import com.google.common.base.Splitter;
-import com.google.common.base.Strings;
-
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-import org.skife.jdbi.v2.Query;
-
-/**
- * Utilities for building dimension queries.
- *
- * This class has issues with testing with mockito because the bind method on the Query class is final.
- */
-public final class DimensionQueries {
-
-  private DimensionQueries() {}
-
-  public static void bindDimensionsToQuery(
-      Query<Map<String, Object>> query,
-      Map<String, String> dimensions) {
-
-    if (dimensions != null) {
-      int i = 0;
-      for (Iterator<Map.Entry<String, String>> it = dimensions.entrySet().iterator(); it.hasNext(); i++) {
-        Map.Entry<String, String> entry = it.next();
-        query.bind("dname" + i, entry.getKey());
-        if (!Strings.isNullOrEmpty(entry.getValue())) {
-          List<String> values = Splitter.on('|').splitToList(entry.getValue());
-          int j = 0;
-          for (String value : values) {
-            query.bind("dvalue" + i + '_' + j, value);
-            j++;
-          }
-        }
-      }
-    }
-  }
-
-  public static Map<String, String> dimensionsFor(String dimensionSet) {
-
-    Map<String, String> dimensions = Collections.emptyMap();
-
-    if (dimensionSet != null) {
-      dimensions = new HashMap<>();
-      for (String kvStr : dimensionSet.split(",")) {
-        String[] kv = kvStr.split("=");
-        if (kv.length > 1)
-          dimensions.put(kv[0], kv[1]);
-      }
-    }
-
-    return dimensions;
-  }
-}
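A short demo of the dimensionsFor parsing above, which splits a comma-separated "name=value" string into a map (sample input values are illustrative):

    import java.util.Map;

    import monasca.api.infrastructure.persistence.DimensionQueries;

    public class DimensionParseSketch {
      public static void main(String[] args) {
        // "service=compute,hostname=devstack" -> {service=compute, hostname=devstack}
        Map<String, String> dims =
            DimensionQueries.dimensionsFor("service=compute,hostname=devstack");
        System.out.println(dims);
      }
    }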
- * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.infrastructure.persistence; - -import monasca.api.ApiConfig; - -import com.google.inject.Inject; - -import org.joda.time.format.DateTimeFormatter; -import org.joda.time.format.ISODateTimeFormat; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.text.ParseException; -import java.util.Date; - -public class PersistUtils { - - private static final Logger logger = LoggerFactory.getLogger(PersistUtils.class); - - private int maxQueryLimit; - - private final int DEFAULT_MAX_QUERY_LIMIT = 10000; - - private DateTimeFormatter isoFormat = ISODateTimeFormat.dateTime(); - - @Inject - public PersistUtils(ApiConfig config) { - - setMaxQueryLimit(config.maxQueryLimit); - - } - - private void setMaxQueryLimit(int maxQueryLimit) { - - // maxQueryLimit could be 0 if maxQueryLimit is not specified in the config file. - if (maxQueryLimit <= 0) { - - logger.warn(String.format("Found invalid maxQueryLimit: [%1d]. maxQueryLimit must be a positive integer.", maxQueryLimit)); - logger.warn(String.format("Setting maxQueryLimit to default: [%1d]", DEFAULT_MAX_QUERY_LIMIT)); - logger.warn("Please check your config file for a valid maxQueryLimit property"); - - this.maxQueryLimit = DEFAULT_MAX_QUERY_LIMIT; - - } else { - - this.maxQueryLimit = maxQueryLimit; - } - } - - public PersistUtils(int maxQueryLimit) { - - setMaxQueryLimit(maxQueryLimit); - - } - - public PersistUtils() { - - this.maxQueryLimit = DEFAULT_MAX_QUERY_LIMIT; - } - - public int getLimit(String limit) { - - if (limit == null || limit.isEmpty()) { - return this.maxQueryLimit; - } - - int limitInt; - try { - limitInt = Integer.parseInt(limit); - } catch (NumberFormatException e) { - throw new IllegalArgumentException( - String.format("Found invalid Limit: '%1$s'. Limit must be a positive integer.", limit)); - } - - if (limitInt <= 0) { - throw new IllegalArgumentException( - String.format("Found invalid Limit: '%1$s'. Limit must be a positive integer.", limit)); - } - - if (limitInt <= this.maxQueryLimit) { - - return limitInt; - - } else { - - return this.maxQueryLimit; - } - } - - public Date parseTimestamp(String timestampString) throws ParseException { - return isoFormat.parseDateTime(timestampString.trim().replace(' ', 'T')).toDate(); - } -} diff --git a/java/src/main/java/monasca/api/infrastructure/persistence/SubAlarmDefinitionQueries.java b/java/src/main/java/monasca/api/infrastructure/persistence/SubAlarmDefinitionQueries.java deleted file mode 100644 index 2e26a4a8c..000000000 --- a/java/src/main/java/monasca/api/infrastructure/persistence/SubAlarmDefinitionQueries.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * (C) Copyright 2014,2016 Hewlett Packard Enterprise Development Company LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. 
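// Worked examples for PersistUtils.getLimit(...) above, assuming construction via
// new PersistUtils(10000), i.e. the DEFAULT_MAX_QUERY_LIMIT:
//   getLimit(null)    -> 10000  (missing limit falls back to the maximum)
//   getLimit("500")   -> 500    (within bounds, taken as-is)
//   getLimit("20000") -> 10000  (silently capped at maxQueryLimit)
//   getLimit("0"), getLimit("abc") -> IllegalArgumentException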
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.infrastructure.persistence; - -import com.google.common.base.Strings; - -import org.skife.jdbi.v2.Query; - -import java.util.Map; - -/** - * Utilities for building sub alarm queries. - */ -public final class SubAlarmDefinitionQueries { - private SubAlarmDefinitionQueries() {} - - public static String buildJoinClauseFor(Map dimensions) { - - StringBuilder sbJoin = new StringBuilder(); - - if (dimensions != null) { - - sbJoin = new StringBuilder(); - - int i = 0; - for (String dimension_key : dimensions.keySet()) { - sbJoin.append(" inner join sub_alarm_definition_dimension d").append(i).append(" on d") - .append(i) - .append(".dimension_name = :dname").append(i); - if (!Strings.isNullOrEmpty(dimensions.get(dimension_key))) { - sbJoin.append(" and d").append(i) - .append(".value = :dvalue").append(i); - } - sbJoin.append(" and dim.sub_alarm_definition_id = d") - .append(i).append(".sub_alarm_definition_id"); - i++; - } - } - - return sbJoin.toString(); - } - - public static void bindDimensionsToQuery(Query query, Map dimensions) { - if (dimensions != null) { - int i = 0; - for (Map.Entry entry: dimensions.entrySet()) { - query.bind("dname" + i, entry.getKey()); - query.bind("dvalue" + i, entry.getValue()); - i++; - } - } - } -} diff --git a/java/src/main/java/monasca/api/infrastructure/persistence/Utils.java b/java/src/main/java/monasca/api/infrastructure/persistence/Utils.java deleted file mode 100644 index 76701426c..000000000 --- a/java/src/main/java/monasca/api/infrastructure/persistence/Utils.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright 2015 FUJITSU LIMITED - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
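// For example, with dimensions = {hostname=devstack} the
// SubAlarmDefinitionQueries.buildJoinClauseFor(...) builder above produces
// (modulo whitespace):
//   inner join sub_alarm_definition_dimension d0
//     on d0.dimension_name = :dname0
//    and d0.value = :dvalue0
//    and dim.sub_alarm_definition_id = d0.sub_alarm_definition_id
// and bindDimensionsToQuery(...) then binds :dname0 -> "hostname",
// :dvalue0 -> "devstack". A dimension with an empty value contributes only the
// :dname0 predicate, matching any value of that dimension name.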
- * - */ - -package monasca.api.infrastructure.persistence; - -import java.util.List; -import java.util.Map; - - -abstract public class Utils { - - public abstract List findAlarmIds(String tenantId, - Map dimensions); - - protected String buildJoinClauseFor(Map dimensions) { - - if ((dimensions == null) || dimensions.isEmpty()) { - return ""; - } - - final StringBuilder sb = - new StringBuilder( - "join alarm_metric as am on a.id=am.alarm_id " - + "join metric_definition_dimensions as mdd on am.metric_definition_dimensions_id=mdd.id "); - - for (int i = 0; i < dimensions.size(); i++) { - - final String tableAlias = "md" + i; - - sb.append(" inner join metric_dimension ") - .append(tableAlias) - .append(" on ") - .append(tableAlias) - .append(".name = :dname") - .append(i) - .append(" and ") - .append(tableAlias) - .append(".value = :dvalue") - .append(i) - .append(" and mdd.metric_dimension_set_id = ") - .append(tableAlias) - .append(".dimension_set_id"); - } - - return sb.toString(); - } - -} diff --git a/java/src/main/java/monasca/api/infrastructure/persistence/hibernate/AlarmDefinitionSqlRepoImpl.java b/java/src/main/java/monasca/api/infrastructure/persistence/hibernate/AlarmDefinitionSqlRepoImpl.java deleted file mode 100644 index a14ed3e71..000000000 --- a/java/src/main/java/monasca/api/infrastructure/persistence/hibernate/AlarmDefinitionSqlRepoImpl.java +++ /dev/null @@ -1,828 +0,0 @@ -/* - * Copyright 2015 FUJITSU LIMITED - * Copyright 2016 Hewlett Packard Enterprise Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
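// For a single dimension {hostname=devstack}, Utils.buildJoinClauseFor(...) above
// returns (modulo whitespace):
//   join alarm_metric as am on a.id=am.alarm_id
//   join metric_definition_dimensions as mdd
//     on am.metric_definition_dimensions_id=mdd.id
//   inner join metric_dimension md0
//     on md0.name = :dname0 and md0.value = :dvalue0
//    and mdd.metric_dimension_set_id = md0.dimension_set_id
// Callers (e.g. AlarmHibernateUtils further below) splice this clause into their
// alarm-id query and bind :dname0/:dvalue0 positionally.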
- */ -package monasca.api.infrastructure.persistence.hibernate; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import javax.inject.Inject; -import javax.inject.Named; - -import org.apache.commons.collections4.CollectionUtils; -import org.apache.commons.collections4.MapUtils; -import org.hibernate.Query; -import org.hibernate.Session; -import org.hibernate.SessionFactory; -import org.hibernate.StatelessSession; -import org.hibernate.Transaction; -import org.hibernate.criterion.Projections; -import org.hibernate.criterion.Restrictions; -import org.hibernate.transform.AliasToEntityMapResultTransformer; -import org.hibernate.transform.ResultTransformer; -import org.joda.time.DateTime; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.google.common.base.Joiner; -import com.google.common.base.Splitter; -import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; - -import monasca.api.domain.exception.EntityNotFoundException; -import monasca.api.domain.model.alarmdefinition.AlarmDefinition; -import monasca.api.domain.model.alarmdefinition.AlarmDefinitionRepo; -import monasca.api.infrastructure.persistence.SubAlarmDefinitionQueries; -import monasca.common.hibernate.db.AlarmActionDb; -import monasca.common.hibernate.db.AlarmDb; -import monasca.common.hibernate.db.AlarmDefinitionDb; -import monasca.common.hibernate.db.SubAlarmDefinitionDb; -import monasca.common.hibernate.db.SubAlarmDefinitionDimensionDb; -import monasca.common.hibernate.db.SubAlarmDefinitionDimensionId; -import monasca.common.model.alarm.AggregateFunction; -import monasca.common.model.alarm.AlarmOperator; -import monasca.common.model.alarm.AlarmSeverity; -import monasca.common.model.alarm.AlarmState; -import monasca.common.model.alarm.AlarmSubExpression; -import monasca.common.model.metric.MetricDefinition; - -/** - * Alarm repository implementation. 
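 * (More precisely, this is the Hibernate-backed {@link AlarmDefinitionRepo}
 * implementation: it manages alarm definitions, not alarm instances, which are
 * handled by AlarmSqlRepoImpl below.)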
- */ -public class AlarmDefinitionSqlRepoImpl - extends BaseSqlRepo - implements AlarmDefinitionRepo { - private static final ResultTransformer ALARM_DEF_RESULT_TRANSFORMER = getAlarmDefResultTransformer(); - private static final String ID = "ID"; - private static final String NAME = "NAME"; - private static final String DESCRIPTION = "DESCRIPTION"; - private static final String EXPRESSION = "EXPRESSION"; - private static final String SEVERITY = "SEVERITY"; - private static final String MATCH_BY = "MATCH_BY"; - private static final String ACTIONS_ENABLED = "ACTIONS_ENABLED"; - private static final String STATE = "STATE"; - private static final String NOTIFICATION_ID = "NOTIFICATIONIDS"; - private static final Joiner COMMA_JOINER = Joiner.on(','); - private static final Splitter COMMA_SPLITTER = Splitter.on(',').omitEmptyStrings().trimResults(); - private static final Logger logger = LoggerFactory.getLogger(AlarmDefinitionSqlRepoImpl.class); - private static final String FIND_ALARM_DEF_SQL = "SELECT t.id, t.tenant_id, t.name, " - + "t.description, t.expression, t.severity, t.match_by, " - + "t.actions_enabled, aa.alarm_state AS state, aa.action_id AS notificationIds " - + "FROM (SELECT distinct ad.id, ad.tenant_id, ad.name, ad.description, ad.expression, " - + "ad.severity, ad.match_by, ad.actions_enabled, ad.created_at, ad.updated_at, ad.deleted_at " - + "FROM alarm_definition AS ad LEFT OUTER JOIN sub_alarm_definition AS sad ON ad.id = sad.alarm_definition_id " - + "LEFT OUTER JOIN sub_alarm_definition_dimension AS dim ON sad.id = dim.sub_alarm_definition_id %1$s " - + "WHERE ad.tenant_id = :tenantId AND ad.deleted_at IS NULL %2$s ORDER BY ad.id %3$s) AS t " - + "LEFT OUTER JOIN alarm_action AS aa ON t.id = aa.alarm_definition_id %4$s"; - - @Inject - public AlarmDefinitionSqlRepoImpl(@Named("orm") SessionFactory sessionFactory) { - super(sessionFactory); - } - - @Override - public AlarmDefinition create(String tenantId, String id, String name, String description, String severity, String expression, - Map subExpressions, List matchBy, List alarmActions, List okActions, - List undeterminedActions) { - logger.trace(ORM_LOG_MARKER, "create(...) entering..."); - - Transaction tx = null; - Session session = null; - try { - session = sessionFactory.openSession(); - tx = session.beginTransaction(); - - final DateTime now = this.getUTCNow(); - final AlarmDefinitionDb alarmDefinition = new AlarmDefinitionDb( - id, - tenantId, - name, - description, - expression, - AlarmSeverity.valueOf(severity.toUpperCase()), - matchBy == null || Iterables.isEmpty(matchBy) ? null : COMMA_JOINER.join(matchBy), - true, - now, - now, - null - ); - session.save(alarmDefinition); - - this.createSubExpressions(session, alarmDefinition, subExpressions); - - // Persist actions - this.persistActions(session, alarmDefinition, AlarmState.ALARM, alarmActions); - this.persistActions(session, alarmDefinition, AlarmState.OK, okActions); - this.persistActions(session, alarmDefinition, AlarmState.UNDETERMINED, undeterminedActions); - - tx.commit(); - tx = null; - - logger.debug(ORM_LOG_MARKER, "AlarmDefinition [ {} ] has been committed to database", alarmDefinition); - - return new AlarmDefinition( - id, - name, - description, - severity, - expression, - matchBy, - true, - alarmActions == null ? Collections.emptyList() : alarmActions, - okActions == null ? Collections.emptyList() : okActions, - undeterminedActions == null ? 
Collections.emptyList() : undeterminedActions - ); - - } catch (RuntimeException e) { - this.rollbackIfNotNull(tx); - throw e; - } finally { - if (session != null) { - session.close(); - } - } - } - - @Override - public void deleteById(String tenantId, String alarmDefId) { - logger.trace(ORM_LOG_MARKER, "deleteById(...) entering..."); - - Session session = null; - Transaction tx = null; - try { - session = sessionFactory.openSession(); - tx = session.beginTransaction(); - - final AlarmDefinitionDb result = (AlarmDefinitionDb) session - .getNamedQuery(AlarmDefinitionDb.Queries.FIND_BY_TENANT_AND_ID_NOT_DELETED) - .setString("tenant_id", tenantId) - .setString("id", alarmDefId) - .uniqueResult(); - - result.setDeletedAt(this.getUTCNow()); - session.update(result); - - // Cascade soft delete to alarms - session - .getNamedQuery(AlarmDb.Queries.DELETE_BY_ALARMDEFINITION_ID) - .setString("alarmDefinitionId", alarmDefId) - .executeUpdate(); - - tx.commit(); - tx = null; - - logger.debug(ORM_LOG_MARKER, "AlarmDefinition [ {} ] has been deleted from database", result); - - } catch (Exception e) { - this.rollbackIfNotNull(tx); - throw e; - } finally { - if (session != null) { - session.close(); - } - } - } - - @Override - public String exists(final String tenantId, - final String name) { - logger.trace(ORM_LOG_MARKER, "exists(...) entering..."); - - StatelessSession session = null; - try { - session = sessionFactory.openStatelessSession(); - - List ids = session - .createCriteria(AlarmDefinitionDb.class) - .add(Restrictions.eq("tenantId", tenantId)) - .add(Restrictions.eq("name", name)) - .add(Restrictions.isNull("deletedAt")) - .setProjection(Projections.property("id")) - .setMaxResults(1) - .list(); - - final String existingId = CollectionUtils.isEmpty(ids) ? null : (String) ids.get(0); - - if (null == existingId) { - logger.debug(ORM_LOG_MARKER, "No AlarmDefinition matched tenantId={} and name={}", tenantId, name); - } - - return existingId; - } finally { - if (session != null) { - session.close(); - } - } - } - - @Override - @SuppressWarnings("unchecked") - public List find(String tenantId, String name, Map dimensions, - List severities, List sortBy, - String offset, int limit) { - logger.trace(ORM_LOG_MARKER, "find(...) 
entering..."); - - Session session = null; - List resultSet = Lists.newArrayList(); - - final StringBuilder sbWhere = new StringBuilder(); - final StringBuilder limitOffset = new StringBuilder(); - final StringBuilder orderByPart = new StringBuilder(); - - if (name != null) { - sbWhere.append(" and ad.name = :name"); - } - - if (CollectionUtils.isNotEmpty(severities)) { - if (severities.size() == 1) { - sbWhere.append(" and ad.severity = :severity"); - } else { - sbWhere.append(" and ("); - for (int i = 0; i < severities.size(); i++) { - sbWhere.append("ad.severity = :severity_").append(i); - if (i < severities.size() - 1) { - sbWhere.append(" or "); - } - } - sbWhere.append(")"); - } - } - - if (limit > 0) { - limitOffset.append(" limit :limit"); - } - - if (offset != null) { - limitOffset.append(" offset :offset "); - } - - if (sortBy != null && !sortBy.isEmpty()) { - orderByPart.append(" order by ").append(COMMA_JOINER.join(sortBy)); - if (!sortBy.contains("id")) { - orderByPart.append(",id"); - } - } else { - orderByPart.append(" order by id "); - } - - final String sql = String.format( - FIND_ALARM_DEF_SQL, - SubAlarmDefinitionQueries.buildJoinClauseFor(dimensions), - sbWhere, - limitOffset, - orderByPart - ); - - try { - session = sessionFactory.openSession(); - - final Query qAlarmDefinition = session - .createSQLQuery(sql) - .setString("tenantId", tenantId) - .setReadOnly(true) - .setResultTransformer(ALARM_DEF_RESULT_TRANSFORMER); - - if (name != null) { - qAlarmDefinition.setString("name", name); - } - - if (CollectionUtils.isNotEmpty(severities)) { - if (severities.size() == 1) { - qAlarmDefinition.setString("severity", severities.get(0).name()); - } else { - for (int it = 0; it < severities.size(); it++) { - qAlarmDefinition.setString(String.format("severity_%d", it), severities.get(it).name()); - } - } - } - - if (limit > 0) { - qAlarmDefinition.setInteger("limit", limit + 1); - } - - if (offset != null) { - qAlarmDefinition.setInteger("offset", Integer.parseInt(offset)); - } - - this.bindDimensionsToQuery(qAlarmDefinition, dimensions); - - final List> alarmDefinitionDbList = qAlarmDefinition.list(); - - resultSet = CollectionUtils.isEmpty(alarmDefinitionDbList) ? - Lists.newArrayList() : - this.createAlarmDefinitions(alarmDefinitionDbList); - - } finally { - if (session != null) { - session.close(); - } - } - - return resultSet; - } - - @Override - @SuppressWarnings("unchecked") - public AlarmDefinition findById(String tenantId, String alarmDefId) { - logger.trace(ORM_LOG_MARKER, "findById(...) 
entering..."); - - Session session = null; - List okActionIds = null; - List alarmActionIds = null; - List undeterminedActionIds = null; - - try { - session = sessionFactory.openSession(); - - final AlarmDefinitionDb alarmDefinitionDb = (AlarmDefinitionDb) session - .getNamedQuery(AlarmDefinitionDb.Queries.FIND_BY_TENANT_AND_ID_NOT_DELETED) - .setString("tenant_id", tenantId) - .setString("id", alarmDefId).uniqueResult(); - - if (alarmDefinitionDb == null) { - throw new EntityNotFoundException("No alarm definition exists for tenantId=%s and id=%s", tenantId, alarmDefId); - } - - final List alarmActionList = session - .getNamedQuery(AlarmActionDb.Queries.FIND_BY_TENANT_ID_AND_ALARMDEFINITION_ID_DISTINCT) - .setString("tenantId", tenantId) - .setString("alarmDefId", alarmDefId) - .list(); - - if(!CollectionUtils.isEmpty(alarmActionList)) { - - logger.debug(ORM_LOG_MARKER, "Located {} AlarmActions for AlarmDefinition {}", alarmActionList.size(), alarmDefinitionDb); - - okActionIds = Lists.newArrayList(); - alarmActionIds = Lists.newArrayList(); - undeterminedActionIds = Lists.newArrayList(); - - for (final AlarmActionDb alarmAction : alarmActionList) { - if (alarmAction.isInAlarmState(AlarmState.UNDETERMINED)) { - undeterminedActionIds.add(alarmAction.getAlarmActionId().getActionId()); - } else if (alarmAction.isInAlarmState(AlarmState.OK)) { - okActionIds.add(alarmAction.getAlarmActionId().getActionId()); - } else if (alarmAction.isInAlarmState(AlarmState.ALARM)) { - alarmActionIds.add(alarmAction.getAlarmActionId().getActionId()); - } - } - - } - - return new AlarmDefinition( - alarmDefinitionDb.getId(), - alarmDefinitionDb.getName(), - alarmDefinitionDb.getDescription(), - alarmDefinitionDb.getSeverity().name(), - alarmDefinitionDb.getExpression(), - this.splitStringIntoList(alarmDefinitionDb.getMatchBy()), - alarmDefinitionDb.isActionsEnabled(), - alarmActionIds == null ? Collections.emptyList() : alarmActionIds, - okActionIds == null ? Collections.emptyList() : okActionIds, - undeterminedActionIds == null ? Collections.emptyList() : undeterminedActionIds - ); - - } finally { - if (session != null) { - session.close(); - } - } - } - - @Override - @SuppressWarnings("unchecked") - public Map findSubAlarmMetricDefinitions(String alarmDefId) { - logger.trace(ORM_LOG_MARKER, "findSubAlarmMetricDefinitions(...) 
entering..."); - - Session session = null; - Map subAlarmMetricDefs = Maps.newHashMap(); - - try { - - session = sessionFactory.openSession(); - - final List subAlarmDefList = session - .getNamedQuery(SubAlarmDefinitionDb.Queries.BY_ALARMDEFINITION_ID) - .setString("id", alarmDefId) - .list(); - final List subAlarmDefDimensionList = session - .getNamedQuery(SubAlarmDefinitionDb.Queries.BY_ALARMDEFINITIONDIMENSION_SUBEXPRESSION_ID) - .setString("id", alarmDefId) - .list(); - - final Map> subAlarmDefDimensionMapExpression = this.mapAlarmDefDimensionExpression( - subAlarmDefDimensionList - ); - - for (SubAlarmDefinitionDb subAlarmDef : subAlarmDefList) { - String id = subAlarmDef.getId(); - String metricName = subAlarmDef.getMetricName(); - Map dimensions = Collections.emptyMap(); - if (subAlarmDefDimensionMapExpression.containsKey(id)) { - dimensions = subAlarmDefDimensionMapExpression.get(id); - } - subAlarmMetricDefs.put(id, new MetricDefinition(metricName, dimensions)); - } - return subAlarmMetricDefs; - - } finally { - if (session != null) { - session.close(); - } - } - } - - @Override - @SuppressWarnings("unchecked") - public Map findSubExpressions(String alarmDefId) { - logger.trace(ORM_LOG_MARKER, "findSubExpressions(...) entering..."); - - Session session = null; - Map subExpressions = Maps.newHashMap(); - try { - - session = sessionFactory.openSession(); - - List subAlarmDefList = session - .getNamedQuery(SubAlarmDefinitionDb.Queries.BY_ALARMDEFINITION_ID) - .setString("id", alarmDefId) - .list(); - - Query querySybAlarmDefDimension = session - .getNamedQuery(SubAlarmDefinitionDb.Queries.BY_ALARMDEFINITIONDIMENSION_SUBEXPRESSION_ID) - .setString("id", alarmDefId); - - List subAlarmDefDimensionList = querySybAlarmDefDimension.list(); - - Map> subAlarmDefDimensionMapExpression = mapAlarmDefDimensionExpression(subAlarmDefDimensionList); - - for (SubAlarmDefinitionDb subAlarmDef : subAlarmDefList) { - String id = subAlarmDef.getId(); - AggregateFunction function = AggregateFunction.fromJson(subAlarmDef.getFunction()); - String metricName = subAlarmDef.getMetricName(); - AlarmOperator operator = AlarmOperator.fromJson(subAlarmDef.getOperator()); - double threshold = subAlarmDef.getThreshold(); - int period = subAlarmDef.getPeriod(); - int periods = subAlarmDef.getPeriods(); - boolean isDeterministic = subAlarmDef.isDeterministic(); - Map dimensions = Collections.emptyMap(); - - if (subAlarmDefDimensionMapExpression.containsKey(id)) { - dimensions = subAlarmDefDimensionMapExpression.get(id); - } - - subExpressions.put(id, - new AlarmSubExpression( - function, - new MetricDefinition(metricName, dimensions), - operator, - threshold, - period, - periods, - isDeterministic - ) - ); - } - - return subExpressions; - - } finally { - if (session != null) { - session.close(); - } - } - } - - @Override - public void update(String tenantId, String id, boolean patch, String name, String description, String expression, List matchBy, - String severity, boolean actionsEnabled, Collection oldSubAlarmIds, Map changedSubAlarms, - Map newSubAlarms, List alarmActions, List okActions, List undeterminedActions) { - logger.trace(ORM_LOG_MARKER, "update(...) 
entering..."); - - Transaction tx = null; - Session session = null; - try { - session = sessionFactory.openSession(); - tx = session.beginTransaction(); - - final AlarmDefinitionDb alarmDefinitionDb = this.updateAlarmDefinition( - tenantId, - id, - name, - description, - expression, - matchBy, - severity, - actionsEnabled, - session - ); - - this.deleteOldSubAlarms(oldSubAlarmIds, session); - this.updateChangedSubAlarms(changedSubAlarms, session); - this.createSubExpressions(session, alarmDefinitionDb, newSubAlarms); - this.deleteOldAlarmActions(id, patch, alarmActions, okActions, undeterminedActions, session); - - // Insert new actions - this.persistActions(session, alarmDefinitionDb, AlarmState.ALARM, alarmActions); - this.persistActions(session, alarmDefinitionDb, AlarmState.OK, okActions); - this.persistActions(session, alarmDefinitionDb, AlarmState.UNDETERMINED, undeterminedActions); - // Insert new actions - - tx.commit(); - tx = null; - } catch (RuntimeException e) { - this.rollbackIfNotNull(tx); - throw e; - } finally { - if (session != null) { - session.close(); - } - } - } - - private void deleteOldAlarmActions(final String id, - final boolean patch, - final List alarmActions, - final List okActions, - final List undeterminedActions, - final Session session) { - if (patch) { - this.deleteActions(session, id, AlarmState.ALARM, alarmActions); - this.deleteActions(session, id, AlarmState.OK, okActions); - this.deleteActions(session, id, AlarmState.UNDETERMINED, undeterminedActions); - } else { - session - .getNamedQuery(AlarmActionDb.Queries.DELETE_BY_ALARMDEFINITION_ID) - .setString("id", id) - .executeUpdate(); - } - } - - private void updateChangedSubAlarms(final Map changedSubAlarms, - final Session session) { - if (!MapUtils.isEmpty(changedSubAlarms)) - for (Map.Entry entry : changedSubAlarms.entrySet()) { - final AlarmSubExpression sa = entry.getValue(); - final String subAlarmDefinitionId = entry.getKey(); - - SubAlarmDefinitionDb subAlarmDefinitionDb = session.get(SubAlarmDefinitionDb.class, subAlarmDefinitionId); - subAlarmDefinitionDb.setOperator(sa.getOperator().name()); - subAlarmDefinitionDb.setThreshold(sa.getThreshold()); - subAlarmDefinitionDb.setUpdatedAt(this.getUTCNow()); - subAlarmDefinitionDb.setDeterministic(sa.isDeterministic()); - session.saveOrUpdate(subAlarmDefinitionDb); - } - } - - private void deleteOldSubAlarms(final Collection oldSubAlarmIds, - final Session session) { - if (!CollectionUtils.isEmpty(oldSubAlarmIds)) { - session - .getNamedQuery(SubAlarmDefinitionDb.Queries.DELETE_BY_IDS) - .setParameterList("ids", oldSubAlarmIds) - .executeUpdate(); - } - } - - private AlarmDefinitionDb updateAlarmDefinition(final String tenantId, - final String id, - final String name, - final String description, - final String expression, - final List matchBy, - final String severity, - final boolean actionsEnabled, - final Session session) { - final AlarmDefinitionDb alarmDefinitionDb = (AlarmDefinitionDb) session - .getNamedQuery(AlarmDefinitionDb.Queries.FIND_BY_TENANT_ID_AND_ID) - .setString("tenantId", tenantId) - .setString("id", id) - .uniqueResult(); - - alarmDefinitionDb.setName(name); - alarmDefinitionDb.setDescription(description); - alarmDefinitionDb.setExpression(expression); - alarmDefinitionDb.setMatchBy(matchBy == null || Iterables.isEmpty(matchBy) ? 
null : COMMA_JOINER.join(matchBy)); - alarmDefinitionDb.setSeverity(AlarmSeverity.valueOf(severity)); - alarmDefinitionDb.setActionsEnabled(actionsEnabled); - alarmDefinitionDb.setUpdatedAt(this.getUTCNow()); - - session.saveOrUpdate(alarmDefinitionDb); - - return alarmDefinitionDb; - } - - private void deleteActions(final Session session, - final String id, - final AlarmState alarmState, - final List actions) { - if (!CollectionUtils.isEmpty(actions)) - session - .getNamedQuery(AlarmActionDb.Queries.DELETE_BY_ALARMDEFINITION_ID_AND_ALARMSTATE) - .setString("id", id) - .setString("alarmState", alarmState.name()) - .executeUpdate(); - } - - private Map> mapAlarmDefDimensionExpression(List subAlarmDefDimensionList) { - Map> subAlarmDefDimensionMapExpression = Maps.newHashMapWithExpectedSize(subAlarmDefDimensionList.size()); - - // Map expressions on sub_alarm_definition_dimension.sub_alarm_definition_id = - // sub_alarm_definition.id - for (SubAlarmDefinitionDimensionDb subAlarmDefDimension : subAlarmDefDimensionList) { - String subAlarmDefId = subAlarmDefDimension.getSubAlarmDefinitionDimensionId().getSubExpression().getId(); - String name = subAlarmDefDimension.getSubAlarmDefinitionDimensionId().getDimensionName(); - String value = subAlarmDefDimension.getValue(); - - if (subAlarmDefDimensionMapExpression.containsKey(subAlarmDefId)) { - subAlarmDefDimensionMapExpression.get(subAlarmDefId).put(name, value); - } else { - Map expressionMap = Maps.newHashMap(); - expressionMap.put(name, value); - subAlarmDefDimensionMapExpression.put(subAlarmDefId, expressionMap); - } - } - - return subAlarmDefDimensionMapExpression; - } - - private void bindDimensionsToQuery(Query query, Map dimensions) { - if (dimensions != null) { - int i = 0; - for (Iterator> it = dimensions.entrySet().iterator(); it.hasNext(); i++) { - Map.Entry entry = it.next(); - query.setString("dname" + i, entry.getKey()); - query.setString("dvalue" + i, entry.getValue()); - } - } - } - - private List createAlarmDefinitions(List> rows) { - final List result = new ArrayList<>(); - Map> okActionIdsMap = Maps.newHashMap(); - Map> alarmActionIdsMap = Maps.newHashMap(); - Map> undeterminedActionIdsMap = Maps.newHashMap(); - Set alarmDefinitionSet = Sets.newHashSet(); - - for (Map row : rows) { - - String alarmDefId = (String) row.get(ID); - String singleState = (String) row.get(STATE); - String notificationId = (String) row.get(NOTIFICATION_ID); - - if (!okActionIdsMap.containsKey(alarmDefId)) { - okActionIdsMap.put(alarmDefId, Lists.newArrayList()); - } - if (!alarmActionIdsMap.containsKey(alarmDefId)) { - alarmActionIdsMap.put(alarmDefId, Lists.newArrayList()); - } - if (!undeterminedActionIdsMap.containsKey(alarmDefId)) { - undeterminedActionIdsMap.put(alarmDefId, Lists.newArrayList()); - } - - if (singleState != null && notificationId != null) { - if (singleState.equals(AlarmState.UNDETERMINED.name())) { - undeterminedActionIdsMap.get(alarmDefId).add(notificationId); - } - if (singleState.equals(AlarmState.OK.name())) { - okActionIdsMap.get(alarmDefId).add(notificationId); - } - if (singleState.equals(AlarmState.ALARM.name())) { - alarmActionIdsMap.get(alarmDefId).add(notificationId); - } - } - } - - for (Map row : rows) { - String alarmDefId = (String) row.get(ID); - - if (!alarmDefinitionSet.contains(alarmDefId)) { - String name = (String) row.get(NAME); - String description = (String) row.get(DESCRIPTION); - String severity = (String) row.get(SEVERITY); - String expression = (String) row.get(EXPRESSION); - List match = 
this.splitStringIntoList((String) row.get(MATCH_BY)); - Boolean actionEnabled = (Boolean) row.get(ACTIONS_ENABLED); - - AlarmDefinition ad = new AlarmDefinition( - alarmDefId, - name, - description, - severity, - expression, - match, - actionEnabled, - alarmActionIdsMap.get(alarmDefId), - okActionIdsMap.get(alarmDefId), - undeterminedActionIdsMap.get(alarmDefId) - ); - - result.add(ad); - } - - alarmDefinitionSet.add(alarmDefId); - } - return result; - } - - private List splitStringIntoList(String str) { - return str == null ? Lists.newArrayList() : Lists.newArrayList(COMMA_SPLITTER.split(str)); - } - - private void createSubExpressions(Session session, - AlarmDefinitionDb alarmDefinition, - Map alarmSubExpressions) { - - if (alarmSubExpressions != null) { - for (Map.Entry subEntry : alarmSubExpressions.entrySet()) { - String subAlarmId = subEntry.getKey(); - AlarmSubExpression subExpr = subEntry.getValue(); - MetricDefinition metricDef = subExpr.getMetricDefinition(); - - // Persist sub-alarm - final DateTime now = this.getUTCNow(); - final SubAlarmDefinitionDb subAlarmDefinitionDb = new SubAlarmDefinitionDb( - subAlarmId, - alarmDefinition, - subExpr.getFunction().name(), - metricDef.name, - subExpr.getOperator().name(), - subExpr.getThreshold(), - subExpr.getPeriod(), - subExpr.getPeriods(), - now, - now, - subExpr.isDeterministic() - ); - session.save(subAlarmDefinitionDb); - - // Persist sub-alarm dimensions - if (!MapUtils.isEmpty(metricDef.dimensions)) { - SubAlarmDefinitionDimensionDb definitionDimension; - SubAlarmDefinitionDimensionId definitionDimensionId; - - for (Map.Entry dimEntry : metricDef.dimensions.entrySet()) { - definitionDimensionId = new SubAlarmDefinitionDimensionId(subAlarmDefinitionDb, dimEntry.getKey()); - definitionDimension = new SubAlarmDefinitionDimensionDb(definitionDimensionId, dimEntry.getValue()); - session.save(definitionDimension); - } - - } - } - } - - } - - private void persistActions(final Session session, - final AlarmDefinitionDb alarmDefinition, - final AlarmState alarmState, - final List actions) { - if (actions != null) { - for (String action : actions) { - session.save(new AlarmActionDb(alarmDefinition, alarmState, action)); - } - } - } - - // method extracted for code-readability - private static ResultTransformer getAlarmDefResultTransformer() { - return new ResultTransformer() { - private static final long serialVersionUID = -3052468375925339521L; - - @Override - public Object transformTuple(final Object[] tuple, final String[] aliases) { - for (int i = 0, length = aliases.length; i < length; i++) { - aliases[i] = aliases[i].toUpperCase(); - } - return AliasToEntityMapResultTransformer - .INSTANCE - .transformTuple(tuple, aliases); - } - - @Override - public List transformList(final List collection) { - return AliasToEntityMapResultTransformer - .INSTANCE - .transformList(collection); - } - }; - } -} diff --git a/java/src/main/java/monasca/api/infrastructure/persistence/hibernate/AlarmHibernateUtils.java b/java/src/main/java/monasca/api/infrastructure/persistence/hibernate/AlarmHibernateUtils.java deleted file mode 100644 index a3dcee7ae..000000000 --- a/java/src/main/java/monasca/api/infrastructure/persistence/hibernate/AlarmHibernateUtils.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright 2015 FUJITSU LIMITED - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.infrastructure.persistence.hibernate; - -import com.google.common.collect.Lists; -import org.hibernate.Query; -import org.hibernate.SessionFactory; -import org.hibernate.StatelessSession; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.inject.Inject; -import javax.inject.Named; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import monasca.api.infrastructure.persistence.Utils; - -public class AlarmHibernateUtils - extends Utils { - private static final Logger logger = LoggerFactory.getLogger(AlarmHibernateUtils.class); - private static final String FIND_ALARM_IDS_SQL = - "select distinct a.id, ad.created_at " - + "from alarm as a " - + "join alarm_definition as ad on a.alarm_definition_id = ad.id " - + "%s " - + "where ad.tenant_id = :tenantId and ad.deleted_at is NULL " - + "order by ad.created_at"; - - private final SessionFactory sessionFactory; - - @Inject - public AlarmHibernateUtils(@Named("orm") SessionFactory sessionFactory) { - this.sessionFactory = sessionFactory; - } - - public List findAlarmIds(String tenantId, Map dimensions) { - logger.trace(BaseSqlRepo.ORM_LOG_MARKER, "findAlarmIds(...) entering"); - List alarmIdList = null; - - StatelessSession session = null; - try { - session = sessionFactory.openStatelessSession(); - - final String sql = this.findAlarmQueryString(dimensions); - final Query query = session - .createSQLQuery(sql) - .setString("tenantId", tenantId); - - this.bindDimensionsToQuery(query, dimensions); - - @SuppressWarnings("unchecked") List rows = query.list(); - alarmIdList = Lists.newArrayListWithCapacity(rows.size()); - for (Object[] row : rows) { - String id = (String) row[0]; - alarmIdList.add(id); - } - - } finally { - if (session != null) { - session.close(); - } - } - - // no need to check if alarmIdList != null, because in case of exception method - // will leave immediately, otherwise list wont be null. - return alarmIdList; - } - - private String findAlarmQueryString(final Map dimensions) { - return String.format(FIND_ALARM_IDS_SQL, this.buildJoinClauseFor(dimensions)); - } - - /* - duplicate required - monasca.api.infrastructure.persistence.DimensionQueries.bindDimensionsToQuery() - has incompatible signature - */ - private void bindDimensionsToQuery(Query query, Map dimensions) { - if (dimensions != null) { - int i = 0; - for (Iterator> it = dimensions.entrySet().iterator(); it.hasNext(); i++) { - Map.Entry entry = it.next(); - query.setString("dname" + i, entry.getKey()); - query.setString("dvalue" + i, entry.getValue()); - } - } - } -} diff --git a/java/src/main/java/monasca/api/infrastructure/persistence/hibernate/AlarmSqlRepoImpl.java b/java/src/main/java/monasca/api/infrastructure/persistence/hibernate/AlarmSqlRepoImpl.java deleted file mode 100644 index ace74a1b1..000000000 --- a/java/src/main/java/monasca/api/infrastructure/persistence/hibernate/AlarmSqlRepoImpl.java +++ /dev/null @@ -1,732 +0,0 @@ -/* - * Copyright 2015 FUJITSU LIMITED - * Copyright 2016 Hewlett Packard Enterprise Development Company, L.P. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.infrastructure.persistence.hibernate; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; - -import javax.annotation.Nullable; -import javax.inject.Inject; -import javax.inject.Named; - -import com.google.common.base.Function; -import com.google.common.base.Joiner; -import com.google.common.base.Preconditions; -import com.google.common.base.Splitter; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; - -import org.apache.commons.collections4.CollectionUtils; -import org.hibernate.Query; -import org.hibernate.Session; -import org.hibernate.SessionFactory; -import org.hibernate.Transaction; -import org.hibernate.criterion.Projections; -import org.hibernate.criterion.Restrictions; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import monasca.api.domain.exception.EntityNotFoundException; -import monasca.api.domain.model.alarm.Alarm; -import monasca.api.domain.model.alarm.AlarmCount; -import monasca.api.domain.model.alarm.AlarmRepo; -import monasca.common.hibernate.db.AlarmDb; -import monasca.common.hibernate.db.SubAlarmDb; -import monasca.common.hibernate.type.BinaryId; -import monasca.common.model.alarm.AlarmSeverity; -import monasca.common.model.alarm.AlarmState; -import monasca.common.model.alarm.AlarmSubExpression; -import monasca.common.model.metric.MetricDefinition; -import monasca.common.util.Conversions; - -/** - * Alarmed metric repository implementation. 
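 * Note: getAlarmsCount(...) is a stub in this implementation and always returns
 * {@code null} (flagged "Not Implemented" further below).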
- */ -public class AlarmSqlRepoImpl - extends BaseSqlRepo - implements AlarmRepo { - - private static final Logger logger = LoggerFactory.getLogger(AlarmSqlRepoImpl.class); - private static final Joiner COMMA_JOINER = Joiner.on(","); - private static final Joiner SPACE_JOINER = Joiner.on(" "); - private static final Splitter SPACE_SPLITTER = Splitter.on(" "); - private static final AlarmSortByFunction ALARM_SORT_BY_FUNCTION = new AlarmSortByFunction(); - - private static final String FIND_ALARM_BY_ID_SQL = - "select distinct ad.id as alarm_definition_id, ad.severity, ad.name as alarm_definition_name, " - + "a.id, a.state, a.updatedAt, a.createdAt as created_timestamp, " - + "md.name as metric_name, mdg.id.name, mdg.value, a.lifecycleState, a.link, a.stateUpdatedAt, " - + "mdg.id.dimensionSetId from AlarmDb as a " - + ", AlarmDefinitionDb as ad " - + ", AlarmMetricDb as am " - + ", MetricDefinitionDimensionsDb as mdd " - + ", MetricDefinitionDb as md " - + ", MetricDimensionDb as mdg " - + "where " - + " ad.id = a.alarmDefinition.id " - + " and am.alarmMetricId.alarm.id = a.id " - + " and mdd.id = am.alarmMetricId.metricDefinitionDimensions.id " - + " and md.id = mdd.metricDefinition.id " - + " and mdg.id.dimensionSetId = mdd.metricDimensionSetId " - + " and ad.tenantId = :tenantId " - + " %s " - + " and ad.deletedAt is null order by a.id, mdg.id.dimensionSetId %s"; - - private static final String FIND_ALARMS_SQL = - "select ad.id as alarm_definition_id, ad.severity, ad.name as alarm_definition_name, " - + "a.id, a.state, a.updated_at as updated_timestamp, a.created_at as created_timestamp, " - + "md.name as metric_name, mdg.name, mdg.value, a.lifecycle_state, a.link, a.state_updated_at as state_updated_timestamp, " - + "mdg.dimension_set_id " - + "from alarm as a " - + "inner join %s as alarm_id_list on alarm_id_list.id = a.id " - + "inner join alarm_definition ad on ad.id = a.alarm_definition_id " - + "inner join alarm_metric as am on am.alarm_id = a.id " - + "inner join metric_definition_dimensions as mdd on mdd.id = am.metric_definition_dimensions_id " - + "inner join metric_definition as md on md.id = mdd.metric_definition_id " - + "left outer join (select dimension_set_id, name, value " - + "from metric_dimension group by dimension_set_id, name, value) as mdg on mdg.dimension_set_id = mdd.metric_dimension_set_id " - + "%s"; - - @Inject - public AlarmSqlRepoImpl(@Named("orm") SessionFactory sessionFactory) { - super(sessionFactory); - } - - @Override - public void deleteById(String tenantId, String id) { - logger.trace(ORM_LOG_MARKER, "deleteById(...) 
entering"); - - Transaction tx = null; - Session session = null; - try { - session = sessionFactory.openSession(); - tx = session.beginTransaction(); - - final long result = (Long) session - .createCriteria(AlarmDb.class, "a") - .createAlias("alarmDefinition", "ad") - .add(Restrictions.conjunction( - Restrictions.eq("a.id", id), - Restrictions.eq("ad.tenantId", tenantId), - Restrictions.eqProperty("a.alarmDefinition.id", "ad.id"), - Restrictions.isNull("ad.deletedAt") - )) - .setProjection(Projections.count("a.id")) - .setReadOnly(true) - .uniqueResult(); - - // This will throw an EntityNotFoundException if Alarm doesn't exist or has a different tenant - // id - if (result < 1) { - throw new EntityNotFoundException("No alarm exists for %s", id); - } - - // delete alarm - session - .getNamedQuery(AlarmDb.Queries.DELETE_BY_ID) - .setString("id", id) - .executeUpdate(); - - tx.commit(); - tx = null; - } catch (Exception e) { - this.rollbackIfNotNull(tx); - throw e; - } finally { - if (session != null) { - session.close(); - } - } - - } - - @SuppressWarnings("unchecked") - @Override - public List find(final String tenantId, - final String alarmDefId, - final String metricName, - final Map metricDimensions, - final AlarmState state, - final List severities, - final String lifecycleState, - final String link, - final DateTime stateUpdatedStart, - final List sortBy, - final String offset, - final int limit, - final boolean enforceLimit) { - logger.trace(ORM_LOG_MARKER, "find(...) entering"); - - Preconditions.checkNotNull(tenantId, "TenantId is required"); - - Session session = null; - - List alarms = new LinkedList<>(); - - try { - final Query query; - - final String sortByClause = ALARM_SORT_BY_FUNCTION.apply(sortBy); - final String alarmsSubQuery = this.getFindAlarmsSubQuery( - alarmDefId, - metricName, - metricDimensions, - state, - severities, - lifecycleState, - link, - stateUpdatedStart, - sortBy, - offset, - limit, - enforceLimit - ); - - final String sql = String.format(FIND_ALARMS_SQL, alarmsSubQuery, sortByClause); - - try { - query = new Function(){ - - @Nullable - @Override - public Query apply(@Nullable final Session input) { - assert input != null; - final Query query = input.createSQLQuery(sql) - .setReadOnly(true); - - query.setString("tenantId", tenantId); - - if (alarmDefId != null) { - query.setString("alarmDefId", alarmDefId); - } - - if (metricName != null) { - query.setString("metricName", metricName); - } - - if (state != null) { - query.setString("state", state.name()); - } - - if (CollectionUtils.isNotEmpty(severities)) { - if (severities.size() == 1) { - query.setString("severity", severities.get(0).name()); - } else { - for (int it = 0; it < severities.size(); it++) { - query.setString(String.format("severity_%d", it), severities.get(it).name()); - } - } - } - - if (link != null) { - query.setString("link", link); - } - - if (lifecycleState != null) { - query.setString("lifecycleState", lifecycleState); - } - - if (stateUpdatedStart != null) { - query.setDate("stateUpdatedStart", stateUpdatedStart.toDateTime(DateTimeZone.UTC).toDate()); - } - - if (enforceLimit && limit > 0) { - query.setInteger("limit", limit + 1); - } - - bindDimensionsToQuery(query, metricDimensions); - - return query; - } - - }.apply((session = sessionFactory.openSession())); - } catch (Exception e) { - logger.error("Failed to bind query {}, error is {}", sql, e.getMessage()); - throw new RuntimeException("Failed to bind query", e); - } - - List alarmList = (List) query.list(); - - 
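// alarmList rows are flat (one row per alarm x metric x dimension value);
// createAlarms(...) below regroups them into one Alarm per distinct alarm id.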
if(alarmList.isEmpty()){ - return Collections.emptyList(); - } - - alarms = createAlarms(alarmList); - - } finally { - if (session != null) { - session.close(); - } - } - return alarms; - - } - - private String getFindAlarmsSubQuery(final String alarmDefId, - final String metricName, - final Map metricDimensions, - final AlarmState state, - final List severities, - final String lifecycleState, - final String link, - final DateTime stateUpdatedStart, - final List sortBy, - final String offset, - final int limit, - final boolean enforceLimit) { - final StringBuilder - sbWhere = - new StringBuilder("(select distinct a.id " - + "from alarm as a, alarm_definition as ad " - + "where ad.id = a.alarm_definition_id " - + " and ad.deleted_at is null " - + " and ad.tenant_id = :tenantId "); - - if (alarmDefId != null) { - sbWhere.append(" and ad.id = :alarmDefId "); - } - - if (metricName != null) { - - sbWhere.append(" and a.id in (select distinct a.id from alarm as a " - + "inner join alarm_metric as am on am.alarm_id = a.id " - + "inner join metric_definition_dimensions as mdd " - + " on mdd.id = am.metric_definition_dimensions_id " - + "inner join (select distinct id from metric_definition " - + " where name = :metricName) as md " - + " on md.id = mdd.metric_definition_id "); - - buildJoinClauseFor(metricDimensions, sbWhere); - - sbWhere.append(")"); - - } else if (metricDimensions != null) { - - sbWhere.append(" and a.id in (select distinct a.id from alarm as a " - + "inner join alarm_metric as am on am.alarm_id = a.id " - + "inner join metric_definition_dimensions as mdd " - + " on mdd.id = am.metric_definition_dimensions_id "); - - buildJoinClauseFor(metricDimensions, sbWhere); - - sbWhere.append(")"); - - } - - if (state != null) { - sbWhere.append(" and a.state = :state"); - } - - if (CollectionUtils.isNotEmpty(severities)) { - if (severities.size() == 1) { - sbWhere.append(" and ad.severity = :severity"); - } else { - sbWhere.append(" and ("); - for (int i = 0; i < severities.size(); i++) { - sbWhere.append("ad.severity = :severity_").append(i); - if (i < severities.size() - 1) { - sbWhere.append(" or "); - } - } - sbWhere.append(")"); - } - } - - if (lifecycleState != null) { - sbWhere.append(" and a.lifecycle_state = :lifecycleState"); - } - - if (link != null) { - sbWhere.append(" and a.link = :link"); - } - - if (stateUpdatedStart != null) { - sbWhere.append(" and a.state_updated_at >= :stateUpdatedStart"); - } - - if (enforceLimit && limit > 0) { - sbWhere.append(" limit :limit"); - } - if (offset != null) { - sbWhere.append(" offset "); - sbWhere.append(offset); - sbWhere.append(' '); - } - - sbWhere.append(")"); - - return sbWhere.toString(); - } - - private List createAlarms(List alarmList) { - List alarms = Lists.newLinkedList(); - - String previousAlarmId = null; - BinaryId previousDimensionSetId = null; - List alarmedMetrics = null; - Map dimensionMap = new HashMap<>(); - - for (Object[] alarmRow : alarmList) { - String alarmDefinitionId = (String) alarmRow[0]; - AlarmSeverity severity = Conversions.variantToEnum(alarmRow[1], AlarmSeverity.class); - AlarmState alarmState = Conversions.variantToEnum(alarmRow[4], AlarmState.class); - DateTime updatedTimestamp = Conversions.variantToDateTime(alarmRow[5]); - DateTime createdTimestamp = Conversions.variantToDateTime(alarmRow[6]); - BinaryId dimensionSetId = this.convertBinaryId(alarmRow[13]); - DateTime stateUpdatedTimestamp = Conversions.variantToDateTime(alarmRow[12]); - - String alarm_definition_name = (String) alarmRow[2]; - 
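// Column order matches the select lists in FIND_ALARMS_SQL / FIND_ALARM_BY_ID_SQL:
// [0] alarm_definition_id  [1] severity          [2] alarm_definition_name
// [3] alarm id             [4] state             [5] updated_at
// [6] created_at           [7] metric name       [8] dimension name
// [9] dimension value      [10] lifecycle_state  [11] link
// [12] state_updated_at    [13] dimension_set_id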
String id = (String) alarmRow[3]; - - String lifecycle_state = (String) alarmRow[10]; - String link = (String) alarmRow[11]; - - String metric_name = (String) alarmRow[7]; - String dimension_name = (String) alarmRow[8]; - String dimension_value = (String) alarmRow[9]; - - if (!id.equals(previousAlarmId)) { - alarmedMetrics = new ArrayList<>(); - dimensionMap = Maps.newHashMap(); - alarmedMetrics.add(new MetricDefinition(metric_name, dimensionMap)); - - alarms.add(new Alarm(id, alarmDefinitionId, alarm_definition_name, severity.name(), - alarmedMetrics, alarmState, lifecycle_state, link, - stateUpdatedTimestamp, updatedTimestamp, createdTimestamp - )); - - previousDimensionSetId = dimensionSetId; - } - - if (!dimensionSetId.equals(previousDimensionSetId)) { - dimensionMap = Maps.newHashMap(); - alarmedMetrics.add(new MetricDefinition(metric_name, dimensionMap)); - } - - dimensionMap.put(dimension_name, dimension_value); - - previousDimensionSetId = dimensionSetId; - previousAlarmId = id; - } - return alarms; - } - - private BinaryId convertBinaryId(final Object o) { - final BinaryId dimensionSetId; - if (o instanceof BinaryId) { - dimensionSetId = (BinaryId) o; - } else { - dimensionSetId = new BinaryId((byte[]) o); - } - return dimensionSetId; - } - - private void bindDimensionsToQuery(Query query, Map dimensions) { - if (dimensions != null) { - int i = 0; - for (Iterator> it = dimensions.entrySet().iterator(); it.hasNext(); i++) { - Map.Entry entry = it.next(); - query.setString("dname" + i, entry.getKey()); - query.setString("dvalue" + i, entry.getValue()); - } - } - } - - private void buildJoinClauseFor(Map dimensions, StringBuilder sbJoin) { - if (dimensions == null) { - return; - } - for (int i = 0; i < dimensions.size(); i++) { - final String indexStr = String.valueOf(i); - sbJoin.append(" inner join metric_dimension md").append(indexStr).append(" on md") - .append(indexStr) - .append(".name = :dname").append(indexStr).append(" and md").append(indexStr) - .append(".value = :dvalue").append(indexStr) - .append(" and mdd.metric_dimension_set_id = md") - .append(indexStr).append(".dimension_set_id"); - } - } - - @Override - @SuppressWarnings("unchecked") - public Alarm findById(String tenantId, String id) { - logger.trace(ORM_LOG_MARKER, "findById(...) 
entering"); - - Session session = null; - - final String sql = String.format(FIND_ALARM_BY_ID_SQL, " and a.id = :id", ""); - List alarms = new LinkedList<>(); - try { - session = sessionFactory.openSession(); - Query qAlarmDefinition = - session.createQuery(sql).setString("tenantId", tenantId) - .setString("id", id); - List alarmList = (List) qAlarmDefinition.list(); - - if (alarmList.isEmpty()) { - throw new EntityNotFoundException("No alarm exists for %s", id); - } - - alarms = this.createAlarms(alarmList); - - } finally { - if (session != null) { - session.close(); - } - } - return alarms.get(0); - } - - @Override - public Alarm update(String tenantId, String id, AlarmState state, String lifecycleState, String link) { - Session session = null; - Alarm originalAlarm = null; - Transaction tx = null; - try { - session = sessionFactory.openSession(); - tx = session.beginTransaction(); - originalAlarm = findById(tenantId, id); - - AlarmDb result = (AlarmDb) session - .getNamedQuery(AlarmDb.Queries.FIND_BY_ID) - .setString("id", id) - .uniqueResult(); - - if (!originalAlarm.getState().equals(state)) { - result.setStateUpdatedAt(this.getUTCNow()); - result.setState(state); - } - - result.setUpdatedAt(this.getUTCNow()); - result.setLink(link); - result.setLifecycleState(lifecycleState); - session.update(result); - - tx.commit(); - tx = null; - } catch (Exception e) { - this.rollbackIfNotNull(tx); - throw e; - } finally { - if (session != null) { - session.close(); - } - } - return originalAlarm; - } - - @Override - @SuppressWarnings("unchecked") - public Map findAlarmSubExpressions(String alarmId) { - Session session = null; - final Map subAlarms = Maps.newHashMap(); - logger.debug("AlarmSqlRepoImpl[findAlarmSubExpressions] called"); - try { - - session = sessionFactory.openSession(); - final List result = session - .getNamedQuery(SubAlarmDb.Queries.BY_ALARM_ID) - .setString("id", alarmId) - .list(); - - if (result != null) { - for (SubAlarmDb row : result) { - subAlarms.put(row.getId(), AlarmSubExpression.of(row.getExpression())); - } - } - } finally { - if (session != null) { - session.close(); - } - } - return subAlarms; - } - - @Override - @SuppressWarnings("unchecked") - public Map> findAlarmSubExpressionsForAlarmDefinition( - String alarmDefinitionId) { - logger.trace(ORM_LOG_MARKER, "findAlarmSubExpressionsForAlarmDefinition(...) 
entering"); - - Session session = null; - Transaction tx = null; - Map> subAlarms = Maps.newHashMap(); - - try { - session = sessionFactory.openSession(); - tx = session.beginTransaction(); - - final Iterator rows = session - .getNamedQuery(SubAlarmDb.Queries.BY_ALARMDEFINITION_ID) - .setString("id", alarmDefinitionId) - .setReadOnly(true) - .iterate(); - - while (rows.hasNext()) { - - final SubAlarmDb row = rows.next(); - final String alarmId = (String) session.getIdentifier(row.getAlarm()); - - Map alarmMap = subAlarms.get(alarmId); - if (alarmMap == null) { - alarmMap = Maps.newHashMap(); - subAlarms.put(alarmId, alarmMap); - } - - final String id = row.getId(); - final String expression = row.getExpression(); - alarmMap.put(id, AlarmSubExpression.of(expression)); - } - - tx.commit(); - tx = null; - - } catch (Exception exp) { - this.rollbackIfNotNull(tx); - throw exp; - } finally { - if (session != null) { - session.close(); - } - } - - return subAlarms; - } - - @Override - public AlarmCount getAlarmsCount(String tenantId, String alarmDefId, String metricName, - Map metricDimensions, AlarmState state, - List severities, String lifecycleState, String link, - DateTime stateUpdatedStart, List groupBy, - String offset, int limit) { - // Not Implemented - return null; - } - - private static class AlarmSortByFunction - implements Function, String> { - - static final Map> SORT_BY_TO_COLUMN_ALIAS = Maps.newHashMapWithExpectedSize(10); - - static { - SORT_BY_TO_COLUMN_ALIAS.put("alarm_id", - Lists.newArrayList("a.id")); - SORT_BY_TO_COLUMN_ALIAS.put("alarm_definition_id", - Lists.newArrayList("ad.id")); - SORT_BY_TO_COLUMN_ALIAS.put("alarm_definition_name", - Lists.newArrayList("ad.name")); - SORT_BY_TO_COLUMN_ALIAS.put("created_timestamp", - Lists.newArrayList("a.created_at")); - SORT_BY_TO_COLUMN_ALIAS.put("updated_timestamp", - Lists.newArrayList("a.updated_at")); - SORT_BY_TO_COLUMN_ALIAS.put("state_updated_timestamp", - Lists.newArrayList("a.state_updated_at")); - SORT_BY_TO_COLUMN_ALIAS.put("state", - Lists.newArrayList("a.state='OK'", "a.state='UNDETERMINED'", "a.state='ALARM'")); - SORT_BY_TO_COLUMN_ALIAS.put("severity", - Lists.newArrayList("ad.severity='LOW'", "ad.severity='MEDIUM'", "ad.severity='HIGH'", "ad.severity='CRITICAL'")); - } - - @Nullable - @Override - public String apply(@Nullable final List input) { - final StringBuilder orderClause = new StringBuilder(" ORDER BY "); - - if (CollectionUtils.isEmpty(input)) { - return orderClause.append("a.id ASC ").toString(); - } - - final List sortByElements = Lists.newArrayListWithExpectedSize(input.size()); - boolean alarmIdUsed = false; - - for (final String sortByElement : input) { - final List split = SPACE_SPLITTER.splitToList(sortByElement); - - final String sortAlias = split.get(0); - final String sortOrder = split.size() >= 2 ? 
split.get(1).toUpperCase() : ""; - - final List columnAlias = SORT_BY_TO_COLUMN_ALIAS.get(sortAlias); - alarmIdUsed = "alarm_id".equals(sortAlias); - - if (columnAlias != null) { - sortByElements.add(new CaseWhenSortClauseFunction(sortOrder).apply(columnAlias)); - } - - } - - if (!alarmIdUsed) { - sortByElements.add("a.id ASC"); - } - - orderClause.append(COMMA_JOINER.join(sortByElements)); - - return orderClause.toString(); - } - - } - - private static final class CaseWhenSortClauseFunction - implements Function, String> { - - private final String sortOrder; - - CaseWhenSortClauseFunction(final String sortOrder) { - this.sortOrder = sortOrder; - } - - @Nullable - @Override - public String apply(@Nullable final List input) { - assert input != null; - if (input.size() == 1) { - return String.format("%s %s", input.get(0), this.sortOrder).trim(); - } else { - - final List builder = Lists.newArrayList("CASE"); - final boolean ascendingOrder = this.isAscendingOrder(); - - for (int it = 0; it < input.size(); it++) { - final String[] parts = input.get(it).split("="); - - final String columnName = parts[0]; - final String columnValue = parts[1]; - final int orderValue = ascendingOrder ? it : input.size() - it - 1; - - builder.add(String.format("WHEN %s=%s THEN %s", - columnName, columnValue, orderValue)); - - } - - builder.add(String.format("ELSE %s", input.size())); - builder.add("END"); - - return SPACE_JOINER.join(builder); - } - } - - private boolean isAscendingOrder() { - return "".equals(this.sortOrder) || "ASC".equals(this.sortOrder); - } - - } - -} diff --git a/java/src/main/java/monasca/api/infrastructure/persistence/hibernate/BaseSqlRepo.java b/java/src/main/java/monasca/api/infrastructure/persistence/hibernate/BaseSqlRepo.java deleted file mode 100644 index 473d6d0e9..000000000 --- a/java/src/main/java/monasca/api/infrastructure/persistence/hibernate/BaseSqlRepo.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright 2015 FUJITSU LIMITED - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - * - */ - -package monasca.api.infrastructure.persistence.hibernate; - -import org.hibernate.SessionFactory; -import org.hibernate.Transaction; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.slf4j.Marker; -import org.slf4j.MarkerFactory; - -/** - * Abstract foundation for ORM repositories. - */ -abstract class BaseSqlRepo { - protected static final Marker ORM_LOG_MARKER = MarkerFactory.getMarker("ORM"); - private static final Logger LOG = LoggerFactory.getLogger(BaseSqlRepo.class); - protected final SessionFactory sessionFactory; - - protected BaseSqlRepo(final SessionFactory sessionFactory) { - this.sessionFactory = sessionFactory; - } - - /** - * Rollbacks passed {@code tx} transaction if such is not null. - * Assumption is being made that {@code tx} being null means transaction - * has been successfully committed. 
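/*
 * A trimmed sketch of what CaseWhenSortClauseFunction above produces for
 * enum-valued sort keys: states (and, analogously, severities) are ranked by
 * a CASE expression so ORDER BY is deterministic even though the column holds
 * strings. The class name is illustrative; the column/value pairs are taken
 * from SORT_BY_TO_COLUMN_ALIAS in the deleted code.
 */
class CaseWhenSortSketch {
  static String orderByState(boolean ascending) {
    String[] ranks = {"a.state='OK'", "a.state='UNDETERMINED'", "a.state='ALARM'"};
    StringBuilder sb = new StringBuilder("CASE");
    for (int i = 0; i < ranks.length; i++) {
      String[] parts = ranks[i].split("=");
      int rank = ascending ? i : ranks.length - i - 1;  // reversed ranks emulate DESC
      sb.append(String.format(" WHEN %s=%s THEN %d", parts[0], parts[1], rank));
    }
    return sb.append(" ELSE ").append(ranks.length).append(" END").toString();
  }
  // orderByState(true) yields:
  // CASE WHEN a.state='OK' THEN 0 WHEN a.state='UNDETERMINED' THEN 1
  //      WHEN a.state='ALARM' THEN 2 ELSE 3 END
}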
- * - * @param tx {@link Transaction} object - */ - protected void rollbackIfNotNull(final Transaction tx) { - if (tx != null) { - try { - tx.rollback(); - } catch (RuntimeException rbe) { - LOG.error(ORM_LOG_MARKER, "Couldn't roll back transaction", rbe); - } - } - } - - /** - * Returns UTC based {@link DateTime#now()} - * - * @return current date/time in UTC - * - * @see DateTimeZone#UTC - * @see DateTime#now() - */ - protected DateTime getUTCNow() { - return DateTime.now(DateTimeZone.UTC); - } - -} diff --git a/java/src/main/java/monasca/api/infrastructure/persistence/hibernate/NotificationMethodSqlRepoImpl.java b/java/src/main/java/monasca/api/infrastructure/persistence/hibernate/NotificationMethodSqlRepoImpl.java deleted file mode 100644 index 060353b71..000000000 --- a/java/src/main/java/monasca/api/infrastructure/persistence/hibernate/NotificationMethodSqlRepoImpl.java +++ /dev/null @@ -1,288 +0,0 @@ -/* - * Copyright 2015 FUJITSU LIMITED - * (C) Copyright 2016 Hewlett Packard Enterprise Development LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.infrastructure.persistence.hibernate; - -import java.util.List; -import java.util.UUID; - -import javax.inject.Inject; -import javax.inject.Named; - -import com.google.common.base.Joiner; -import com.google.common.collect.Lists; -import org.apache.commons.collections4.CollectionUtils; -import org.hibernate.Query; -import org.hibernate.Session; -import org.hibernate.SessionFactory; -import org.hibernate.Transaction; -import org.joda.time.DateTime; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import monasca.api.domain.exception.EntityExistsException; -import monasca.api.domain.exception.EntityNotFoundException; -import monasca.api.domain.model.notificationmethod.NotificationMethod; -import monasca.api.domain.model.notificationmethod.NotificationMethodRepo; -import monasca.common.hibernate.db.NotificationMethodDb; -import monasca.common.model.alarm.AlarmNotificationMethodType; - -/** - * Notification method repository implementation. 
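/*
 * Every mutating repository method in these classes follows the same
 * session/transaction shape, and setting tx to null right after commit() is
 * what lets rollbackIfNotNull() treat a non-null tx in the catch block as
 * "commit never happened". A minimal template of that pattern, assuming the
 * Hibernate APIs used throughout the deleted sources; the class and method
 * names are illustrative.
 */
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;

abstract class TxTemplateSketch {
  protected final SessionFactory sessionFactory;

  protected TxTemplateSketch(SessionFactory sessionFactory) {
    this.sessionFactory = sessionFactory;
  }

  protected abstract void doWork(Session session);
  protected abstract void rollbackIfNotNull(Transaction tx);

  void inTransaction() {
    Session session = null;
    Transaction tx = null;
    try {
      session = sessionFactory.openSession();
      tx = session.beginTransaction();
      doWork(session);
      tx.commit();
      tx = null;                // success: rollbackIfNotNull() becomes a no-op
    } catch (RuntimeException e) {
      rollbackIfNotNull(tx);    // non-null tx means commit never happened
      throw e;
    } finally {
      if (session != null) {
        session.close();
      }
    }
  }
}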
- */ -public class NotificationMethodSqlRepoImpl - extends BaseSqlRepo - implements NotificationMethodRepo { - private static final Joiner COMMA_JOINER = Joiner.on(','); - private static final Logger LOG = LoggerFactory.getLogger(NotificationMethodSqlRepoImpl.class); - - @Inject - public NotificationMethodSqlRepoImpl(@Named("orm") SessionFactory sessionFactory) { - super(sessionFactory); - } - - @Override - public NotificationMethod create(String tenantId, String name, String notificationMethodType, - String address, int period) { - Transaction tx = null; - Session session = null; - try { - session = sessionFactory.openSession(); - tx = session.beginTransaction(); - - if (byTenantIdAndName(session, tenantId, name) != null) { - throw new EntityExistsException("Notification method %s \"%s\" already exists.", tenantId, - name); - } - - final String id = UUID.randomUUID().toString(); - final DateTime now = this.getUTCNow(); - final NotificationMethodDb db = new NotificationMethodDb( - id, - tenantId, - name, - AlarmNotificationMethodType.valueOf(notificationMethodType), - address, - period, - now, - now - ); - session.save(db); - - LOG.debug("Creating notification method {} for {}", name, tenantId); - tx.commit(); - tx = null; - - return this.convertToNotificationMethod(db); - } catch (RuntimeException e) { - this.rollbackIfNotNull(tx); - throw e; - } finally { - if (session != null) { - session.close(); - } - } - } - - @Override - public void deleteById(String tenantId, String notificationMethodId) { - Session session = null; - Transaction tx = null; - try { - if (!exists(tenantId, notificationMethodId)) { - throw new EntityNotFoundException("No notification exists for %s", notificationMethodId); - } - session = sessionFactory.openSession(); - tx = session.beginTransaction(); - - // delete notification - session - .getNamedQuery(NotificationMethodDb.Queries.DELETE_BY_ID) - .setString("id", notificationMethodId) - .executeUpdate(); - - tx.commit(); - tx = null; - } catch (RuntimeException e) { - this.rollbackIfNotNull(tx); - throw e; - } finally { - if (session != null) { - session.close(); - } - } - } - - @Override - public boolean exists(String tenantId, String notificationMethodId) { - Session session = null; - try { - session = sessionFactory.openSession(); - return this.getByTenantIdAndId(session, tenantId, notificationMethodId) != null; - } finally { - if (session != null) { - session.close(); - } - } - } - - @Override - public NotificationMethod findById(String tenantId, String notificationMethodId) { - Session session = null; - try { - session = sessionFactory.openSession(); - - final NotificationMethodDb result = this.getByTenantIdAndId(session, tenantId, notificationMethodId); - - if (result == null) { - throw new EntityNotFoundException("No notification method exists for %s", - notificationMethodId); - } - - return this.convertToNotificationMethod(result); - } finally { - if (session != null) { - session.close(); - } - } - } - - @Override - public NotificationMethod update(String tenantId, String notificationMethodId, String name, - String notificationMethodType, String address, int period) { - Session session = null; - Transaction tx = null; - try { - session = sessionFactory.openSession(); - final NotificationMethodDb result = this.byTenantIdAndName(session, tenantId, name); - - if (result != null && !result.getId().equalsIgnoreCase(notificationMethodId)) { - throw new EntityExistsException("Notification method %s \"%s\" already exists.", tenantId, - name); - } - - tx = 
session.beginTransaction(); - - NotificationMethodDb db; - if ((db = session.get(NotificationMethodDb.class, notificationMethodId)) == null) { - throw new EntityNotFoundException("No notification method exists for %s", - notificationMethodId); - } - db.setName(name); - db.setType(AlarmNotificationMethodType.valueOf(notificationMethodType)); - db.setAddress(address); - db.setPeriod(period); - db.setUpdatedAt(this.getUTCNow()); - - session.save(db); - tx.commit(); - tx = null; - - return this.convertToNotificationMethod(db); - - } catch (RuntimeException e) { - this.rollbackIfNotNull(tx); - throw e; - } finally { - if (session != null) { - session.close(); - } - } - } - - @Override - @SuppressWarnings("unchecked") - public List find(String tenantId, List sortBy, String offset, - int limit) { - Session session = null; - List resultList; - List notificationList = Lists.newArrayList(); - final String rawQuery = "from NotificationMethodDb where tenant_id = :tenantId %1$s"; - - try { - session = sessionFactory.openSession(); - - final StringBuilder orderByPart = new StringBuilder(); - if (sortBy != null && !sortBy.isEmpty()) { - orderByPart.append(" order by ").append(COMMA_JOINER.join(sortBy)); - if (!sortBy.contains("id")) { - orderByPart.append(",id"); - } - } else { - orderByPart.append(" order by id "); - } - - final String queryHql = String.format(rawQuery, orderByPart); - final Query query = session.createQuery(queryHql).setString("tenantId", tenantId); - - if (limit > 0) { - query.setMaxResults(limit + 1); - } - - if (offset != null && !offset.isEmpty()) { - query.setFirstResult(Integer.parseInt(offset)); - } - - resultList = query.list(); - - if (CollectionUtils.isEmpty(resultList)) { - return notificationList; - } - - for (NotificationMethodDb item : resultList) { - notificationList.add(this.convertToNotificationMethod(item)); - } - - return notificationList; - - } finally { - if (session != null) { - session.close(); - } - } - - } - - protected NotificationMethodDb byTenantIdAndName(final Session session, - final String tenantId, - final String name) { - - return (NotificationMethodDb) session - .getNamedQuery(NotificationMethodDb.Queries.NOTIFICATION_BY_TENANT_ID_AND_NAME) - .setString("tenantId", tenantId) - .setString("name", name) - .uniqueResult(); - } - - protected NotificationMethodDb getByTenantIdAndId(final Session session, - final String tenantId, - final String id) { - - return (NotificationMethodDb) session - .getNamedQuery(NotificationMethodDb.Queries.FIND_BY_TENANT_ID_AND_ID) - .setString("tenantId", tenantId) - .setString("id", id) - .uniqueResult(); - } - - protected NotificationMethod convertToNotificationMethod(final NotificationMethodDb db) { - return db == null ? null : new NotificationMethod( - db.getId(), - db.getName(), - db.getType().toString(), - db.getAddress(), - db.getPeriod() - ); - } -} diff --git a/java/src/main/java/monasca/api/infrastructure/persistence/hibernate/NotificationMethodTypesSqlRepoImpl.java b/java/src/main/java/monasca/api/infrastructure/persistence/hibernate/NotificationMethodTypesSqlRepoImpl.java deleted file mode 100644 index d10addd0f..000000000 --- a/java/src/main/java/monasca/api/infrastructure/persistence/hibernate/NotificationMethodTypesSqlRepoImpl.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * - * (C) Copyright 2016 Hewlett Packard Enterprise Development LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. 
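/*
 * find() above deliberately calls setMaxResults(limit + 1): fetching one row
 * past the page size lets the REST layer decide whether to emit a "next" link
 * without a separate COUNT query. The helper below isolates that idea; the
 * class and method names are illustrative, not from the original sources.
 */
import java.util.List;

class PagingSketch {
  // fetched must have been produced with setMaxResults(limit + 1).
  static <T> boolean hasNextPage(List<T> fetched, int limit) {
    return fetched.size() > limit;
  }

  static <T> List<T> currentPage(List<T> fetched, int limit) {
    return hasNextPage(fetched, limit) ? fetched.subList(0, limit) : fetched;
  }
}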
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.infrastructure.persistence.hibernate; - -import java.util.ArrayList; -import java.util.List; - -import javax.inject.Inject; -import javax.inject.Named; - -import org.hibernate.Query; -import org.hibernate.Session; -import org.hibernate.SessionFactory; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import monasca.api.domain.model.notificationmethod.NotificationMethodTypesRepo; -import monasca.common.hibernate.db.NotificationMethodTypesDb; - -/** - * Notification method repository implementation. - */ -public class NotificationMethodTypesSqlRepoImpl - extends BaseSqlRepo - implements NotificationMethodTypesRepo { - private static final Logger LOG = LoggerFactory.getLogger(NotificationMethodTypesSqlRepoImpl.class); - - @Inject - public NotificationMethodTypesSqlRepoImpl(@Named("orm") SessionFactory sessionFactory) { - super(sessionFactory); - } - - - @Override - @SuppressWarnings("unchecked") - public List listNotificationMethodTypes() { - - Session session = null; - List notification_method_types = new ArrayList(); - - try { - session = sessionFactory.openSession(); - //Query q = session.createSQLQuery("Select * from notification_method_type").addEntity(String.class); - Query q = session.createQuery("from NotificationMethodTypesDb"); - - List resultList = q.list(); - for (NotificationMethodTypesDb type : resultList){ - notification_method_types.add(type.getName()); - } - return notification_method_types; - - } finally { - if (session != null) { - session.close(); - } - } - - } - - } diff --git a/java/src/main/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9AlarmStateHistoryRepo.java b/java/src/main/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9AlarmStateHistoryRepo.java deleted file mode 100644 index 3ab5c1bde..000000000 --- a/java/src/main/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9AlarmStateHistoryRepo.java +++ /dev/null @@ -1,199 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
- */ -package monasca.api.infrastructure.persistence.influxdb; - -import com.google.inject.Inject; - -import monasca.api.domain.model.alarmstatehistory.AlarmStateHistory; -import monasca.api.domain.model.alarmstatehistory.AlarmStateHistoryRepo; -import monasca.api.infrastructure.persistence.PersistUtils; -import monasca.api.infrastructure.persistence.Utils; -import monasca.common.model.alarm.AlarmState; -import monasca.common.model.alarm.AlarmTransitionSubAlarm; -import monasca.common.model.metric.MetricDefinition; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.PropertyNamingStrategy; - -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.text.ParseException; -import java.util.ArrayList; -import java.util.Date; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; - -import javax.annotation.Nullable; - -public class InfluxV9AlarmStateHistoryRepo implements AlarmStateHistoryRepo { - - private static final Logger logger = LoggerFactory - .getLogger(InfluxV9AlarmStateHistoryRepo.class); - - private final Utils utils; - private final InfluxV9RepoReader influxV9RepoReader; - private final InfluxV9Utils influxV9Utils; - private final PersistUtils persistUtils; - private static final ObjectMapper objectMapper = new ObjectMapper(); - - static { - objectMapper - .setPropertyNamingStrategy(PropertyNamingStrategy.CAMEL_CASE_TO_LOWER_CASE_WITH_UNDERSCORES); - } - - private static final TypeReference> METRICS_TYPE = - new TypeReference>() {}; - - private static final TypeReference> SUB_ALARMS_TYPE = - new TypeReference>() {}; - - @Inject - public InfluxV9AlarmStateHistoryRepo(Utils utils, - InfluxV9RepoReader influxV9RepoReader, - InfluxV9Utils influxV9Utils, - PersistUtils persistUtils) { - - this.utils = utils; - this.influxV9RepoReader = influxV9RepoReader; - this.influxV9Utils = influxV9Utils; - this.persistUtils = persistUtils; - - } - - @Override - public List findById(String tenantId, String alarmId, String offset, - int limit) - throws Exception { - - String q = String.format("select alarm_id, metrics, old_state, new_state, " - + "reason, reason_data, sub_alarms " - + "from alarm_state_history " - + "where %1$s %2$s %3$s %4$s", - this.influxV9Utils.publicTenantIdPart(tenantId), - this.influxV9Utils.alarmIdPart(alarmId), - this.influxV9Utils.timeOffsetPart(offset), - this.influxV9Utils.limitPart(limit)); - - logger.debug("Alarm state history query: {}", q); - - String r = this.influxV9RepoReader.read(q); - - Series series = this.objectMapper.readValue(r, Series.class); - - List alarmStateHistoryList = alarmStateHistoryList(series); - - logger.debug("Found {} alarm state transitions matching query", alarmStateHistoryList.size()); - - return alarmStateHistoryList; - } - - @Override - public List find(String tenantId, Map dimensions, - DateTime startTime, @Nullable DateTime endTime, - @Nullable String offset, int limit) throws Exception { - - List alarmIdList = this.utils.findAlarmIds(tenantId, dimensions); - - if (alarmIdList == null || alarmIdList.isEmpty()) { - return new ArrayList<>(); - } - - String q = String.format("select alarm_id, metrics, old_state, new_state, " - + "reason, reason_data, sub_alarms " - + "from alarm_state_history " - + "where %1$s %2$s %3$s %4$s %5$s", - this.influxV9Utils.publicTenantIdPart(tenantId), - 
this.influxV9Utils.startTimeEndTimePart(startTime, endTime), - this.influxV9Utils.alarmIdsPart(alarmIdList), - this.influxV9Utils.timeOffsetPart(offset), - this.influxV9Utils.limitPart(limit)); - - logger.debug("Alarm state history list query: {}", q); - - String r = this.influxV9RepoReader.read(q); - - Series series = this.objectMapper.readValue(r, Series.class); - - List alarmStateHistoryList = alarmStateHistoryList(series); - - logger.debug("Found {} alarm state transitions matching query", alarmStateHistoryList.size()); - - return alarmStateHistoryList; - - } - - private List alarmStateHistoryList(Series series) { - - List alarmStateHistoryList = new LinkedList<>(); - - if (!series.isEmpty()) { - - for (Serie serie : series.getSeries()) { - - for (String[] values : serie.getValues()) { - - AlarmStateHistory alarmStateHistory = new AlarmStateHistory(); - - Date date; - try { - date = this.persistUtils.parseTimestamp(values[0]); - } catch (ParseException e) { - logger.error("Failed to parse time", e); - continue; - } - - DateTime dateTime = new DateTime(date.getTime(), DateTimeZone.UTC); - alarmStateHistory.setTimestamp(dateTime); - - alarmStateHistory.setAlarmId(values[1]); - - List metricDefinitionList; - try { - metricDefinitionList = this.objectMapper.readValue(values[2], METRICS_TYPE); - } catch (IOException e) { - logger.error("Failed to parse metrics", e); - continue; - } - - alarmStateHistory.setMetrics(metricDefinitionList); - - alarmStateHistory.setOldState(AlarmState.valueOf(values[3])); - alarmStateHistory.setNewState(AlarmState.valueOf(values[4])); - alarmStateHistory.setReason(values[5]); - alarmStateHistory.setReasonData(values[6]); - - List subAlarmList; - try { - subAlarmList = this.objectMapper.readValue(values[7], SUB_ALARMS_TYPE); - } catch (IOException e) { - logger.error("Failed to parse sub-alarms", e); - continue; - } - - alarmStateHistory.setSubAlarms(subAlarmList); - - alarmStateHistoryList.add(alarmStateHistory); - } - } - - } - return alarmStateHistoryList; - } -} diff --git a/java/src/main/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9DimensionRepo.java b/java/src/main/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9DimensionRepo.java deleted file mode 100644 index 756a00901..000000000 --- a/java/src/main/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9DimensionRepo.java +++ /dev/null @@ -1,193 +0,0 @@ -/* - * (C) Copyright 2016 Hewlett Packard Enterprise Development LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
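/*
 * alarmStateHistoryList() above parses two JSON-encoded columns (metrics,
 * sub_alarms) per row and skips any row whose JSON is malformed rather than
 * failing the whole listing. A condensed sketch of that log-and-continue
 * parsing with Jackson, which the deleted code also uses; the class name and
 * the List<Map<String,String>> cell shape are illustrative assumptions.
 */
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

class RowParsingSketch {
  private static final ObjectMapper MAPPER = new ObjectMapper();
  private static final TypeReference<List<Map<String, String>>> CELL_TYPE =
      new TypeReference<List<Map<String, String>>>() {};

  static List<List<Map<String, String>>> parseJsonCells(List<String> cells) {
    List<List<Map<String, String>>> parsed = new ArrayList<>();
    for (String cell : cells) {
      try {
        parsed.add(MAPPER.readValue(cell, CELL_TYPE));
      } catch (IOException e) {
        // As in the original: one malformed row is dropped instead of
        // failing the whole alarm-state-history response.
      }
    }
    return parsed;
  }
}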
- */ -package monasca.api.infrastructure.persistence.influxdb; - -import com.google.inject.Inject; -import com.google.common.base.Strings; -import com.fasterxml.jackson.databind.ObjectMapper; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.TreeSet; -import java.util.Set; - -import monasca.api.ApiConfig; -import monasca.api.domain.model.dimension.DimensionName; -import monasca.api.domain.model.dimension.DimensionValue; -import monasca.api.domain.model.dimension.DimensionRepo; - - -public class InfluxV9DimensionRepo implements DimensionRepo { - - private static final Logger logger = LoggerFactory.getLogger(InfluxV9DimensionRepo.class); - - private final ApiConfig config; - private final InfluxV9RepoReader influxV9RepoReader; - private final InfluxV9Utils influxV9Utils; - private final String region; - - private final ObjectMapper objectMapper = new ObjectMapper(); - - @Inject - public InfluxV9DimensionRepo(ApiConfig config, - InfluxV9RepoReader influxV9RepoReader, - InfluxV9Utils influxV9Utils) { - this.config = config; - this.region = config.region; - this.influxV9RepoReader = influxV9RepoReader; - this.influxV9Utils = influxV9Utils; - } - - @Override - public List findValues( - String metricName, - String tenantId, - String dimensionName, - String offset, - int limit) throws Exception - { - // - // Use treeset to keep list in alphabetic/predictable order - // for string based offset. - // - List dimensionValueList = new ArrayList<>(); - Set matchingValues = new TreeSet(); - String dimNamePart = "and \"" - + this.influxV9Utils.sanitize(dimensionName) - + "\" =~ /.*/"; - - String q = String.format("show series %1$s where %2$s %3$s", - this.influxV9Utils.namePart(metricName, false), - this.influxV9Utils.privateTenantIdPart(tenantId), - dimNamePart); - - logger.debug("Dimension values query: {}", q); - String r = this.influxV9RepoReader.read(q); - Series series = this.objectMapper.readValue(r, Series.class); - - if (!series.isEmpty()) { - for (Serie serie : series.getSeries()) { - for (String[] values : serie.getValues()) { - Map dimensions = this.influxV9Utils.getDimensions(values, serie.getColumns()); - for (Map.Entry entry : dimensions.entrySet()) { - if (dimensionName.equals(entry.getKey())) { - matchingValues.add(entry.getValue()); - } - } - } - } - } - - List filteredValues = filterDimensionValues(matchingValues, - limit, - offset); - - for (String filteredValue : filteredValues) { - DimensionValue dimValue = new DimensionValue(metricName, dimensionName, filteredValue); - dimensionValueList.add(dimValue); - } - - return dimensionValueList; - } - - private List filterDimensionValues(Set matchingValues, - int limit, - String offset) - { - Boolean haveOffset = !Strings.isNullOrEmpty(offset); - List filteredValues = new ArrayList(); - int remaining_limit = limit + 1; - - for (String dimVal : matchingValues) { - if (remaining_limit <= 0) { - break; - } - if (haveOffset && dimVal.compareTo(offset) <= 0) { - continue; - } - filteredValues.add(dimVal); - remaining_limit--; - } - - return filteredValues; - } - - @Override - public List findNames( - String metricName, - String tenantId, - String offset, - int limit) throws Exception - { - // - // Use treeset to keep list in alphabetic/predictable order - // for string based offset. 
- // - List dimensionNameList = new ArrayList<>(); - Set matchingNames = new TreeSet(); - - String q = String.format("show series %1$s where %2$s", - this.influxV9Utils.namePart(metricName, false), - this.influxV9Utils.privateTenantIdPart(tenantId)); - - logger.debug("Dimension names query: {}", q); - String r = this.influxV9RepoReader.read(q); - Series series = this.objectMapper.readValue(r, Series.class); - - if (!series.isEmpty()) { - for (Serie serie : series.getSeries()) { - for (String[] names : serie.getValues()) { - Map dimensions = this.influxV9Utils.getDimensions(names, serie.getColumns()); - for (Map.Entry entry : dimensions.entrySet()) { - matchingNames.add(entry.getKey()); - } - } - } - } - - List filteredNames = filterDimensionNames(matchingNames, limit, offset); - - for (String filteredName : filteredNames) { - DimensionName dimName = new DimensionName(metricName, filteredName); - dimensionNameList.add(dimName); - } - - return dimensionNameList; - } - - private List filterDimensionNames(Set matchingNames, - int limit, - String offset) { - Boolean haveOffset = !Strings.isNullOrEmpty(offset); - List filteredNames = new ArrayList(); - int remaining_limit = limit + 1; - - for (String dimName : matchingNames) { - if (remaining_limit <= 0) { - break; - } - if (haveOffset && dimName.compareTo(offset) <= 0) { - continue; - } - filteredNames.add(dimName); - remaining_limit--; - } - - return filteredNames; - } -} diff --git a/java/src/main/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9MeasurementRepo.java b/java/src/main/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9MeasurementRepo.java deleted file mode 100644 index d5117e896..000000000 --- a/java/src/main/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9MeasurementRepo.java +++ /dev/null @@ -1,234 +0,0 @@ -/* - * (C) Copyright 2014, 2016 Hewlett Packard Enterprise Development LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
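/*
 * filterDimensionValues(), filterDimensionNames() and (later in this patch)
 * filterMetricNames() all implement the same keyset pagination: a TreeSet
 * yields a stable alphabetic order, the offset is the last string already
 * returned, and limit + 1 items are kept so the caller can detect a next
 * page. Factored out below; tailSet() replaces the explicit compareTo() loop,
 * and the class name is illustrative.
 */
import java.util.ArrayList;
import java.util.List;
import java.util.TreeSet;

class KeysetPagingSketch {
  static List<String> page(TreeSet<String> sorted, String offset, int limit) {
    List<String> out = new ArrayList<>();
    // tailSet(offset, false) skips everything <= offset, as the repo code does.
    Iterable<String> candidates =
        (offset == null || offset.isEmpty()) ? sorted : sorted.tailSet(offset, false);
    for (String value : candidates) {
      if (out.size() >= limit + 1) {
        break;                   // limit + 1, so the API layer can emit a next link
      }
      out.add(value);
    }
    return out;
  }
}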
- */ -package monasca.api.infrastructure.persistence.influxdb; - -import com.google.common.base.Joiner; -import com.google.inject.Inject; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; - -import org.joda.time.DateTime; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.util.Arrays; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; - -import javax.annotation.Nullable; - -import monasca.api.ApiConfig; -import monasca.api.domain.exception.MultipleMetricsException; -import monasca.api.domain.model.measurement.MeasurementRepo; -import monasca.api.domain.model.measurement.Measurements; - -public class InfluxV9MeasurementRepo implements MeasurementRepo { - - private static final Logger logger = LoggerFactory - .getLogger(InfluxV9MeasurementRepo.class); - - private final static TypeReference VALUE_META_TYPE = new TypeReference>() {}; - private final static Joiner COMMA_JOINER = Joiner.on(','); - - private final ApiConfig config; - private final String region; - private final InfluxV9RepoReader influxV9RepoReader; - private final InfluxV9Utils influxV9Utils; - private final InfluxV9MetricDefinitionRepo influxV9MetricDefinitionRepo; - private final ObjectMapper objectMapper = new ObjectMapper(); - - - - @Inject - public InfluxV9MeasurementRepo(ApiConfig config, - InfluxV9RepoReader influxV9RepoReader, - InfluxV9Utils influxV9Utils, - InfluxV9MetricDefinitionRepo influxV9MetricDefinitionRepo) { - this.config = config; - this.region = config.region; - this.influxV9RepoReader = influxV9RepoReader; - this.influxV9Utils = influxV9Utils; - this.influxV9MetricDefinitionRepo = influxV9MetricDefinitionRepo; - - } - - @Override - public List find(String tenantId, String name, Map dimensions, - DateTime startTime, @Nullable DateTime endTime, - @Nullable String offset, int limit, Boolean mergeMetricsFlag, - List groupBy) - throws Exception { - - String q = buildQuery(tenantId, name, dimensions, startTime, endTime, - offset, limit, mergeMetricsFlag, groupBy); - - String r = this.influxV9RepoReader.read(q); - - Series series = this.objectMapper.readValue(r, Series.class); - - List measurementsList = measurementsList(series, groupBy, offset, limit); - - logger.debug("Found {} metrics matching query", measurementsList.size()); - - return measurementsList; - } - - private String buildQuery(String tenantId, String name, Map dimensions, - DateTime startTime, DateTime endTime, String offset, int limit, - Boolean mergeMetricsFlag, List groupBy) throws Exception { - - String q; - if (!groupBy.isEmpty()) { - - // The time column is automatically included in the results before all other columns. 
- q = String.format("select value, value_meta %1$s " - + "where %2$s %3$s %4$s %5$s %6$s %7$s", //slimit 1 - this.influxV9Utils.namePart(name, true), - this.influxV9Utils.privateTenantIdPart(tenantId), - this.influxV9Utils.privateRegionPart(this.region), - this.influxV9Utils.startTimePart(startTime), - this.influxV9Utils.dimPart(dimensions), - this.influxV9Utils.endTimePart(endTime), - this.influxV9Utils.groupByPart(groupBy)); - - } else { - - String groupByStr = ""; - if (Boolean.FALSE.equals(mergeMetricsFlag) && - !this.influxV9MetricDefinitionRepo.isAtMostOneSeries(tenantId, name, dimensions)) { - - throw new MultipleMetricsException(name, dimensions); - - } else if (Boolean.FALSE.equals(mergeMetricsFlag)) { - groupByStr = " group by * "; - } - - // The time column is automatically included in the results before all other columns. - q = String.format("select value, value_meta %1$s " - + "where %2$s %3$s %4$s %5$s %6$s %7$s", - this.influxV9Utils.namePart(name, true), - this.influxV9Utils.privateTenantIdPart(tenantId), - this.influxV9Utils.privateRegionPart(this.region), - this.influxV9Utils.startTimePart(startTime), - this.influxV9Utils.dimPart(dimensions), - this.influxV9Utils.endTimePart(endTime), - groupByStr); - } - - logger.debug("Measurements query: {}", q); - - return q; - } - - private List measurementsList(Series series, List groupBy, String offsetStr, int limit) { - List measurementsList = new LinkedList<>(); - - if (!series.isEmpty()) { - - int offsetId = 0; - String offsetTimestamp = "1970-01-01T00:00:00.000Z"; - - if (offsetStr != null) { - List offsets = influxV9Utils.parseMultiOffset(offsetStr); - if (offsets.size() > 1) { - offsetId = Integer.parseInt(offsets.get(0)); - offsetTimestamp = offsets.get(1); - } else { - offsetId = 0; - offsetTimestamp = offsets.get(0); - } - } - - int remaining_limit = limit; - int index = 0; - for (Serie serie : series.getSeries()) { - if (index < offsetId || remaining_limit <= 0) { - index++; - continue; - } - - Measurements lastMeasurements = null; - Measurements measurements = null; - - if (!groupBy.isEmpty()) { - Map dimensions = influxV9Utils.filterGroupByTags( - influxV9Utils.filterPrivateTags(serie.getTags()), - groupBy); - - lastMeasurements = measurementsList.size() > 0 ? 
- measurementsList.get(measurementsList.size() - 1) : null; - - - if (lastMeasurements != null && lastMeasurements.getDimensions().equals(dimensions)) - measurements = measurementsList.get(measurementsList.size() - 1); - - } - - if (measurements == null){ - measurements = new Measurements(serie.getName(), - influxV9Utils.filterPrivateTags(serie.getTags())); - - measurements.setId(Integer.toString(index)); - } - - - - for (String[] values : serie.getValues()) { - if (remaining_limit <= 0) { - break; - } - - final String timestamp = influxV9Utils.threeDigitMillisTimestamp(values[0]); - if (timestamp.compareTo(offsetTimestamp) > 0 || index > offsetId) { - measurements.addMeasurement(Arrays.asList(timestamp, - Double.parseDouble(values[1]), - getValueMeta(values))); - remaining_limit--; - } - } - - if (measurements != lastMeasurements && measurements.getMeasurements().size() > 0) { - measurementsList.add(measurements); - } - index++; - } - } - - return measurementsList; - - } - - private Map getValueMeta(String[] values) { - - Map valueMetaMap = new HashMap<>(); - - String valueMetaStr = values[2]; - - if (valueMetaStr != null && !valueMetaStr.isEmpty()) { - - try { - valueMetaMap = this.objectMapper.readValue(valueMetaStr, VALUE_META_TYPE); - } catch (IOException e) { - logger.error("Failed to parse value metadata: {}", values[2], e); - - } - } - - return valueMetaMap; - } -} diff --git a/java/src/main/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9MetricDefinitionRepo.java b/java/src/main/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9MetricDefinitionRepo.java deleted file mode 100644 index 2a61dbbe9..000000000 --- a/java/src/main/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9MetricDefinitionRepo.java +++ /dev/null @@ -1,295 +0,0 @@ -/* - * (C) Copyright 2014, 2016 Hewlett Packard Enterprise Development LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
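/*
 * measurementsList() above (and statisticslist() later in this patch) resume
 * pagination from a compound offset of the form "<seriesIndex>_<timestamp>":
 * the index picks which series to continue in, the timestamp picks where
 * inside it. A standalone decoder for that format; the class name and field
 * layout are illustrative, and the epoch default mirrors the deleted code.
 */
class CompoundOffsetSketch {
  static final String EPOCH = "1970-01-01T00:00:00.000Z";

  final int seriesIndex;
  final String timestamp;

  CompoundOffsetSketch(int seriesIndex, String timestamp) {
    this.seriesIndex = seriesIndex;
    this.timestamp = timestamp;
  }

  static CompoundOffsetSketch decode(String offset) {
    if (offset == null || offset.isEmpty()) {
      return new CompoundOffsetSketch(0, EPOCH);   // start from the beginning
    }
    int sep = offset.indexOf('_');
    if (sep < 0) {
      return new CompoundOffsetSketch(0, offset);  // timestamp-only offset
    }
    return new CompoundOffsetSketch(Integer.parseInt(offset.substring(0, sep)),
                                    offset.substring(sep + 1));
  }
  // decode("3_2016-01-01T00:00:00.000Z") resumes in series 3 after that time.
}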
- */ -package monasca.api.infrastructure.persistence.influxdb; - -import com.google.common.base.Strings; -import com.google.inject.Inject; - -import com.fasterxml.jackson.databind.ObjectMapper; - -import org.joda.time.DateTime; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.TreeSet; -import java.util.Set; - -import monasca.api.ApiConfig; -import monasca.api.domain.model.metric.MetricDefinitionRepo; -import monasca.api.domain.model.metric.MetricName; -import monasca.common.model.metric.MetricDefinition; - - -public class InfluxV9MetricDefinitionRepo implements MetricDefinitionRepo { - - private static final Logger logger = LoggerFactory.getLogger(InfluxV9MetricDefinitionRepo.class); - - private final ApiConfig config; - private final InfluxV9RepoReader influxV9RepoReader; - private final InfluxV9Utils influxV9Utils; - private final String region; - - private final ObjectMapper objectMapper = new ObjectMapper(); - - @Inject - public InfluxV9MetricDefinitionRepo(ApiConfig config, - InfluxV9RepoReader influxV9RepoReader, - InfluxV9Utils influxV9Utils) { - this.config = config; - this.region = config.region; - this.influxV9RepoReader = influxV9RepoReader; - this.influxV9Utils = influxV9Utils; - - } - - boolean isAtMostOneSeries(String tenantId, String name, Map dimensions) - throws Exception { - - // Set limit to 2. We only care if we get 0, 1, or 2 results back. - String q = String.format("show series %1$s " - + "where %2$s %3$s %4$s limit 2", - this.influxV9Utils.namePart(name, false), - this.influxV9Utils.privateTenantIdPart(tenantId), - this.influxV9Utils.privateRegionPart(this.region), - this.influxV9Utils.dimPart(dimensions)); - - logger.debug("Metric definition query: {}", q); - - String r = this.influxV9RepoReader.read(q); - - Series series = this.objectMapper.readValue(r, Series.class); - - List metricDefinitionList = metricDefinitionList(series, - tenantId, - name, - null, - null, - 0); - - logger.debug("Found {} metric definitions matching query", metricDefinitionList.size()); - - return metricDefinitionList.size() > 1 ? false : true; - - } - - @Override - public List find(String tenantId, String name, - Map dimensions, - DateTime startTime, - DateTime endTime, - String offset, int limit) throws Exception { - - int startIndex = this.influxV9Utils.startIndex(offset); - - String q = String.format("show series %1$s " - + "where %2$s %3$s %4$s %5$s %6$s", - this.influxV9Utils.namePart(name, false), - this.influxV9Utils.privateTenantIdPart(tenantId), - this.influxV9Utils.privateRegionPart(this.region), - this.influxV9Utils.dimPart(dimensions), - this.influxV9Utils.limitPart(limit), - this.influxV9Utils.offsetPart(startIndex)); - - logger.debug("Metric definition query: {}", q); - - String r = this.influxV9RepoReader.read(q); - - Series series = this.objectMapper.readValue(r, Series.class); - - List metricDefinitionList = metricDefinitionList(series, - tenantId, - name, - startTime, - endTime, - startIndex); - - logger.debug("Found {} metric definitions matching query", metricDefinitionList.size()); - - return metricDefinitionList; - } - - @Override - public List findNames(String tenantId, Map dimensions, - String offset, int limit) throws Exception { - // - // Use treeset to keep list in alphabetic/predictable order - // for string based offset. 
- // - List metricNameList = new ArrayList<>(); - Set matchingNames = new TreeSet<>(); - - String q = String.format("show series " - + "where %1$s %2$s %3$s", - this.influxV9Utils.privateTenantIdPart(tenantId), - this.influxV9Utils.privateRegionPart(this.region), - this.influxV9Utils.dimPart(dimensions)); - - logger.debug("Metric name query: {}", q); - String r = this.influxV9RepoReader.read(q); - Series series = this.objectMapper.readValue(r, Series.class); - if (!series.isEmpty()) { - for (Serie serie : series.getSeries()) { - matchingNames.add(serie.getName()); - } - } - - List filteredNames = filterMetricNames(matchingNames, limit, offset); - - for (String filteredName : filteredNames) { - MetricName dimName = new MetricName(filteredName); - metricNameList.add(dimName); - } - logger.debug("Found {} metric definitions matching query", metricNameList.size()); - - return metricNameList; - } - - private List filterMetricNames(Set matchingNames, - int limit, - String offset) { - Boolean haveOffset = !Strings.isNullOrEmpty(offset); - List filteredNames = new ArrayList<>(); - int remaining_limit = limit + 1; - - for (String dimName : matchingNames) { - if (remaining_limit <= 0) { - break; - } - if (haveOffset && dimName.compareTo(offset) <= 0) { - continue; - } - filteredNames.add(dimName); - remaining_limit--; - } - - return filteredNames; - } - - private List metricDefinitionList(Series series, - String tenantId, - String name, - DateTime startTime, - DateTime endTime, - int startIndex) - { - - List metricDefinitionList = new ArrayList<>(); - - if (!series.isEmpty()) { - - int index = startIndex; - - for (Serie serie : series.getSeries()) { - - for (String[] values : serie.getValues()) { - - MetricDefinition m = new MetricDefinition(serie.getName(), - this.influxV9Utils.getDimensions(values, serie.getColumns())); - // - // If start/end time are specified, ensure we've got measurements - // for this definition before we add to the return list - // - if (hasMeasurements(m, tenantId, startTime, endTime)) { - m.setId(String.valueOf(index++)); - metricDefinitionList.add(m); - } - } - } - } - - return metricDefinitionList; - } - - private List metricNameList(Series series) { - List metricNameList = new ArrayList<>(); - - if (!series.isEmpty()) { - - Serie serie = series.getSeries()[0]; - - for (String[] values : serie.getValues()) { - MetricName m = new MetricName(values[0]); - metricNameList.add(m); - } - - } - - return metricNameList; - } - - private boolean hasMeasurements(MetricDefinition m, - String tenantId, - DateTime startTime, - DateTime endTime) - { - boolean hasMeasurements = true; - - // - // Only make the additional query if startTime has been - // specified. - // - if (startTime == null) { - return hasMeasurements; - } - - try { - - String q = buildMeasurementsQuery(tenantId, - m.name, - m.dimensions, - startTime, - endTime); - - String r = this.influxV9RepoReader.read(q); - Series series = this.objectMapper.readValue(r, Series.class); - hasMeasurements = !series.isEmpty(); - - } catch (Exception e) { - // - // If something goes wrong with the measurements query - // checking if there are current measurements, default to - // existing behavior and return the definition. 
- // - logger.error("Failed to query for measurements for: {}", m.name, e); - hasMeasurements = true; - } - - return hasMeasurements; - } - - private String buildMeasurementsQuery(String tenantId, - String name, - Map dimensions, - DateTime startTime, - DateTime endTime) throws Exception - { - - String q = String.format("select value, value_meta %1$s " - + "where %2$s %3$s %4$s %5$s %6$s group by * slimit 1", - this.influxV9Utils.namePart(name, true), - this.influxV9Utils.privateTenantIdPart(tenantId), - this.influxV9Utils.privateRegionPart(this.region), - this.influxV9Utils.startTimePart(startTime), - this.influxV9Utils.dimPart(dimensions), - this.influxV9Utils.endTimePart(endTime)); - - logger.debug("Measurements query: {}", q); - - return q; - } - -} diff --git a/java/src/main/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9RepoReader.java b/java/src/main/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9RepoReader.java deleted file mode 100644 index 5edb0b952..000000000 --- a/java/src/main/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9RepoReader.java +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ - -package monasca.api.infrastructure.persistence.influxdb; - -import com.google.inject.Inject; - -import org.apache.commons.codec.binary.Base64; -import org.apache.http.Header; -import org.apache.http.HeaderElement; -import org.apache.http.HttpEntity; -import org.apache.http.HttpException; -import org.apache.http.HttpRequest; -import org.apache.http.HttpRequestInterceptor; -import org.apache.http.HttpResponse; -import org.apache.http.HttpResponseInterceptor; -import org.apache.http.HttpStatus; -import org.apache.http.client.entity.GzipDecompressingEntity; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClients; -import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; -import org.apache.http.protocol.HttpContext; -import org.apache.http.util.EntityUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.net.URLEncoder; - -import monasca.api.ApiConfig; - -public class InfluxV9RepoReader { - - private static final Logger logger = LoggerFactory.getLogger(InfluxV9RepoReader.class); - - private final String influxName; - private final String influxUrl; - private final String influxCreds; - private final String influxUser; - private final String influxPass; - private final String baseAuthHeader; - private final boolean gzip; - - private final CloseableHttpClient httpClient; - - @Inject - public InfluxV9RepoReader(final ApiConfig config) { - - this.influxName = config.influxDB.getName(); - logger.debug("Influxdb database name: {}", this.influxName); - - this.influxUrl = config.influxDB.getUrl() + "/query"; - logger.debug("Influxdb URL: {}", this.influxUrl); - - this.influxUser = 
config.influxDB.getUser(); - this.influxPass = config.influxDB.getPassword(); - this.influxCreds = this.influxUser + ":" + this.influxPass; - - this.gzip = config.influxDB.getGzip(); - logger.debug("Influxdb gzip responses: {}", this.gzip); - - logger.debug("Setting up basic Base64 authentication"); - this.baseAuthHeader = "Basic " + new String(Base64.encodeBase64(this.influxCreds.getBytes())); - - // We inject InfluxV9RepoReader as a singleton. So, we must share connections safely. - PoolingHttpClientConnectionManager cm = new PoolingHttpClientConnectionManager(); - cm.setMaxTotal(config.influxDB.getMaxHttpConnections()); - - if (this.gzip) { - - logger.debug("Setting up gzip responses from Influxdb"); - - this.httpClient = - HttpClients.custom().setConnectionManager(cm) - .addInterceptorFirst(new HttpRequestInterceptor() { - - public void process(final HttpRequest request, final HttpContext context) - throws HttpException, IOException { - if (!request.containsHeader("Accept-Encoding")) { - request.addHeader("Accept-Encoding", "gzip"); - } - } - }).addInterceptorFirst(new HttpResponseInterceptor() { - - public void process(final HttpResponse response, final HttpContext context) - throws HttpException, IOException { - HttpEntity entity = response.getEntity(); - if (entity != null) { - Header ceheader = entity.getContentEncoding(); - if (ceheader != null) { - HeaderElement[] codecs = ceheader.getElements(); - for (int i = 0; i < codecs.length; i++) { - if (codecs[i].getName().equalsIgnoreCase("gzip")) { - response.setEntity(new GzipDecompressingEntity(response.getEntity())); - return; - } - } - } - } - } - }).build(); - - } else { - - logger.debug("Setting up non-gzip responses from Influxdb"); - - this.httpClient = HttpClients.custom().setConnectionManager(cm).build(); - - } - } - - protected String read(final String query) throws Exception { - - HttpGet request = new HttpGet(this.influxUrl + "?q=" + URLEncoder.encode(query, "UTF-8") - + "&db=" + URLEncoder.encode(this.influxName, "UTF-8")); - - request.addHeader("content-type", "application/json"); - request.addHeader("Authorization", this.baseAuthHeader); - - try { - - logger.debug("Sending query {} to influx database {} at {}", query, this.influxName, - this.influxUrl); - - HttpResponse response = this.httpClient.execute(request); - - int rc = response.getStatusLine().getStatusCode(); - - logger.debug("Received {} status code from influx database {} at {}", rc, this.influxName, - this.influxUrl); - - if (rc != HttpStatus.SC_OK) { - - HttpEntity entity = response.getEntity(); - String responseString = EntityUtils.toString(entity, "UTF-8"); - logger - .error("Failed to query influx database {} at {}: {}", this.influxName, this.influxUrl, - String.valueOf(rc)); - logger.error("Http response: {}", responseString); - - throw new Exception(rc + ":" + responseString); - } - - logger - .debug("Successfully queried influx database {} at {}", this.influxName, this.influxUrl); - - HttpEntity entity = response.getEntity(); - return entity != null ? 
EntityUtils.toString(entity, "UTF-8") : null; - - } finally { - - request.releaseConnection(); - - } - } -} diff --git a/java/src/main/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9StatisticRepo.java b/java/src/main/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9StatisticRepo.java deleted file mode 100644 index cb72458d1..000000000 --- a/java/src/main/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9StatisticRepo.java +++ /dev/null @@ -1,279 +0,0 @@ -/* - * (C) Copyright 2014, 2016 Hewlett Packard Enterprise Development LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.infrastructure.persistence.influxdb; - -import com.google.common.base.Strings; -import com.google.inject.Inject; - -import com.fasterxml.jackson.databind.ObjectMapper; - -import org.joda.time.DateTime; -import org.joda.time.format.DateTimeFormatter; -import org.joda.time.format.ISODateTimeFormat; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; - -import javax.annotation.Nullable; - -import monasca.api.ApiConfig; -import monasca.api.domain.exception.MultipleMetricsException; -import monasca.api.domain.model.statistic.StatisticRepo; -import monasca.api.domain.model.statistic.Statistics; - - -public class InfluxV9StatisticRepo implements StatisticRepo { - - - private static final Logger logger = LoggerFactory.getLogger(InfluxV9StatisticRepo.class); - - private final ApiConfig config; - private final String region; - private final InfluxV9RepoReader influxV9RepoReader; - private final InfluxV9Utils influxV9Utils; - private final InfluxV9MetricDefinitionRepo influxV9MetricDefinitionRepo; - private static final DateTimeFormatter ISO_8601_FORMATTER = ISODateTimeFormat - .dateOptionalTimeParser().withZoneUTC(); - - - private final ObjectMapper objectMapper = new ObjectMapper(); - - - @Inject - public InfluxV9StatisticRepo(ApiConfig config, - InfluxV9RepoReader influxV9RepoReader, - InfluxV9Utils influxV9Utils, - InfluxV9MetricDefinitionRepo influxV9MetricDefinitionRepo) { - this.config = config; - this.region = config.region; - this.influxV9RepoReader = influxV9RepoReader; - this.influxV9Utils = influxV9Utils; - this.influxV9MetricDefinitionRepo = influxV9MetricDefinitionRepo; - - } - - @Override - public List find(String tenantId, String name, Map dimensions, - DateTime startTime, @Nullable DateTime endTime, - List statistics, int period, String offset, int limit, - Boolean mergeMetricsFlag, List groupBy) throws Exception { - - String offsetTimePart = ""; - if (!Strings.isNullOrEmpty(offset)) { - int indexOfUnderscore = offset.indexOf('_'); - if (indexOfUnderscore > -1) { - offsetTimePart = offset.substring(indexOfUnderscore + 1); - // Add the period minus one millisecond to the offset - // to ensure only the next group of points are returned - DateTime offsetDateTime = 
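/*
 * InfluxV9RepoReader above enables transparent gzip with a pair of HttpClient
 * interceptors: one adds Accept-Encoding: gzip to each request, the other
 * swaps the response entity for a GzipDecompressingEntity when the server
 * compressed the body. A condensed sketch; the pooled connection manager and
 * auth header are omitted, and comparing the raw Content-Encoding value is a
 * simplification of the original's header-element loop.
 */
import org.apache.http.HttpEntity;
import org.apache.http.HttpRequestInterceptor;
import org.apache.http.HttpResponseInterceptor;
import org.apache.http.client.entity.GzipDecompressingEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;

class GzipClientSketch {
  static CloseableHttpClient build() {
    return HttpClients.custom()
        .addInterceptorFirst((HttpRequestInterceptor) (request, context) -> {
          // Ask for gzip on every outgoing request.
          if (!request.containsHeader("Accept-Encoding")) {
            request.addHeader("Accept-Encoding", "gzip");
          }
        })
        .addInterceptorFirst((HttpResponseInterceptor) (response, context) -> {
          // Transparently unwrap gzip bodies before callers read the entity.
          HttpEntity entity = response.getEntity();
          if (entity != null && entity.getContentEncoding() != null
              && "gzip".equalsIgnoreCase(entity.getContentEncoding().getValue())) {
            response.setEntity(new GzipDecompressingEntity(entity));
          }
        })
        .build();
  }
}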
DateTime.parse(offsetTimePart).plusSeconds(period).minusMillis(1); - // leave out any ID, as influx doesn't understand it - offset = offsetDateTime.toString(); - } - } - - String q = buildQuery(tenantId, name, dimensions, startTime, endTime, - statistics, period, offset, limit, mergeMetricsFlag, groupBy); - - String r = this.influxV9RepoReader.read(q); - - Series series = this.objectMapper.readValue(r, Series.class); - - List statisticsList = statisticslist(series, offset, limit); - - logger.debug("Found {} metric definitions matching query", statisticsList.size()); - - return statisticsList; - - } - - private String buildQuery(String tenantId, String name, Map dimensions, - DateTime startTime, DateTime endTime, List statistics, - int period, String offset, int limit, Boolean mergeMetricsFlag, - List groupBy) - throws Exception { - - String offsetTimePart = ""; - if (!Strings.isNullOrEmpty(offset)) { - int indexOfUnderscore = offset.indexOf('_'); - offsetTimePart = offset.substring(indexOfUnderscore + 1); - } - - String q; - - if (!groupBy.isEmpty()) { - - q = String.format("select %1$s %2$s " - + "where %3$s %4$s %5$s %6$s %7$s %8$s %9$s %10$s", - funcPart(statistics), - this.influxV9Utils.namePart(name, true), - this.influxV9Utils.privateTenantIdPart(tenantId), - this.influxV9Utils.privateRegionPart(this.region), - this.influxV9Utils.startTimePart(startTime), - this.influxV9Utils.dimPart(dimensions), - this.influxV9Utils.endTimePart(endTime), - this.influxV9Utils.timeOffsetPart(offsetTimePart), - this.influxV9Utils.periodPartWithGroupBy(period, groupBy), - this.influxV9Utils.limitPart(limit)); - } else { - - if (Boolean.FALSE.equals(mergeMetricsFlag) && - !this.influxV9MetricDefinitionRepo.isAtMostOneSeries(tenantId, name, dimensions)) { - - throw new MultipleMetricsException(name, dimensions); - - } - - q = String.format("select %1$s %2$s " - + "where %3$s %4$s %5$s %6$s %7$s %8$s %9$s %10$s", - funcPart(statistics), - this.influxV9Utils.namePart(name, true), - this.influxV9Utils.privateTenantIdPart(tenantId), - this.influxV9Utils.privateRegionPart(this.region), - this.influxV9Utils.startTimePart(startTime), - this.influxV9Utils.dimPart(dimensions), - this.influxV9Utils.endTimePart(endTime), - this.influxV9Utils.timeOffsetPart(offsetTimePart), - this.influxV9Utils.periodPart(period, mergeMetricsFlag), - this.influxV9Utils.limitPart(limit)); - - } - - logger.debug("Statistics query: {}", q); - - return q; - } - - private List statisticslist(Series series, String offsetStr, int limit) { - - int offsetId = 0; - String offsetTimestamp = "1970-01-01T00:00:00.000Z"; - - if (offsetStr != null) { - List offsets = influxV9Utils.parseMultiOffset(offsetStr); - if (offsets.size() > 1) { - offsetId = Integer.parseInt(offsets.get(0)); - offsetTimestamp = offsets.get(1); - } else { - offsetId = 0; - offsetTimestamp = offsets.get(0); - } - } - - List statisticsList = new LinkedList<>(); - - if (!series.isEmpty()) { - - int remaining_limit = limit; - int index = 0; - for (Serie serie : series.getSeries()) { - if (index < offsetId || remaining_limit <= 0) { - index++; - continue; - } - - Statistics statistics = new Statistics(serie.getName(), - this.influxV9Utils.filterPrivateTags(serie.getTags()), - Arrays.asList(translateNames(serie.getColumns()))); - statistics.setId(Integer.toString(index)); - - - for (Object[] valueObjects : serie.getValues()) { - if (remaining_limit <= 0) { - break; - } - - List values = buildValsList(valueObjects); - if (values == null) - continue; - - if (((String) 
values.get(0)).compareTo(offsetTimestamp) >= 0 || index > offsetId) { - statistics.addMeasurement(values); - remaining_limit--; - } - } - - if (statistics.getMeasurements().size() > 0) { - statisticsList.add(statistics); - } - index++; - - } - - } - - return statisticsList; - } - - private List buildValsList(Object[] values) { - - ArrayList valObjArryList = new ArrayList<>(); - - // First value is the timestamp. - String timestamp = values[0].toString(); - int index = timestamp.indexOf('.'); - if (index > 0) - // In certain queries, timestamps will not align to second resolution, - // remove the sub-second values. - valObjArryList.add(timestamp.substring(0,index).concat("Z")); - else - valObjArryList.add(timestamp); - - // All other values are doubles or nulls. - for (int i = 1; i < values.length; ++i) { - if (values[i] != null) { - valObjArryList.add(Double.parseDouble((String) values[i])); - } else { - return null; - } - } - - return valObjArryList; - } - - private String[] translateNames(String[] columnNamesArry) { - - for (int i = 0; i < columnNamesArry.length; i++) { - - columnNamesArry[i] = columnNamesArry[i].replaceAll("^time$", "timestamp"); - columnNamesArry[i] = columnNamesArry[i].replaceAll("^mean$", "avg"); - - } - - return columnNamesArry; - } - - private String funcPart(List statistics) { - - StringBuilder sb = new StringBuilder(); - - for (String stat : statistics) { - if (sb.length() != 0) { - sb.append(","); - } - - if (stat.trim().toLowerCase().equals("avg")) { - sb.append("mean(value)"); - } else { - sb.append(String.format("%1$s(value)", stat)); - } - } - - return sb.toString(); - } -} diff --git a/java/src/main/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9Utils.java b/java/src/main/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9Utils.java deleted file mode 100644 index 6e495bdb6..000000000 --- a/java/src/main/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9Utils.java +++ /dev/null @@ -1,331 +0,0 @@ -/* - * (C) Copyright 2015,2016 Hewlett Packard Enterprise Development LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
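/*
 * funcPart() and translateNames() above bridge two vocabularies: the API says
 * "avg", InfluxQL says mean(value), and on the way back the mean column is
 * renamed avg while time becomes timestamp. Both directions side by side,
 * under an illustrative class name:
 */
class StatNameSketch {
  // API statistic -> InfluxQL aggregate expression.
  static String toInflux(String stat) {
    return "avg".equals(stat.trim().toLowerCase())
        ? "mean(value)" : String.format("%s(value)", stat);
  }

  // InfluxQL result column -> API column name.
  static String toApi(String column) {
    return column.replaceAll("^time$", "timestamp").replaceAll("^mean$", "avg");
  }
}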
- */ -package monasca.api.infrastructure.persistence.influxdb; - -import com.google.common.base.Joiner; -import com.google.common.base.Splitter; -import com.google.common.base.Strings; - -import org.apache.commons.lang3.StringUtils; -import org.joda.time.DateTime; -import org.joda.time.format.ISODateTimeFormat; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import monasca.common.util.Conversions; - -public class InfluxV9Utils { - private static final Pattern sqlUnsafePattern = Pattern.compile("^.*('|;|\")+.*$"); - - static final String OFFSET_SEPARATOR = "_"; - static final Splitter - offsetSplitter = Splitter.on(OFFSET_SEPARATOR).omitEmptyStrings().trimResults(); - static final Joiner COMMA_JOINER = Joiner.on(','); - - public InfluxV9Utils() { - } - - public String sanitize(final String taintedString) { - - Matcher m = sqlUnsafePattern.matcher(taintedString); - - if (m.matches()) { - - throw new IllegalArgumentException(String.format("Input from user contains single quote ['] or " - + "semi-colon [;] or double quote [\"] characters[ %1$s ]", - taintedString)); - } - - return taintedString; - } - - String buildTimePart(final DateTime startTime, final DateTime endTime) { - final StringBuilder sb = new StringBuilder(); - - if (startTime != null) { - sb.append(String.format(" and time >= " + "'" + ISODateTimeFormat.dateTime().print(startTime) - + "'")); - } - - if (endTime != null) { - sb.append(String.format(" and time <= " + "'" + ISODateTimeFormat.dateTime().print(endTime) - + "'")); - } - - return sb.toString(); - } - - public String buildAlarmsPart(List alarmIds) { - - StringBuilder sb = new StringBuilder(); - for (String alarmId : alarmIds) { - if (sb.length() > 0) { - sb.append(" or "); - } - sb.append(String.format(" alarm_id = '%1$s' ", alarmId)); - } - - if (sb.length() > 0) { - sb.insert(0, " and ("); - sb.insert(sb.length(), ")"); - } - return sb.toString(); - } - - public String groupByPart(List groupBy) { - - if (!groupBy.isEmpty() && !groupBy.contains("*")) - return " group by " + COMMA_JOINER.join(groupBy) + ' '; - return "group by * "; - - } - - public String namePart(String name, boolean isRequired) { - - if (isRequired) { - if (name == null || name.isEmpty()) { - throw new IllegalArgumentException(String.format("Found null or empty name: %1$s", name)); - } - } - - if (name == null || name.isEmpty()) { - return ""; - } else { - return String.format(" from \"%1$s\"", sanitize(name)); - } - } - - public String publicTenantIdPart(String tenantId) { - - if (tenantId == null || tenantId.isEmpty()) { - throw new IllegalArgumentException(String.format("Found null or empty tenant id: %1$s", tenantId)); - } - - return " tenant_id=" + "'" + sanitize(tenantId) + "'"; - - } - - public String privateTenantIdPart(String tenantId) { - - if (tenantId == null || tenantId.isEmpty()) { - throw new IllegalArgumentException(String.format("Found null or empty tenant id: %1$s", tenantId)); - } - - return " _tenant_id=" + "'" + sanitize(tenantId) + "'"; - - } - - public String alarmIdPart(String alarmId) { - - if (alarmId == null || alarmId.isEmpty()) { - return ""; - } - - return " and alarm_id=" + "'" + alarmId + "'"; - } - - - public String timeOffsetPart(String offset) { - - if (StringUtils.isEmpty(offset)) { - return StringUtils.EMPTY; - } - if(!"0".equals(offset)){ - Object convertible; - try { - convertible = Long.valueOf(offset); - } catch (IllegalArgumentException exp) { - // not a numeric 
value - convertible = offset; - } - offset = Conversions.variantToDateTime(convertible).toString(ISODateTimeFormat.dateTime()); - } - - return String.format(" and time > '%1$s'", offset); - } - - public String privateRegionPart(String region) { - - if (region == null || region.isEmpty()) { - throw new IllegalArgumentException(String.format("Found null or empty region: %1$s", region)); - } - - return " and _region=" + "'" + sanitize(region) + "'"; - - } - - public String dimPart(Map dims) { - - StringBuilder sb = new StringBuilder(); - - if (dims != null && !dims.isEmpty()) { - for (String k : dims.keySet()) { - String v = dims.get(k); - if (k != null && !k.isEmpty()) { - sb.append(" and \"" + sanitize(k) + "\""); - if (Strings.isNullOrEmpty(v)) { - sb.append("=~ /.*/"); - } else if (v.contains("|")) { - sb.append("=~ " + "/^" + sanitize(v) + "$/"); - } else { - sb.append("= " + "'" + sanitize(v) + "'"); - } - } - } - } - - return sb.toString(); - } - - public String startTimePart(DateTime startTime) { - - return startTime != null ? " and time > " + "'" + ISODateTimeFormat.dateTime().print(startTime) - + "'" : ""; - } - - public String endTimePart(DateTime endTime) { - - return endTime != null ? " and time < " + "'" + ISODateTimeFormat.dateTime().print(endTime) - + "'" : ""; - } - - public String limitPart(int limit) { - - // We add 1 to limit to determine if we need to insert a next link. - return String.format(" limit %1$d", limit + 1); - } - - public String offsetPart(int startIndex) { - - return String.format(" offset %1$d", startIndex); - } - - public int startIndex(String offset) { - - if (offset == null || offset.isEmpty()) { - - return 0; - - } - - int intOffset; - - try { - - intOffset = Integer.parseInt(offset); - - } catch (NumberFormatException nfe) { - - throw new IllegalArgumentException( - String.format("Found non-integer offset '%1$s'. Offset must be a positive integer", offset)); - } - - if (intOffset < 0) { - - throw new IllegalArgumentException( - String.format("Found negative offset '%1$s'. Offset must be a positive integer", offset)); - - } - - // We've already returned up to offset, so return offset + 1. - return intOffset + 1; - } - - public String startTimeEndTimePart(DateTime startTime, DateTime endTime) { - - return buildTimePart(startTime, endTime); - } - - public String alarmIdsPart(List alarmIdList) { - - return buildAlarmsPart(alarmIdList); - - } - - public String periodPartWithGroupBy(int period, List groupBy) { - if (period <= 0) { - period = 300; - } - - String periodStr = ",time(" + period + "s)"; - - return String.format(" group by %1$s%2$s", COMMA_JOINER.join(groupBy), periodStr); - } - - public String periodPart(int period, Boolean mergeMetricsFlag) { - String periodStr = period > 0 ? String.format(" group by time(%1$ds)", period) - : " group by time(300s)"; - periodStr += mergeMetricsFlag ? 
"" : ", *"; - - return periodStr; - } - - Map filterPrivateTags(Map tagMap) { - - Map filteredMap = new HashMap<>(tagMap); - - filteredMap.remove("_tenant_id"); - filteredMap.remove("_region"); - - return filteredMap; - } - - Map filterGroupByTags(Map tagMap, List groupBy) { - Map filteredMap = new HashMap<>(tagMap); - - for (String key : tagMap.keySet()) { - if (!groupBy.contains(key)) - filteredMap.remove(key); - } - - return filteredMap; - } - - public String threeDigitMillisTimestamp(String origTimestamp) { - final int length = origTimestamp.length(); - final String timestamp; - if (length == 20) { - timestamp = origTimestamp.substring(0, 19) + ".000Z"; - } else { - final String millisecond = origTimestamp.substring(20, length - 1); - final String millisecond_3d = StringUtils.rightPad(millisecond, 3, '0'); - timestamp = origTimestamp.substring(0, 19) + '.' + millisecond_3d + 'Z'; - } - return timestamp; - } - - public List parseMultiOffset(String offsetStr) { - return offsetSplitter.splitToList(offsetStr); - } - - public Map getDimensions(String[] vals, String[] cols) { - Map dims = new HashMap<>(); - for (int i = 0; i < cols.length; ++i) { - - // Dimension names that start with underscore are reserved. I.e., _key, _region, _tenant_id. - // Influxdb inserts _key. - // Monasca Persister inserts _region and _tenant_id. - if (!cols[i].startsWith("_")) { - if (!vals[i].equalsIgnoreCase("null")) { - dims.put(cols[i], vals[i]); - } - } - } - return dims; - } -} diff --git a/java/src/main/java/monasca/api/infrastructure/persistence/influxdb/Series.java b/java/src/main/java/monasca/api/infrastructure/persistence/influxdb/Series.java deleted file mode 100644 index 6d715e4b5..000000000 --- a/java/src/main/java/monasca/api/infrastructure/persistence/influxdb/Series.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.infrastructure.persistence.influxdb; - -import java.util.HashMap; -import java.util.Map; - -public class Series { - - public SeriesElement[] results; - public String error; - - boolean isEmpty() { - - return this.results[0].series == null; - } - - int getSeriesLength() { - - if (!isEmpty()) { - return this.results[0].series.length; - } else { - return 0; - } - } - - Serie[] getSeries() { - - return this.results[0].series; - } - - public String getError() { - - return this.error; - - } -} - -class SeriesElement { - - public Serie[] series; - public String error; - -} - -class Serie { - - // Initialize to defaults to avoid NPE. 
- public String name = ""; - Map tags = new HashMap(); - public String[] columns = new String[0]; - public String[][] values = new String[0][0]; - - public String getName() { - return name; - } - - public Map getTags() { - return tags; - } - - public String[] getColumns() { - return columns; - } - - public String[][] getValues() { - return values; - } -} diff --git a/java/src/main/java/monasca/api/infrastructure/persistence/mysql/AlarmDefinitionMySqlRepoImpl.java b/java/src/main/java/monasca/api/infrastructure/persistence/mysql/AlarmDefinitionMySqlRepoImpl.java deleted file mode 100644 index 08387ccd8..000000000 --- a/java/src/main/java/monasca/api/infrastructure/persistence/mysql/AlarmDefinitionMySqlRepoImpl.java +++ /dev/null @@ -1,448 +0,0 @@ -/* - * (C) Copyright 2014,2016 Hewlett Packard Enterprise Development Company LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.infrastructure.persistence.mysql; - -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.ArrayList; -import java.sql.SQLException; -import java.sql.ResultSet; - -import javax.inject.Inject; -import javax.inject.Named; - -import org.skife.jdbi.v2.DBI; -import org.skife.jdbi.v2.Handle; -import org.skife.jdbi.v2.Query; -import org.skife.jdbi.v2.tweak.ResultSetMapper; -import org.skife.jdbi.v2.StatementContext; - -import com.google.common.base.Joiner; -import com.google.common.collect.Iterables; - -import monasca.api.infrastructure.persistence.PersistUtils; -import monasca.common.model.alarm.AggregateFunction; -import monasca.common.model.alarm.AlarmOperator; -import monasca.common.model.alarm.AlarmSeverity; -import monasca.common.model.alarm.AlarmState; -import monasca.common.model.alarm.AlarmSubExpression; -import monasca.common.model.metric.MetricDefinition; -import monasca.common.util.Conversions; -import monasca.api.domain.exception.EntityNotFoundException; -import monasca.api.domain.model.alarmdefinition.AlarmDefinition; -import monasca.api.domain.model.alarmdefinition.AlarmDefinitionRepo; -import monasca.api.infrastructure.persistence.DimensionQueries; -import monasca.api.infrastructure.persistence.SubAlarmDefinitionQueries; - -import com.google.common.base.Splitter; -import com.google.common.collect.Lists; - -/** - * Alarm repository implementation. 
- */ -public class AlarmDefinitionMySqlRepoImpl implements AlarmDefinitionRepo { - private static final Joiner COMMA_JOINER = Joiner.on(','); - private static final String SUB_ALARM_SQL = - "select sa.*, sad.dimensions from sub_alarm_definition as sa " - + "left join (select sub_alarm_definition_id, group_concat(dimension_name, '=', value) as dimensions from sub_alarm_definition_dimension group by sub_alarm_definition_id ) as sad " - + "on sad.sub_alarm_definition_id = sa.id where sa.alarm_definition_id = :alarmDefId"; - private static final String CREATE_SUB_EXPRESSION_SQL = "insert into sub_alarm_definition " - + "(id, alarm_definition_id, function, metric_name, " - + "operator, threshold, period, periods, is_deterministic, " - + "created_at, updated_at) " - + "values (?, ?, ?, ?, ?, ?, ?, ?, ?, NOW(), NOW())"; - private static final String UPDATE_SUB_ALARM_DEF_SQL = "update sub_alarm_definition set " - + "operator = ?, threshold = ?, is_deterministic = ?, updated_at = NOW() where id = ?"; - - private final DBI db; - private final PersistUtils persistUtils; - - @Inject - public AlarmDefinitionMySqlRepoImpl(@Named("mysql") DBI db, PersistUtils persistUtils) { - this.db = db; - this.persistUtils = persistUtils; - } - - @Override - public AlarmDefinition create(String tenantId, String id, String name, String description, - String severity, String expression, Map subExpressions, - List matchBy, List alarmActions, List okActions, - List undeterminedActions) { - Handle h = db.open(); - - try { - h.begin(); - h.insert( - "insert into alarm_definition (id, tenant_id, name, description, severity, expression, match_by, actions_enabled, created_at, updated_at, deleted_at) values (?, ?, ?, ?, ?, ?, ?, ?, NOW(), NOW(), NULL)", - id, tenantId, name, description, severity, expression, - matchBy == null || Iterables.isEmpty(matchBy) ? null : COMMA_JOINER.join(matchBy), true); - - // Persist sub-alarms - createSubExpressions(h, id, subExpressions); - - // Persist actions - persistActions(h, id, AlarmState.ALARM, alarmActions); - persistActions(h, id, AlarmState.OK, okActions); - persistActions(h, id, AlarmState.UNDETERMINED, undeterminedActions); - - h.commit(); - return new AlarmDefinition(id, name, description, severity, expression, matchBy, true, - alarmActions, okActions == null ? Collections.emptyList() : okActions, - undeterminedActions == null ? Collections.emptyList() : undeterminedActions); - } catch (RuntimeException e) { - h.rollback(); - throw e; - } finally { - h.close(); - } - } - - @Override - public void deleteById(String tenantId, String alarmDefId) { - try (Handle h = db.open()) { - if (h - .update( - "update alarm_definition set deleted_at = NOW() where tenant_id = ? and id = ? 
and deleted_at is NULL", - tenantId, alarmDefId) == 0) - throw new EntityNotFoundException("No alarm definition exists for %s", alarmDefId); - - // Cascade soft delete to alarms - h.execute("delete from alarm where alarm_definition_id = :id", alarmDefId); - } - } - - @Override - public String exists(String tenantId, String name) { - try (Handle h = db.open()) { - Map map = h - .createQuery( - "select id from alarm_definition where tenant_id = :tenantId and name = :name and deleted_at is NULL") - .bind("tenantId", tenantId).bind("name", name).first(); - if (map != null) { - if (map.values().size() != 0) { - return map.get("id").toString(); - } else { - return null; - } - } else { - return null; - } - } - } - - @SuppressWarnings("unchecked") - @Override - public List find(String tenantId, String name, - Map dimensions, List severities, - List sortBy, String offset, int limit) { - - - try (Handle h = db.open()) { - - String query = - " SELECT t.id, t.tenant_id, t.name, t.description, t.expression, t.severity, t.match_by," - + " t.actions_enabled, t.created_at, t.updated_at, t.deleted_at, " - + " GROUP_CONCAT(aa.alarm_state) AS states, " - + " GROUP_CONCAT(aa.action_id) AS notificationIds " - + "FROM (SELECT distinct ad.id, ad.tenant_id, ad.name, ad.description, ad.expression," - + " ad.severity, ad.match_by, ad.actions_enabled, ad.created_at, " - + " ad.updated_at, ad.deleted_at " - + " FROM alarm_definition AS ad " - + " LEFT OUTER JOIN sub_alarm_definition AS sad ON ad.id = sad.alarm_definition_id " - + " LEFT OUTER JOIN sub_alarm_definition_dimension AS dim ON sad.id = dim.sub_alarm_definition_id %1$s " - + " WHERE ad.tenant_id = :tenantId AND ad.deleted_at IS NULL %2$s) AS t " - + "LEFT OUTER JOIN alarm_action AS aa ON t.id = aa.alarm_definition_id " - + "GROUP BY t.id %3$s %4$s %5$s"; - - StringBuilder sbWhere = new StringBuilder(); - - if (name != null) { - sbWhere.append(" and ad.name = :name"); - } - - sbWhere.append(MySQLUtils.buildSeverityAndClause(severities)); - - String orderByPart = ""; - if (sortBy != null && !sortBy.isEmpty()) { - orderByPart = " order by " + COMMA_JOINER.join(sortBy); - if (!orderByPart.contains("id")) { - orderByPart = orderByPart + ",id"; - } - } else { - orderByPart = " order by id "; - } - - String limitPart = ""; - if (limit > 0) { - limitPart = " limit :limit"; - } - - String offsetPart = ""; - if (offset != null) { - offsetPart = " offset " + offset + ' '; - } - - String sql = String.format(query, - SubAlarmDefinitionQueries.buildJoinClauseFor(dimensions), sbWhere, orderByPart, - limitPart, offsetPart); - - Query q = h.createQuery(sql); - - q.bind("tenantId", tenantId); - - if (name != null) { - q.bind("name", name); - } - - MySQLUtils.bindSeverityToQuery(q, severities); - - if (limit > 0) { - q.bind("limit", limit + 1); - } - - q.registerMapper(new AlarmDefinitionMapper()); - q = q.mapTo(AlarmDefinition.class); - SubAlarmDefinitionQueries.bindDimensionsToQuery(q, dimensions); - List resultSet = (List) q.list(); - return resultSet; - } - } - - @Override - public AlarmDefinition findById(String tenantId, String alarmDefId) { - - try (Handle h = db.open()) { - String query = "SELECT alarm_definition.id, alarm_definition.tenant_id, alarm_definition.name, alarm_definition.description, " - + "alarm_definition.expression, alarm_definition.severity, alarm_definition.match_by, alarm_definition.actions_enabled, " - +" alarm_definition.created_at, alarm_definition.updated_at, alarm_definition.deleted_at, " - + "GROUP_CONCAT(alarm_action.action_id) AS 
notificationIds,group_concat(alarm_action.alarm_state) AS states " - + "FROM alarm_definition LEFT OUTER JOIN alarm_action ON alarm_definition.id=alarm_action.alarm_definition_id " - + " WHERE alarm_definition.tenant_id=:tenantId AND alarm_definition.id=:alarmDefId AND alarm_definition.deleted_at " - + " IS NULL GROUP BY alarm_definition.id"; - - Query q = h.createQuery(query); - q.bind("tenantId", tenantId); - q.bind("alarmDefId", alarmDefId); - - q.registerMapper(new AlarmDefinitionMapper()); - q = q.mapTo(AlarmDefinition.class); - AlarmDefinition alarmDefinition = (AlarmDefinition) q.first(); - if(alarmDefinition == null) - { - throw new EntityNotFoundException("No alarm definition exists for %s", alarmDefId); - } - return alarmDefinition; - } - } - - @Override - public Map findSubAlarmMetricDefinitions(String alarmDefId) { - try (Handle h = db.open()) { - List> rows = - h.createQuery(SUB_ALARM_SQL).bind("alarmDefId", alarmDefId).list(); - Map subAlarmMetricDefs = new HashMap<>(); - for (Map row : rows) { - String id = (String) row.get("id"); - String metricName = (String) row.get("metric_name"); - Map dimensions = - DimensionQueries.dimensionsFor((String) row.get("dimensions")); - subAlarmMetricDefs.put(id, new MetricDefinition(metricName, dimensions)); - } - - return subAlarmMetricDefs; - } - } - - @Override - public Map findSubExpressions(String alarmDefId) { - try (Handle h = db.open()) { - List> rows = - h.createQuery(SUB_ALARM_SQL).bind("alarmDefId", alarmDefId).list(); - Map subExpressions = new HashMap<>(); - for (Map row : rows) { - String id = (String) row.get("id"); - AggregateFunction function = AggregateFunction.fromJson((String) row.get("function")); - String metricName = (String) row.get("metric_name"); - AlarmOperator operator = AlarmOperator.fromJson((String) row.get("operator")); - Double threshold = (Double) row.get("threshold"); - // MySQL connector returns an Integer, Drizzle returns a Long for period and periods. - // Need to convert the results appropriately based on type. - Integer period = Conversions.variantToInteger(row.get("period")); - Integer periods = Conversions.variantToInteger(row.get("periods")); - Boolean isDeterministic = Conversions.variantToBoolean(row.get("is_deterministic")); - Map dimensions = - DimensionQueries.dimensionsFor((String) row.get("dimensions")); - - subExpressions.put( - id, - new AlarmSubExpression( - function, - new MetricDefinition(metricName, dimensions), - operator, - threshold, - period, - periods, - isDeterministic - ) - ); - - } - - return subExpressions; - } - } - - @Override - public void update(String tenantId, String id, boolean patch, String name, String description, - String expression, List matchBy, String severity, boolean actionsEnabled, - Collection oldSubAlarmIds, Map changedSubAlarms, - Map newSubAlarms, List alarmActions, - List okActions, List undeterminedActions) { - Handle h = db.open(); - - try { - h.begin(); - h.insert( - "update alarm_definition set name = ?, description = ?, expression = ?, match_by = ?, severity = ?, actions_enabled = ?, updated_at = NOW() where tenant_id = ? and id = ?", - name, description, expression, matchBy == null || Iterables.isEmpty(matchBy) ? 
null - : COMMA_JOINER.join(matchBy), severity, actionsEnabled, tenantId, id); - - // Delete old sub-alarms - if (oldSubAlarmIds != null) - for (String oldSubAlarmId : oldSubAlarmIds) - h.execute("delete from sub_alarm_definition where id = ?", oldSubAlarmId); - - // Update changed sub-alarms - if (changedSubAlarms != null) - for (Map.Entry entry : changedSubAlarms.entrySet()) { - AlarmSubExpression sa = entry.getValue(); - h.execute( - UPDATE_SUB_ALARM_DEF_SQL, - sa.getOperator().name(), - sa.getThreshold(), - sa.isDeterministic(), - entry.getKey() - ); - } - - // Insert new sub-alarms - createSubExpressions(h, id, newSubAlarms); - - // Delete old actions - if (patch) { - deleteActions(h, id, AlarmState.ALARM, alarmActions); - deleteActions(h, id, AlarmState.OK, okActions); - deleteActions(h, id, AlarmState.UNDETERMINED, undeterminedActions); - } else - h.execute("delete from alarm_action where alarm_definition_id = ?", id); - - // Insert new actions - persistActions(h, id, AlarmState.ALARM, alarmActions); - persistActions(h, id, AlarmState.OK, okActions); - persistActions(h, id, AlarmState.UNDETERMINED, undeterminedActions); - - h.commit(); - } catch (RuntimeException e) { - h.rollback(); - throw e; - } finally { - h.close(); - } - } - - private void deleteActions(Handle handle, String id, AlarmState alarmState, List actions) { - if (actions != null) - handle.execute("delete from alarm_action where alarm_definition_id = ? and alarm_state = ?", id, - alarmState.name()); - } - - private void persistActions(Handle handle, String id, AlarmState alarmState, List actions) { - if (actions != null) - for (String action : actions) - handle.insert("insert into alarm_action values (?, ?, ?)", id, alarmState.name(), action); - } - - private void createSubExpressions(Handle handle, String id, - Map alarmSubExpressions) { - if (alarmSubExpressions != null) { - for (Map.Entry subEntry : alarmSubExpressions.entrySet()) { - String subAlarmId = subEntry.getKey(); - AlarmSubExpression subExpr = subEntry.getValue(); - MetricDefinition metricDef = subExpr.getMetricDefinition(); - - // Persist sub-alarm - handle.insert(CREATE_SUB_EXPRESSION_SQL, subAlarmId, id, subExpr.getFunction().name(), - metricDef.name, subExpr.getOperator().name(), subExpr.getThreshold(), - subExpr.getPeriod(), subExpr.getPeriods(), subExpr.isDeterministic()); - - // Persist sub-alarm dimensions - if (metricDef.dimensions != null && !metricDef.dimensions.isEmpty()) - for (Map.Entry dimEntry : metricDef.dimensions.entrySet()) - handle.insert("insert into sub_alarm_definition_dimension values (?, ?, ?)", subAlarmId, - dimEntry.getKey(), dimEntry.getValue()); - } - } - } - - private static class AlarmDefinitionMapper implements ResultSetMapper { - - private static final Splitter - COMMA_SPLITTER = - Splitter.on(',').omitEmptyStrings().trimResults(); - - public AlarmDefinition map(int index, ResultSet r, StatementContext ctx) throws SQLException { - String notificationIds = r.getString("notificationIds"); - String states = r.getString("states"); - String matchBy = r.getString("match_by"); - List notifications = splitStringIntoList(notificationIds); - List state = splitStringIntoList(states); - List match = splitStringIntoList(matchBy); - - List okActionIds = new ArrayList(); - List alarmActionIds = new ArrayList(); - List undeterminedActionIds = new ArrayList(); - - int stateAndActionIndex = 0; - for (String singleState : state) { - if (singleState.equals(AlarmState.UNDETERMINED.name())) { - 
undeterminedActionIds.add(notifications.get(stateAndActionIndex)); - } - if (singleState.equals(AlarmState.OK.name())) { - okActionIds.add(notifications.get(stateAndActionIndex)); - } - if (singleState.equals(AlarmState.ALARM.name())) { - alarmActionIds.add(notifications.get(stateAndActionIndex)); - } - stateAndActionIndex++; - } - - return new AlarmDefinition(r.getString("id"), r.getString("name"), r.getString("description"), - r.getString("severity"), r.getString("expression"), match, - r.getBoolean("actions_enabled"), alarmActionIds, okActionIds, - undeterminedActionIds); - } - - private List splitStringIntoList(String commaDelimitedString) { - if (commaDelimitedString == null) { - return new ArrayList(); - } - Iterable split = COMMA_SPLITTER.split(commaDelimitedString); - return Lists.newArrayList(split); - } - } -} - diff --git a/java/src/main/java/monasca/api/infrastructure/persistence/mysql/AlarmMySqlRepoImpl.java b/java/src/main/java/monasca/api/infrastructure/persistence/mysql/AlarmMySqlRepoImpl.java deleted file mode 100644 index d80af4524..000000000 --- a/java/src/main/java/monasca/api/infrastructure/persistence/mysql/AlarmMySqlRepoImpl.java +++ /dev/null @@ -1,653 +0,0 @@ -/* - * Copyright (c) 2014-2016 Hewlett Packard Enterprise Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.infrastructure.persistence.mysql; - -import com.google.common.base.Joiner; -import com.google.common.base.Splitter; -import com.google.common.base.Strings; - -import monasca.api.domain.exception.EntityNotFoundException; -import monasca.api.domain.model.alarm.Alarm; -import monasca.api.domain.model.alarm.AlarmCount; -import monasca.api.domain.model.alarm.AlarmRepo; -import monasca.api.infrastructure.persistence.DimensionQueries; -import monasca.api.infrastructure.persistence.PersistUtils; -import monasca.common.model.alarm.AlarmSeverity; -import monasca.common.model.alarm.AlarmState; -import monasca.common.model.alarm.AlarmSubExpression; -import monasca.common.model.metric.MetricDefinition; -import monasca.common.persistence.BeanMapper; - -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.skife.jdbi.v2.DBI; -import org.skife.jdbi.v2.Handle; -import org.skife.jdbi.v2.Query; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.Timestamp; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; - -import javax.inject.Inject; -import javax.inject.Named; - -/** - * Alarmed metric repository implementation. 
- */ -public class AlarmMySqlRepoImpl implements AlarmRepo { - - private static final Joiner COMMA_JOINER = Joiner.on(','); - private static final Splitter SPACE_SPLITTER = Splitter.on(' '); - private static final Logger logger = LoggerFactory.getLogger(AlarmMySqlRepoImpl.class); - - private final DBI db; - private final PersistUtils persistUtils; - - private static final String FIND_ALARM_BY_ID_SQL = - "select ad.id as alarm_definition_id, ad.severity, ad.name as alarm_definition_name, " - + "a.id as alarm_id, a.state, a.lifecycle_state, a.link, a.state_updated_at as state_updated_timestamp, " - + "a.updated_at as updated_timestamp, a.created_at as created_timestamp, " - + "md.name as metric_name, mdg.dimensions as metric_dimensions from alarm as a " - + "inner join alarm_definition ad on ad.id = a.alarm_definition_id " - + "inner join alarm_metric as am on am.alarm_id = a.id " - + "inner join metric_definition_dimensions as mdd on mdd.id = am.metric_definition_dimensions_id " - + "inner join metric_definition as md on md.id = mdd.metric_definition_id " - + "left outer join (select dimension_set_id, name, value, group_concat(name, '=', value) as dimensions " - + "from metric_dimension group by dimension_set_id) as mdg on mdg.dimension_set_id = mdd.metric_dimension_set_id " - + "where ad.tenant_id = :tenantId and ad.deleted_at is null %s order by a.id %s"; - - private static final String FIND_ALARMS_SQL = - "select ad.id as alarm_definition_id, ad.severity, ad.name as alarm_definition_name, " - + "a.id as alarm_id, a.state, a.lifecycle_state, a.link, a.state_updated_at as state_updated_timestamp, " - + "a.updated_at as updated_timestamp, a.created_at as created_timestamp, " - + "md.name as metric_name, group_concat(mdim.name, '=', mdim.value order by mdim.name) as metric_dimensions " - + "from alarm as a " - + "inner join %1$s as alarm_id_list on alarm_id_list.id = a.id " - + "inner join alarm_definition ad on ad.id = a.alarm_definition_id " - + "inner join alarm_metric as am on am.alarm_id = a.id " - + "inner join metric_definition_dimensions as mdd on mdd.id = am.metric_definition_dimensions_id " - + "inner join metric_definition as md on md.id = mdd.metric_definition_id " - + "left outer join metric_dimension as mdim on mdim.dimension_set_id = mdd.metric_dimension_set_id " - + "group by a.id, md.name, mdim.dimension_set_id " - + "%2$s"; - - @Inject - public AlarmMySqlRepoImpl(@Named("mysql") DBI db, PersistUtils persistUtils) { - this.db = db; - this.persistUtils = persistUtils; - } - - private void buildJoinClauseFor(Map dimensions, StringBuilder sbJoin) { - - if (dimensions == null) { - return; - } - int i = 0; - for (String dimension_key : dimensions.keySet()) { - final String indexStr = String.valueOf(i); - sbJoin.append(" inner join metric_dimension md").append(indexStr).append(" on md") - .append(indexStr) - .append(".name = :dname").append(indexStr); - String dim_value = dimensions.get(dimension_key); - if (!Strings.isNullOrEmpty(dim_value)) { - sbJoin.append(" and ("); - List values = Splitter.on('|').splitToList(dim_value); - for (int j = 0; j < values.size(); j++) { - sbJoin.append(" md").append(indexStr) - .append(".value = :dvalue").append(indexStr).append('_').append(j); - if (j < values.size() - 1) { - sbJoin.append(" or"); - } - } - sbJoin.append(")"); - } - sbJoin.append(" and mdd.metric_dimension_set_id = md") - .append(indexStr).append(".dimension_set_id"); - i++; - } - } - - @Override - public void deleteById(String tenantId, String id) { - final String sql = "delete 
a from alarm a where a.id = ?"; - - try (Handle h = db.open()) { - // This will throw an EntityNotFoundException if Alarm doesn't exist or has a different tenant id - findAlarm(tenantId, id, h); - h.execute(sql, id); - } - } - - @Override - public List find(String tenantId, String alarmDefId, String metricName, - Map metricDimensions, AlarmState state, - List severities, String lifecycleState, String link, - DateTime stateUpdatedStart, List sortBy, - String offset, int limit, boolean enforceLimit) { - - StringBuilder - sbWhere = - new StringBuilder("(select a.id " - + "from alarm as a, alarm_definition as ad " - + "where ad.id = a.alarm_definition_id " - + " and ad.deleted_at is null " - + " and ad.tenant_id = :tenantId "); - - if (alarmDefId != null) { - sbWhere.append(" and ad.id = :alarmDefId "); - } - - if (metricName != null) { - - sbWhere.append(" and a.id in (select distinct a.id from alarm as a " - + "inner join alarm_metric as am on am.alarm_id = a.id " - + "inner join metric_definition_dimensions as mdd " - + " on mdd.id = am.metric_definition_dimensions_id " - + "inner join (select distinct id from metric_definition " - + " where name = :metricName) as md " - + " on md.id = mdd.metric_definition_id "); - - buildJoinClauseFor(metricDimensions, sbWhere); - - sbWhere.append(")"); - - } else if (metricDimensions != null) { - - sbWhere.append(" and a.id in (select distinct a.id from alarm as a " - + "inner join alarm_metric as am on am.alarm_id = a.id " - + "inner join metric_definition_dimensions as mdd " - + " on mdd.id = am.metric_definition_dimensions_id "); - - buildJoinClauseFor(metricDimensions, sbWhere); - - sbWhere.append(")"); - - } - - if (state != null) { - sbWhere.append(" and a.state = :state"); - } - - sbWhere.append(MySQLUtils.buildSeverityAndClause(severities)); - - if (lifecycleState != null) { - sbWhere.append(" and a.lifecycle_state = :lifecycleState"); - } - - if (link != null) { - sbWhere.append(" and a.link = :link"); - } - - if (stateUpdatedStart != null) { - sbWhere.append(" and a.state_updated_at >= :stateUpdatedStart"); - } - - StringBuilder orderClause = new StringBuilder(); - - if (sortBy != null && !sortBy.isEmpty()) { - // Convert friendly names to column names - replaceFieldName(sortBy, "alarm_id", "a.id"); - replaceFieldName(sortBy, "alarm_definition_id", "ad.id"); - replaceFieldName(sortBy, "alarm_definition_name", "ad.name"); - replaceFieldName(sortBy, "created_timestamp", "a.created_at"); - replaceFieldName(sortBy, "updated_timestamp", "a.updated_at"); - replaceFieldName(sortBy, "state_updated_timestamp", "a.state_updated_at"); - replaceFieldName(sortBy, "state", "FIELD(state, \"OK\", \"UNDETERMINED\", \"ALARM\")"); - replaceFieldName(sortBy, "severity", "FIELD(severity, \"LOW\", \"MEDIUM\", \"HIGH\", \"CRITICAL\")"); - - orderClause.append(" order by "); - orderClause.append(COMMA_JOINER.join(sortBy)); - // if alarm_id is not in the list, add it - if (orderClause.indexOf("a.id") == -1) { - orderClause.append(",a.id ASC"); - } - orderClause.append(' '); - } else { - orderClause.append(" order by a.id ASC "); - } - - sbWhere.append(orderClause); - - if (enforceLimit && limit > 0) { - sbWhere.append(" limit :limit"); - } - - if (offset != null) { - sbWhere.append(" offset "); - sbWhere.append(offset); - sbWhere.append(' '); - } - - sbWhere.append(")"); - - String sql = String.format(FIND_ALARMS_SQL, sbWhere, orderClause); - - try (Handle h = db.open()) { - - final Query> q = h.createQuery(sql).bind("tenantId", tenantId); - - if (alarmDefId != null) { 
- q.bind("alarmDefId", alarmDefId); - } - - if (metricName != null) { - q.bind("metricName", metricName); - } - - if (state != null) { - q.bind("state", state.name()); - } - - MySQLUtils.bindSeverityToQuery(q, severities); - - if (lifecycleState != null) { - q.bind("lifecycleState", lifecycleState); - } - - if (link != null) { - q.bind("link", link); - } - - if (stateUpdatedStart != null) { - q.bind("stateUpdatedStart", stateUpdatedStart.toString()); - } - - if (enforceLimit && limit > 0) { - q.bind("limit", limit + 1); - } - - DimensionQueries.bindDimensionsToQuery(q, metricDimensions); - - final List> rows = q.list(); - - return createAlarms(tenantId, rows); - - } - } - - private void replaceFieldName(List list, String oldString, String newString) { - for (int i = 0; i < list.size(); i++) { - String listElement = list.get(i); - String columnName = SPACE_SPLITTER.splitToList(listElement).get(0); - if (columnName.equals(oldString)) { - list.set(i, listElement.replace(oldString, newString)); - } - } - } - - @Override - public Alarm findById(String tenantId, String alarmId) { - try (Handle h = db.open()) { - return findAlarm(tenantId, alarmId, h); - } - } - - private Alarm findAlarm(String tenantId, String alarmId, Handle h) { - - final String sql = String.format(FIND_ALARM_BY_ID_SQL, " and a.id = :id", ""); - - final List> rows = h.createQuery(sql).bind("id", alarmId) - .bind("tenantId", tenantId) - .list(); - - if (rows.isEmpty()) { - throw new EntityNotFoundException("No alarm exists for %s", alarmId); - } - - return createAlarms(tenantId, rows).get(0); - } - - private List createAlarms(String tenantId, List> rows) { - Alarm alarm; - String previousAlarmId = null; - final List alarms = new LinkedList<>(); - List alarmedMetrics = null; - for (final Map row : rows) { - final String alarmId = (String) row.get("alarm_id"); - if (!alarmId.equals(previousAlarmId)) { - alarmedMetrics = new ArrayList<>(); - alarm = - new Alarm(alarmId, getString(row, "alarm_definition_id"), getString(row, - "alarm_definition_name"), getString(row, "severity"), alarmedMetrics, - AlarmState.valueOf(getString(row, "state")), - getString(row, "lifecycle_state"), - getString(row, "link"), - new DateTime(((Timestamp)row.get("state_updated_timestamp")).getTime(), DateTimeZone.forID("UTC")), - new DateTime(((Timestamp)row.get("updated_timestamp")).getTime(), DateTimeZone.forID("UTC")), - new DateTime(((Timestamp)row.get("created_timestamp")).getTime(), DateTimeZone.forID("UTC"))); - alarms.add(alarm); - } - previousAlarmId = alarmId; - final Map dimensionMap = new HashMap<>(); - - // Not all Metrics have dimensions (at least theoretically) - if (row.containsKey("metric_dimensions")) { - final String dimensions = getString(row, "metric_dimensions"); - if (dimensions != null && !dimensions.isEmpty()) { - for (String dimension : dimensions.split(",")) { - final String[] parsed_dimension = dimension.split("="); - if (parsed_dimension.length == 2) { - dimensionMap.put(parsed_dimension[0], parsed_dimension[1]); - } else { - logger.error("Failed to parse dimension. 
Dimension is malformed: {}", dimension); - } - } - } - } - - alarmedMetrics.add(new MetricDefinition(getString(row, "metric_name"), dimensionMap)); - } - return alarms; - } - - private String getString(final Map row, String fieldName) { - return (String) row.get(fieldName); - } - - @Override - public Alarm update(String tenantId, String id, AlarmState state, String lifecycleState, String link) { - Handle h = db.open(); - - try { - h.begin(); - final Alarm originalAlarm = findAlarm(tenantId, id, h); - if (!originalAlarm.getState().equals(state)) { - h.insert( - "update alarm set state = ?, state_updated_at = NOW() where id = ?", - state.name(), id); - } - h.insert("update alarm set lifecycle_state = ?, link = ?, updated_at = NOW() where id = ?", - lifecycleState, link, id); - h.commit(); - return originalAlarm; - } catch (RuntimeException e) { - h.rollback(); - throw e; - } finally { - h.close(); - } - } - - public static class SubAlarm { - - private String id; - private String expression; - - public SubAlarm() { - } - - public SubAlarm(String id, String expression) { - this.id = id; - this.expression = expression; - } - - public String getId() { - return id; - } - - public void setId(String id) { - this.id = id; - } - - public String getExpression() { - return expression; - } - - public void setExpression(String expression) { - this.expression = expression; - } - } - - @Override - public Map findAlarmSubExpressions(String alarmId) { - try (Handle h = db.open()) { - final List result = h - .createQuery("select * from sub_alarm where alarm_id = :alarmId") - .bind("alarmId", alarmId) - .map(new BeanMapper<>(SubAlarm.class)).list(); - final Map subAlarms = new HashMap<>(result.size()); - - for (SubAlarm row : result) { - subAlarms.put(row.id, AlarmSubExpression.of(row.expression)); - } - - return subAlarms; - } - } - - @Override - public Map> findAlarmSubExpressionsForAlarmDefinition( - String alarmDefinitionId) { - - try (Handle h = db.open()) { - final List> rows = h - .createQuery( - "select sa.* from sub_alarm as sa, alarm as a where sa.alarm_id=a.id and a.alarm_definition_id = :alarmDefinitionId") - .bind("alarmDefinitionId", alarmDefinitionId).list(); - - Map> subAlarms = new HashMap<>(); - for (Map row : rows) { - final String alarmId = (String) row.get("alarm_id"); - Map alarmMap = subAlarms.get(alarmId); - if (alarmMap == null) { - alarmMap = new HashMap<>(); - subAlarms.put(alarmId, alarmMap); - } - - final String id = (String) row.get("id"); - final String expression = (String) row.get("expression"); - alarmMap.put(id, AlarmSubExpression.of(expression)); - } - - return subAlarms; - } - } - - @Override - public AlarmCount getAlarmsCount(String tenantId, String alarmDefId, String metricName, - Map metricDimensions, AlarmState state, - List severities, String lifecycleState, String link, - DateTime stateUpdatedStart, List groupBy, - String offset, int limit) { - final String SELECT_CLAUSE = "SELECT count(*) as count%1$s " - + " FROM alarm AS a " - + " INNER JOIN alarm_definition as ad on ad.id = a.alarm_definition_id "; - - StringBuilder queryBuilder = new StringBuilder(); - - String groupByStr = ""; - String metricSelect; - if (groupBy != null) { - groupByStr = COMMA_JOINER.join(groupBy); - queryBuilder.append(String.format(SELECT_CLAUSE, ',' + groupByStr)); - - if (groupBy.contains("metric_name") || groupBy.contains("dimension_name") || groupBy - .contains("dimension_value")) { - metricSelect = " INNER JOIN (SELECT distinct am.alarm_id%1$s " - + "FROM metric_definition AS md " - + 
"JOIN metric_definition_dimensions AS mdd on md.id = mdd.metric_definition_id " - + "JOIN metric_dimension AS mdim ON mdd.metric_dimension_set_id = mdim.dimension_set_id " - + "JOIN alarm_metric AS am ON am.metric_definition_dimensions_id = mdd.id)" - + "AS metrics ON a.id = metrics.alarm_id "; - String subSelect = ""; - if (groupBy.contains("metric_name")) { - subSelect = subSelect + ",md.name AS metric_name"; - } - if (groupBy.contains("dimension_name")) { - subSelect = subSelect + ",mdim.name AS dimension_name"; - } - if (groupBy.contains("dimension_value")) { - subSelect = subSelect + ",mdim.value AS dimension_value"; - } - - queryBuilder.append(String.format(metricSelect, subSelect)); - } - } else { - queryBuilder.append(String.format(SELECT_CLAUSE, groupByStr)); - } - - - queryBuilder.append(" INNER JOIN (SELECT a.id " - + "FROM alarm AS a, alarm_definition AS ad " - + "WHERE ad.id = a.alarm_definition_id " - + " AND ad.deleted_at IS NULL " - + " AND ad.tenant_id = :tenantId "); - - if (alarmDefId != null) { - queryBuilder.append(" AND ad.id = :alarmDefId "); - } - - if (metricName != null) { - - queryBuilder.append(" AND a.id IN (SELECT distinct a.id FROM alarm AS a " - + "INNER JOIN alarm_metric AS am ON am.alarm_id = a.id " - + "INNER JOIN metric_definition_dimensions AS mdd " - + " ON mdd.id = am.metric_definition_dimensions_id " - + "INNER JOIN (SELECT distinct id FROM metric_definition " - + " WHERE name = :metricName) AS md " - + " ON md.id = mdd.metric_definition_id "); - - buildJoinClauseFor(metricDimensions, queryBuilder); - - queryBuilder.append(")"); - - } else if (metricDimensions != null) { - - queryBuilder.append(" AND a.id IN (SELECT distinct a.id FROM alarm AS a " - + "INNER JOIN alarm_metric AS am ON am.alarm_id = a.id " - + "INNER JOIN metric_definition_dimensions AS mdd " - + " ON mdd.id = am.metric_definition_dimensions_id "); - - buildJoinClauseFor(metricDimensions, queryBuilder); - - queryBuilder.append(")"); - - } - - if (state != null) { - queryBuilder.append(" AND a.state = :state"); - } - - queryBuilder.append(MySQLUtils.buildSeverityAndClause(severities)); - - if (lifecycleState != null) { - queryBuilder.append(" AND a.lifecycle_state = :lifecycleState"); - } - - if (link != null) { - queryBuilder.append(" AND a.link = :link"); - } - - if (stateUpdatedStart != null) { - queryBuilder.append(" AND a.state_updated_at >= :stateUpdatedStart"); - } - - queryBuilder.append(") AS alarm_id_list ON alarm_id_list.id = a.id "); - - if (groupBy != null) { - queryBuilder.append(" GROUP BY "); - queryBuilder.append(groupByStr); - } - - queryBuilder.append(" ORDER BY "); - if (!Strings.isNullOrEmpty(groupByStr)) { - queryBuilder.append(groupByStr); - } else { - queryBuilder.append(" a.id "); - } - - queryBuilder.append(" LIMIT :limit"); - - if (offset != null) { - queryBuilder.append(String.format(" OFFSET %1$s ", offset)); - } - - - - try (Handle h = db.open()) { - - final Query> q = h.createQuery(queryBuilder.toString()).bind("tenantId", tenantId); - - if (alarmDefId != null) { - q.bind("alarmDefId", alarmDefId); - } - - if (metricName != null) { - q.bind("metricName", metricName); - } - - if (state != null) { - q.bind("state", state.name()); - } - - MySQLUtils.bindSeverityToQuery(q, severities); - - if (lifecycleState != null) { - q.bind("lifecycleState", lifecycleState); - } - - if (link != null) { - q.bind("link", link); - } - - if (stateUpdatedStart != null) { - q.bind("stateUpdatedStart", stateUpdatedStart.toString()); - } - - q.bind("limit", limit + 1); - - 
DimensionQueries.bindDimensionsToQuery(q, metricDimensions); - - final List<Map<String, Object>> rows = q.list(); - - return createAlarmCounts(groupBy, rows); - - } - } - - private AlarmCount createAlarmCounts(List<String> groupBy, List<Map<String, Object>> rows) { - List<List<Object>> counts = new ArrayList<>(); - - // if no results, return 0 and fill columns with null - if (rows.size() == 0) { - List<Object> countsAndTags = new ArrayList<>(); - countsAndTags.add(0); - if (groupBy != null) { - for (final String columnName : groupBy) { - countsAndTags.add(null); - } - } - counts.add(countsAndTags); - return new AlarmCount(groupBy, counts); - } - - for (final Map<String, Object> row : rows) { - List<Object> countAndTags = new ArrayList<>(); - countAndTags.add(row.get("count")); - if (groupBy != null && !groupBy.isEmpty()) { - for (final String columnName : groupBy) { - countAndTags.add(row.get(columnName)); - } - } - counts.add(countAndTags); - } - - return new AlarmCount(groupBy, counts); - } -} diff --git a/java/src/main/java/monasca/api/infrastructure/persistence/mysql/MySQLUtils.java b/java/src/main/java/monasca/api/infrastructure/persistence/mysql/MySQLUtils.java deleted file mode 100644 index 81772b696..000000000 --- a/java/src/main/java/monasca/api/infrastructure/persistence/mysql/MySQLUtils.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * (C) Copyright 2015-2016 Hewlett Packard Enterprise Development Company LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.infrastructure.persistence.mysql; - -import com.google.inject.Inject; - -import org.skife.jdbi.v2.DBI; -import org.skife.jdbi.v2.Handle; -import org.skife.jdbi.v2.Query; -import org.skife.jdbi.v2.util.StringMapper; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.List; -import java.util.Map; - -import javax.inject.Named; - -import monasca.api.infrastructure.persistence.Utils; -import monasca.common.model.alarm.AlarmSeverity; - -public class MySQLUtils - extends Utils { - - private static final Logger logger = - LoggerFactory.getLogger(MySQLUtils.class); - - private final DBI mysql; - - @Inject - public MySQLUtils(@Named("mysql") DBI mysql) { - this.mysql = mysql; - } - - public List<String> findAlarmIds(String tenantId, - Map<String, String> dimensions) { - - final String FIND_ALARM_IDS_SQL = - "select distinct a.id " - + "from alarm as a " - + "join alarm_definition as ad on a.alarm_definition_id = ad.id " - + "%s " - + "where ad.tenant_id = :tenantId and ad.deleted_at is NULL " - + "order by ad.created_at"; - - List<String> alarmIdList; - - try (Handle h = this.mysql.open()) { - - final String sql = String.format(FIND_ALARM_IDS_SQL, this.buildJoinClauseFor(dimensions)); - - Query<Map<String, Object>> query = h.createQuery(sql).bind("tenantId", tenantId); - - logger.debug("mysql sql: {}", sql); - - this.bindDimensionsToQuery(query, dimensions); - - alarmIdList = query.map(StringMapper.FIRST).list(); - } - - return alarmIdList; - } - - private void bindDimensionsToQuery(Query<?> query, Map<String, String> dimensions) { - if (dimensions != null) { - int i = 0; - for (Map.Entry<String, String> entry : dimensions.entrySet()) { - query.bind("dname" + i, entry.getKey()); - query.bind("dvalue" + i, entry.getValue()); - i++; - } - } - } - - public static String buildSeverityAndClause(List<AlarmSeverity> severities) { - StringBuilder sbWhere = new StringBuilder(); - if (severities != null && !severities.isEmpty()) { - sbWhere.append(" and ("); - for (int i = 0; i < severities.size(); i++) { - sbWhere.append("ad.severity = :severity").append(i); - if (i < severities.size() - 1) { - ssbWhere.append(" or "); - } - } - sbWhere.append(") "); - } - return sbWhere.toString(); - } - - public static void bindSeverityToQuery(Query<?> query, List<AlarmSeverity> severities) { - if (severities != null && !severities.isEmpty()) { - for (int i = 0; i < severities.size(); i++) { - query.bind("severity" + String.valueOf(i), severities.get(i).name()); - } - } - } - -} diff --git a/java/src/main/java/monasca/api/infrastructure/persistence/mysql/NotificationMethodMySqlRepoImpl.java b/java/src/main/java/monasca/api/infrastructure/persistence/mysql/NotificationMethodMySqlRepoImpl.java deleted file mode 100644 index daa0478ef..000000000 --- a/java/src/main/java/monasca/api/infrastructure/persistence/mysql/NotificationMethodMySqlRepoImpl.java +++ /dev/null @@ -1,232 +0,0 @@ -/* - * (C) Copyright 2014-2016 Hewlett Packard Enterprise Development LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
- */
-package monasca.api.infrastructure.persistence.mysql;
-
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
-
-import javax.inject.Inject;
-import javax.inject.Named;
-
-import org.skife.jdbi.v2.DBI;
-import org.skife.jdbi.v2.Handle;
-import org.skife.jdbi.v2.Query;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.base.Joiner;
-
-import monasca.api.domain.exception.EntityExistsException;
-import monasca.api.domain.exception.EntityNotFoundException;
-import monasca.api.domain.model.notificationmethod.NotificationMethod;
-import monasca.api.domain.model.notificationmethod.NotificationMethodRepo;
-import monasca.api.infrastructure.persistence.PersistUtils;
-import monasca.common.persistence.BeanMapper;
-
-/**
- * Notification method repository implementation.
- */
-public class NotificationMethodMySqlRepoImpl implements NotificationMethodRepo {
-  private static final Logger LOG = LoggerFactory
-      .getLogger(NotificationMethodMySqlRepoImpl.class);
-  private static final Joiner COMMA_JOINER = Joiner.on(',');
-  private final DBI db;
-  private final PersistUtils persistUtils;
-
-  @Inject
-  public NotificationMethodMySqlRepoImpl(@Named("mysql") DBI db, PersistUtils persistUtils) {
-    this.db = db;
-    this.persistUtils = persistUtils;
-  }
-
-  @Override
-  public NotificationMethod create(String tenantId, String name,
-      String notificationMethodType, String address, int period) {
-    Handle h = db.open();
-    try {
-      h.begin();
-      if (getNotificationIdForTenantIdAndName(h, tenantId, name) != null)
-        throw new EntityExistsException(
-            "Notification method %s \"%s\" already exists.", tenantId, name);
-
-      if (!isValidNotificationMethodType(h, notificationMethodType)) {
-        throw new EntityNotFoundException("Not a valid notification method type %s", notificationMethodType);
-      }
-
-      String id = UUID.randomUUID().toString();
-      h.insert(
-          "insert into notification_method (id, tenant_id, name, type, address, period, created_at, updated_at) values (?, ?, ?, ?, ?, ?, NOW(), NOW())",
-          id, tenantId, name, notificationMethodType, address, period);
-      LOG.debug("Creating notification method {} for {}", name, tenantId);
-      h.commit();
-      return new NotificationMethod(id, name, notificationMethodType, address, period);
-    } catch (RuntimeException e) {
-      h.rollback();
-      throw e;
-    } finally {
-      h.close();
-    }
-  }
-
-  @Override
-  public void deleteById(String tenantId, String notificationMethodId) {
-    try (Handle h = db.open()) {
-      if (h.update("delete from notification_method where tenant_id = ? and id = ?", tenantId,
-          notificationMethodId) == 0)
-        throw new EntityNotFoundException("No notification method exists for %s",
-            notificationMethodId);
-    }
-  }
-
-  @Override
-  public boolean exists(String tenantId, String notificationMethodId) {
-    try (Handle h = db.open()) {
-      return h
-          .createQuery(
-              "select exists(select 1 from notification_method where tenant_id = :tenantId and id = :notificationMethodId)")
-          .bind("tenantId", tenantId).bind("notificationMethodId", notificationMethodId)
-          .mapTo(Boolean.TYPE).first();
-    }
-  }
-
-  private String getNotificationIdForTenantIdAndName(Handle h, String tenantId, String name) {
-    Map<String, Object> map = h
-        .createQuery(
-            "select id from notification_method where tenant_id = :tenantId and name = :name")
-        .bind("tenantId", tenantId).bind("name", name).first();
-    if (map != null && !map.isEmpty()) {
-      return map.get("id").toString();
-    }
-    else {
-      return null;
-    }
-  }
-
-  private boolean isValidNotificationMethodType(Handle h, String notificationMethod) {
-
-    String query = " SELECT name from notification_method_type";
-
-    Query<Map<String, Object>> q = h.createQuery(query);
-    List<Map<String, Object>> result = q.list();
-
-    for (Map<String, Object> m : result) {
-      String method = (String) m.get("name");
-      if (method.equalsIgnoreCase(notificationMethod))
-        return true;
-    }
-    return false;
-  }
-
-  @Override
-  public List<NotificationMethod> find(String tenantId, List<String> sortBy, String offset,
-      int limit) {
-
-    try (Handle h = db.open()) {
-
-      String rawQuery =
-          " SELECT nm.id, nm.tenant_id, nm.name, nm.type, nm.address, nm.period, nm.created_at, nm.updated_at "
-              + "FROM notification_method as nm "
-              + "WHERE tenant_id = :tenantId %1$s %2$s %3$s";
-
-      String orderByPart = "";
-      if (sortBy != null && !sortBy.isEmpty()) {
-        orderByPart = " order by " + COMMA_JOINER.join(sortBy);
-        if (!orderByPart.contains("id")) {
-          orderByPart = orderByPart + ",id";
-        }
-      } else {
-        orderByPart = " order by id ";
-      }
-
-      String limitPart = "";
-      if (limit > 0) {
-        limitPart = " limit :limit";
-      }
-
-      String offsetPart = "";
-      if (offset != null && !offset.isEmpty()) {
-        offsetPart = " offset :offset ";
-      }
-
-      String query = String.format(rawQuery, orderByPart, limitPart, offsetPart);
-
-      Query<Map<String, Object>> q = h.createQuery(query);
-
-      q.bind("tenantId", tenantId);
-
-      if (limit > 0) {
-        q.bind("limit", limit + 1);
-      }
-
-      if (offset != null && !offset.isEmpty()) {
-        q.bind("offset", Integer.parseInt(offset));
-      }
-
-      return q.map(new BeanMapper<>(NotificationMethod.class)).list();
-
-    }
-  }
-
-  @Override
-  public NotificationMethod findById(String tenantId, String notificationMethodId) {
-    try (Handle h = db.open()) {
-      NotificationMethod notificationMethod =
-          h.createQuery(
-              "select * from notification_method where tenant_id = :tenantId and id = :id")
-              .bind("tenantId", tenantId).bind("id", notificationMethodId)
-              .map(new BeanMapper<NotificationMethod>(NotificationMethod.class)).first();
-
-      if (notificationMethod == null)
-        throw new EntityNotFoundException("No notification method exists for %s",
-            notificationMethodId);
-
-      return notificationMethod;
-    }
-  }
-
-  @Override
-  public NotificationMethod update(String tenantId, String notificationMethodId, String name,
-      String notificationMethodType, String address, int period) {
-    Handle h = db.open();
-    try {
-      h.begin();
-      String notificationID = getNotificationIdForTenantIdAndName(h, tenantId, name);
-      if (notificationID != null && !notificationID.equalsIgnoreCase(notificationMethodId)) {
-        throw new EntityExistsException("Notification method %s \"%s\" already exists.",
-            tenantId, name);
-      }
-      if (!isValidNotificationMethodType(h, notificationMethodType)) {
-        throw new EntityNotFoundException("Not a valid notification method type %s", notificationMethodType);
-      }
-
-      if (h
-          .update(
-              "update notification_method set name = ?, type = ?, address = ?, period = ?, updated_at = NOW() "
-                  + "where tenant_id = ? and id = ?",
-              name, notificationMethodType, address, period, tenantId, notificationMethodId) == 0)
-        throw new EntityNotFoundException("No notification method exists for %s",
-            notificationMethodId);
-      h.commit();
-      return new NotificationMethod(notificationMethodId, name, notificationMethodType, address, period);
-    } catch (RuntimeException e) {
-      h.rollback();
-      throw e;
-    } finally {
-      h.close();
-    }
-  }
-}
diff --git a/java/src/main/java/monasca/api/infrastructure/persistence/mysql/NotificationMethodTypesMySqlRepoImpl.java b/java/src/main/java/monasca/api/infrastructure/persistence/mysql/NotificationMethodTypesMySqlRepoImpl.java
deleted file mode 100644
index aa28e3a49..000000000
--- a/java/src/main/java/monasca/api/infrastructure/persistence/mysql/NotificationMethodTypesMySqlRepoImpl.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * (C) Copyright 2016 Hewlett Packard Enterprise Development LP
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package monasca.api.infrastructure.persistence.mysql;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
-import javax.inject.Inject;
-import javax.inject.Named;
-
-import org.skife.jdbi.v2.DBI;
-import org.skife.jdbi.v2.Handle;
-import org.skife.jdbi.v2.Query;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import monasca.api.domain.model.notificationmethod.NotificationMethodTypesRepo;
-import monasca.api.infrastructure.persistence.PersistUtils;
-
-/**
- * Notification method types repository implementation.
- */
-public class NotificationMethodTypesMySqlRepoImpl implements NotificationMethodTypesRepo {
-  private static final Logger LOG = LoggerFactory
-      .getLogger(NotificationMethodTypesMySqlRepoImpl.class);
-
-  private final DBI db;
-  private final PersistUtils persistUtils;
-
-  @Inject
-  public NotificationMethodTypesMySqlRepoImpl(@Named("mysql") DBI db, PersistUtils persistUtils) {
-    this.db = db;
-    this.persistUtils = persistUtils;
-  }
-
-  @Override
-  public List<String> listNotificationMethodTypes() {
-
-    List<String> notification_method_types = new ArrayList<String>();
-    try (Handle h = db.open()) {
-
-      String query = " SELECT name from notification_method_type";
-
-      Query<Map<String, Object>> q = h.createQuery(query);
-      List<Map<String, Object>> result = q.list();
-
-      for (Map<String, Object> m : result) {
-        notification_method_types.add((String) m.get("name"));
-      }
-      return notification_method_types;
-
-    }
-  }
-
-}
diff --git a/java/src/main/java/monasca/api/infrastructure/persistence/vertica/AlarmStateHistoryVerticaRepoImpl.java b/java/src/main/java/monasca/api/infrastructure/persistence/vertica/AlarmStateHistoryVerticaRepoImpl.java
deleted file mode 100644
index e9f36ef5c..000000000
--- a/java/src/main/java/monasca/api/infrastructure/persistence/vertica/AlarmStateHistoryVerticaRepoImpl.java
+++ /dev/null
@@ -1,322 +0,0 @@
-/*
- * Copyright (c) 2014,2016 Hewlett Packard Enterprise Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */ -package monasca.api.infrastructure.persistence.vertica; - -import monasca.api.domain.model.alarmstatehistory.AlarmStateHistory; -import monasca.api.domain.model.alarmstatehistory.AlarmStateHistoryRepo; -import monasca.api.infrastructure.persistence.DimensionQueries; -import monasca.api.infrastructure.persistence.PersistUtils; -import monasca.api.infrastructure.persistence.Utils; -import monasca.common.model.alarm.AlarmState; -import monasca.common.model.alarm.AlarmTransitionSubAlarm; -import monasca.common.model.metric.MetricDefinition; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.PropertyNamingStrategy; - -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.skife.jdbi.v2.DBI; -import org.skife.jdbi.v2.Handle; -import org.skife.jdbi.v2.Query; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.List; -import java.util.Map; -import java.util.TimeZone; - -import javax.annotation.Nullable; -import javax.inject.Inject; -import javax.inject.Named; - -public class AlarmStateHistoryVerticaRepoImpl implements AlarmStateHistoryRepo { - - private static final Logger logger = - LoggerFactory.getLogger(AlarmStateHistoryVerticaRepoImpl.class); - - private static final String FIND_BY_ALARM_IDS_SQL = - "select alarm_id, metrics, old_state, new_state, reason, reason_data, sub_alarms, time_stamp as timestamp " - + "from MonAlarms.StateHistory " - + "where tenant_id = :tenantId %s " - + "order by time_stamp asc " - + "limit :limit"; - - private static final String FIND_BY_ALARM_ID_SQL = - "select alarm_id, metrics, old_state, new_state, reason, reason_data, sub_alarms, time_stamp as timestamp " - + "from MonAlarms.StateHistory " - + "where tenant_id = :tenantId and alarm_id = :alarmId %s " - + "order by time_stamp asc " - + "limit :limit"; - - private static final ObjectMapper objectMapper = new ObjectMapper(); - - static { - objectMapper - .setPropertyNamingStrategy(PropertyNamingStrategy.CAMEL_CASE_TO_LOWER_CASE_WITH_UNDERSCORES); - } - - private static final TypeReference> METRICS_TYPE = - new TypeReference>() {}; - - private static final TypeReference> SUB_ALARMS_TYPE = - new TypeReference>() {}; - - private final DBI vertica; - private final Utils utils; - private final PersistUtils persistUtils; - - private final ThreadLocal simpleDateFormatter; - - @Inject - public AlarmStateHistoryVerticaRepoImpl( - @Named("vertica") DBI vertica, - Utils utils, - PersistUtils persistUtils) { - - this.vertica = vertica; - this.utils = utils; - this.persistUtils = persistUtils; - this.simpleDateFormatter = new ThreadLocal<>(); - } - - @Override - public List findById( - String tenantId, - String alarmId, - String offset, - int limit) { - - String offsetPart = ""; - - if (offset != null && !offset.isEmpty()) { - - offsetPart = (" and time_stamp > :offset"); - - } - - String sql = String.format(FIND_BY_ALARM_ID_SQL, offsetPart); - - logger.debug("vertica sql: {}", sql); - - List alarmStateHistoryList = new ArrayList<>(); - - try (Handle h = vertica.open()) { - - Query> verticaQuery = - h.createQuery(sql) - .bind("tenantId", tenantId) - .bind("alarmId", alarmId) - .bind("limit", limit + 1); - - if (offset != null && !offset.isEmpty()) { - - DateTime offset_dt = new 
DateTime(offset); - verticaQuery.bind("offset", formatDateFromMillis(offset_dt.getMillis())); - - } - - for (Map row : verticaQuery.list()) { - - alarmStateHistoryList.add(getAlarmStateHistory(row)); - - } - - return alarmStateHistoryList; - } - - } - - private String formatDateFromMillis(final long millis) { - if (simpleDateFormatter.get() == null) { - simpleDateFormatter.set(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS")); - simpleDateFormatter.get().setTimeZone(TimeZone.getTimeZone("GMT-0")); - } - return simpleDateFormatter.get().format(new Date(millis)); - } - - @Override - public List find( - String tenantId, - Map dimensions, - DateTime startTime, - @Nullable DateTime endTime, - @Nullable String offset, - int limit) { - - List alarmIds = this.utils.findAlarmIds(tenantId, dimensions); - - if (alarmIds == null || alarmIds.isEmpty()) { - - logger.debug("list of alarm ids is empty"); - - return Collections.emptyList(); - - } - - StringBuilder sb = new StringBuilder(); - - sb.append(" and alarm_id in ("); - - for (int i = 0; i < alarmIds.size(); i++) { - - if (i > 0) { - - sb.append(", "); - } - - sb.append('\'').append(alarmIds.get(i)).append('\''); - - } - - sb.append(')'); - - if (startTime != null) { - - sb.append(" and time_stamp >= :startTime"); - - } - - if (endTime != null) { - - sb.append(" and time_stamp <= :endTime"); - - } - - if (offset != null && !offset.isEmpty()) { - - sb.append(" and time_stamp > :offset"); - - } - - String sql = String.format(FIND_BY_ALARM_IDS_SQL, sb); - - logger.debug("vertica sql: {}", sql); - - List alarmStateHistoryList = new ArrayList<>(); - - try (Handle h = vertica.open()) { - - Query> verticaQuery = - h.createQuery(sql) - .bind("tenantId", tenantId) - .bind("limit", limit + 1); - - if (startTime != null) { - - logger.debug("binding startime: {}", startTime); - - // Timestamp will not work in this query for some unknown reason. - verticaQuery.bind("startTime", formatDateFromMillis(startTime.getMillis())); - - } - - if (endTime != null) { - - logger.debug("binding endtime: {}", endTime); - - // Timestamp will not work in this query for some unknown reason. 
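A note on the formatDateFromMillis() helper above: it keeps one SimpleDateFormat per thread because SimpleDateFormat is not thread-safe, and the repository can be called from many request threads at once. A minimal sketch of the same idiom, with illustrative names not taken from the deleted file (ThreadLocal.withInitial is a Java 8 convenience; the original initializes lazily by hand):

    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.TimeZone;

    final class PerThreadFormatter {
        // One formatter per thread: SimpleDateFormat carries mutable
        // parse/format state, so a shared instance would need locking.
        private static final ThreadLocal<SimpleDateFormat> FORMATTER =
            ThreadLocal.withInitial(() -> {
                SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
                f.setTimeZone(TimeZone.getTimeZone("GMT-0"));
                return f;
            });

        static String format(long millis) {
            return FORMATTER.get().format(new Date(millis));
        }
    }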
- verticaQuery.bind("endTime", formatDateFromMillis(endTime.getMillis())); - - } - - if (offset != null && !offset.isEmpty()) { - - logger.debug("binding offset: {}", offset); - - DateTime offset_dt = new DateTime(offset); - verticaQuery.bind("offset", formatDateFromMillis(offset_dt.getMillis())); - - } - - DimensionQueries.bindDimensionsToQuery(verticaQuery, dimensions); - - for (Map row : verticaQuery.list()) { - - alarmStateHistoryList.add(getAlarmStateHistory(row)); - - } - - } - - return alarmStateHistoryList; - - } - - private AlarmStateHistory getAlarmStateHistory(Map row) { - - AlarmStateHistory alarmStateHistory = new AlarmStateHistory(); - - Date date; - - try { - - date = this.persistUtils.parseTimestamp(row.get("timestamp").toString() + "Z"); - - } catch (ParseException e) { - - logger.error("Failed to parse time", e); - - return null; - } - - DateTime dateTime = new DateTime(date.getTime(), DateTimeZone.UTC); - alarmStateHistory.setTimestamp(dateTime); - - alarmStateHistory.setAlarmId((String) row.get("alarm_id")); - - List metricDefinitionList; - try { - - metricDefinitionList = objectMapper.readValue((String) row.get("metrics"), METRICS_TYPE); - - } catch (IOException e) { - - logger.error("Failed to parse metrics", e); - - metricDefinitionList = new ArrayList<>(); - } - - alarmStateHistory.setMetrics(metricDefinitionList); - - alarmStateHistory.setOldState(AlarmState.valueOf((String) row.get("old_state"))); - alarmStateHistory.setNewState(AlarmState.valueOf((String) row.get("new_state"))); - alarmStateHistory.setReason((String) row.get("reason")); - alarmStateHistory.setReasonData((String) row.get("reason_data")); - - List subAlarmList; - try { - - subAlarmList = objectMapper.readValue((String) row.get("sub_alarms"), SUB_ALARMS_TYPE); - - } catch (IOException e) { - - logger.error("Failed to parse sub-alarms", e); - - subAlarmList = new ArrayList<>(); - } - - alarmStateHistory.setSubAlarms(subAlarmList); - - return alarmStateHistory; - } - -} diff --git a/java/src/main/java/monasca/api/infrastructure/persistence/vertica/DimensionVerticaRepoImpl.java b/java/src/main/java/monasca/api/infrastructure/persistence/vertica/DimensionVerticaRepoImpl.java deleted file mode 100644 index dcac52cef..000000000 --- a/java/src/main/java/monasca/api/infrastructure/persistence/vertica/DimensionVerticaRepoImpl.java +++ /dev/null @@ -1,196 +0,0 @@ -/* (C) Copyright 2016 Hewlett Packard Enterprise Development LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
- */ -package monasca.api.infrastructure.persistence.vertica; - -import monasca.api.ApiConfig; -import monasca.api.domain.model.dimension.DimensionName; -import monasca.api.domain.model.dimension.DimensionRepo; -import monasca.api.domain.model.dimension.DimensionValue; - -import com.google.common.base.Strings; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -import javax.inject.Inject; -import javax.inject.Named; - -import org.skife.jdbi.v2.DBI; -import org.skife.jdbi.v2.Handle; -import org.skife.jdbi.v2.Query; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class DimensionVerticaRepoImpl implements DimensionRepo { - - private static final Logger logger = LoggerFactory - .getLogger(DimensionVerticaRepoImpl.class); - - private static final String FIND_DIMENSION_VALUES_SQL = - "SELECT %s" // dbHint goes here - + " DISTINCT dims.value as dValue " - + "FROM " - + " MonMetrics.Definitions def," - + " MonMetrics.DefinitionDimensions defdims " - + "LEFT OUTER JOIN" - + " MonMetrics.Dimensions dims" - + " ON dims.dimension_set_id = defdims.dimension_set_id " - + "WHERE" - + " def.id = defdims.definition_id" - + " %s " // optional offset goes here - + " %s " // optional metric name goes here - + " and def.tenant_id = '%s'" // tenant_id goes here - + " and dims.name = '%s' " // dimension name goes here - + "ORDER BY dims.value ASC " - + "%s "; // limit goes here - - private static final String FIND_DIMENSION_NAMES_SQL = - "SELECT %s" // dbHint goes here - + " DISTINCT dims.name as dName " - + "FROM " - + " MonMetrics.Definitions def," - + " MonMetrics.DefinitionDimensions defdims " - + "LEFT OUTER JOIN" - + " MonMetrics.Dimensions dims" - + " ON dims.dimension_set_id = defdims.dimension_set_id " - + "WHERE " - + " def.id = defdims.definition_id" - + " %s " // optional offset goes here - + " %s " // metric name goes here - + " and def.tenant_id = '%s' " // tenant_id goes here - + "ORDER BY dims.name ASC " - + "%s "; // limit goes here - - private final DBI db; - private final String dbHint; - - @Inject - public DimensionVerticaRepoImpl( - @Named("vertica") DBI db, ApiConfig config) - { - this.db = db; - this.dbHint = config.vertica.dbHint; - } - - @Override - public List findValues( - String metricName, - String tenantId, - String dimensionName, - String offset, - int limit) throws Exception - { - String offsetPart = ""; - String metricNamePart = ""; - - try (Handle h = db.open()) { - - if (offset != null && !offset.isEmpty()) { - offsetPart = " and dims.value > :offset"; - } - - if (metricName != null && !metricName.isEmpty()) { - metricNamePart = " and def.name = :metricName"; - } - - String limitPart = " limit " + Integer.toString(limit + 1); - - String sql = String.format(FIND_DIMENSION_VALUES_SQL, - this.dbHint, - offsetPart, - metricNamePart, - tenantId, - dimensionName, - limitPart); - - Query> query = h.createQuery(sql); - - - if (!Strings.isNullOrEmpty(offset)) { - logger.debug("binding offset: {}", offset); - query.bind("offset", offset); - } - - if (!Strings.isNullOrEmpty(metricName)) { - logger.debug("binding metricName: {}", metricName); - query.bind("metricName", metricName); - } - - List> rows = query.list(); - - List dimensionValuesList = new ArrayList<>(rows.size()); - - for (Map row : rows) { - String dimensionValue = (String) row.get("dValue"); - DimensionValue dimValue = new DimensionValue(metricName, dimensionName, dimensionValue); - dimensionValuesList.add(dimValue); - } - return dimensionValuesList; - } - - } - - @Override - 
public List findNames( - String metricName, - String tenantId, - String offset, - int limit) throws Exception - { - String offsetPart = ""; - String metricNamePart = ""; - - try (Handle h = db.open()) { - - if (!Strings.isNullOrEmpty(offset)) { - offsetPart = " and dims.name > :offset"; - } - - if (!Strings.isNullOrEmpty(metricName)) { - metricNamePart = " and def.name = :metricName"; - } - - String limitPart = " limit " + Integer.toString(limit + 1); - - String sql = String.format(FIND_DIMENSION_NAMES_SQL, - this.dbHint, - offsetPart, - metricNamePart, - tenantId, - limitPart); - - Query> query = h.createQuery(sql); - - if (!Strings.isNullOrEmpty(offset)) { - logger.debug("binding offset: {}", offset); - query.bind("offset", offset); - } - - if (!Strings.isNullOrEmpty(metricName)) { - logger.debug("binding metricName: {}", metricName); - query.bind("metricName", metricName); - } - - List> rows = query.list(); - List dimensionNamesList = new ArrayList<>(rows.size()); - - for (Map row : rows) { - String dimensionName = (String) row.get("dName"); - DimensionName dimName = new DimensionName(metricName, dimensionName); - dimensionNamesList.add(dimName); - } - return dimensionNamesList; - } - - } -} diff --git a/java/src/main/java/monasca/api/infrastructure/persistence/vertica/MeasurementVerticaRepoImpl.java b/java/src/main/java/monasca/api/infrastructure/persistence/vertica/MeasurementVerticaRepoImpl.java deleted file mode 100644 index 219620a5d..000000000 --- a/java/src/main/java/monasca/api/infrastructure/persistence/vertica/MeasurementVerticaRepoImpl.java +++ /dev/null @@ -1,304 +0,0 @@ -/* Copyright (c) 2014, 2016 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
- */
-package monasca.api.infrastructure.persistence.vertica;
-
-import monasca.api.domain.exception.MultipleMetricsException;
-import monasca.api.domain.model.measurement.MeasurementRepo;
-import monasca.api.domain.model.measurement.Measurements;
-import monasca.api.ApiConfig;
-
-import com.fasterxml.jackson.core.type.TypeReference;
-import com.fasterxml.jackson.databind.ObjectMapper;
-
-import java.io.IOException;
-import java.sql.Timestamp;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-
-import javax.annotation.Nullable;
-import javax.inject.Inject;
-import javax.inject.Named;
-
-import org.joda.time.DateTime;
-import org.joda.time.format.DateTimeFormatter;
-import org.joda.time.format.ISODateTimeFormat;
-import org.skife.jdbi.v2.DBI;
-import org.skife.jdbi.v2.Handle;
-import org.skife.jdbi.v2.Query;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class MeasurementVerticaRepoImpl implements MeasurementRepo {
-
-  private static final Logger logger = LoggerFactory
-      .getLogger(MeasurementVerticaRepoImpl.class);
-
-  public static final DateTimeFormatter DATETIME_FORMATTER =
-      ISODateTimeFormat.dateTime().withZoneUTC();
-
-  private static final String FIND_BY_METRIC_DEF_SQL =
-      "SELECT %s " // db hint to satisfy query
-          + "%s to_hex(mes.definition_dimensions_id) as def_dims_id, " // select for groupBy if present
-          + "mes.time_stamp, mes.value, mes.value_meta "
-          + "FROM MonMetrics.Measurements mes "
-          + "%s" // joins for group by
-          + "WHERE mes.time_stamp >= :startTime "
-          + "%s " // endtime and offset here
-          + "AND TO_HEX(definition_dimensions_id) IN (%s) " // id subquery here
-          + "ORDER BY %s" // sort by id if not merging
-          + "mes.time_stamp ASC "
-          + "LIMIT :limit";
-
-  private final DBI db;
-
-  private final ObjectMapper objectMapper = new ObjectMapper();
-
-  private final static TypeReference<Map<String, String>> VALUE_META_TYPE = new TypeReference<Map<String, String>>() {};
-
-  private final String dbHint;
-
-  @Inject
-  public MeasurementVerticaRepoImpl(
-      @Named("vertica") DBI db, ApiConfig config)
-  {
-    this.db = db;
-    this.dbHint = config.vertica.dbHint;
-  }
-
-  @Override
-  public List<Measurements> find(
-      String tenantId,
-      String name,
-      Map<String, String> dimensions,
-      DateTime startTime,
-      @Nullable DateTime endTime,
-      @Nullable String offset,
-      int limit,
-      Boolean mergeMetricsFlag,
-      List<String> groupBy) throws MultipleMetricsException {
-
-    try (Handle h = db.open()) {
-
-      Map<String, Measurements> results = new HashMap<>();
-
-      if (groupBy.isEmpty() && !Boolean.TRUE.equals(mergeMetricsFlag)) {
-        MetricQueries.checkForMultipleDefinitions(h, tenantId, name, dimensions);
-      }
-
-      StringBuilder endtimeAndOffsetSql = new StringBuilder();
-
-      if (endTime != null) {
-
-        endtimeAndOffsetSql.append(" and mes.time_stamp <= :endTime");
-
-      }
-
-      String concatGroupByString = MetricQueries.buildGroupByConcatString(groupBy);
-
-      if (offset != null && !offset.isEmpty()) {
-
-        if (!groupBy.isEmpty() && groupBy.contains("*")) {
-
-          endtimeAndOffsetSql.append(" and (TO_HEX(mes.definition_dimensions_id) > :offset_id "
-              + "or (TO_HEX(mes.definition_dimensions_id) = :offset_id and mes.time_stamp > :offset_timestamp)) ");
-
-        } else if (!groupBy.isEmpty()) {
-
-          endtimeAndOffsetSql.append(" AND (").append(concatGroupByString)
-              .append(" > :offset_id OR (").append(concatGroupByString)
-              .append(" = :offset_id AND mes.time_stamp > :offset_timestamp)) ");
-
-        } else {
-
-          endtimeAndOffsetSql.append(" and mes.time_stamp > :offset_timestamp ");
-
-        }
-
-      }
-
String orderById = ""; - if (Boolean.FALSE.equals(mergeMetricsFlag)) { - - if (!groupBy.isEmpty() && !groupBy.contains("*")) { - orderById += MetricQueries.buildGroupByCommaString(groupBy) + ','; - } - if (orderById.isEmpty()) - orderById += "mes.definition_dimensions_id,"; - } - - String groupBySelect = concatGroupByString; - if (!groupBySelect.isEmpty()) - groupBySelect += " as dimension_values, "; - - String sql = String.format( - FIND_BY_METRIC_DEF_SQL, - this.dbHint, - groupBySelect, - MetricQueries.buildGroupBySql(groupBy), - endtimeAndOffsetSql, - MetricQueries.buildMetricDefinitionSubSql(name, dimensions, null, null), - orderById); - - logger.debug(sql); - - Query> query = h.createQuery(sql) - .bind("tenantId", tenantId) - .bind("startTime", new Timestamp(startTime.getMillis())) - .bind("limit", limit + 1); - - if (name != null && !name.isEmpty()) { - query.bind("name", name); - } - - MetricQueries.bindDimensionsToQuery(query, dimensions); - - if (endTime != null) { - logger.debug("binding endtime: {}", endTime); - - query.bind("endTime", new Timestamp(endTime.getMillis())); - - } - - if (!groupBy.isEmpty() && !groupBy.contains("*")) { - logger.debug("binding groupBy: {}", groupBy); - - MetricQueries.bindGroupBy(query, groupBy); - } - - if (offset != null && !offset.isEmpty()) { - logger.debug("binding offset: {}", offset); - - MetricQueries.bindOffsetToQuery(query, offset); - - } - - List> rows = query.list(); - - if (rows.size() == 0) { - return new ArrayList<>(); - } - - if (!groupBy.isEmpty() && groupBy.contains("*")) { - - String currentDefId = null; - - for (Map row : rows) { - - String defDimsId = (String) row.get("def_dims_id"); - - if (defDimsId != null && !defDimsId.equals(currentDefId)) { - currentDefId = defDimsId; - results.put(defDimsId, new Measurements()); - } - - List measurement = parseRow(row); - - results.get(defDimsId).addMeasurement(measurement); - - } - - MetricQueries.addDefsToResults(results, h, this.dbHint); - - } else if (!groupBy.isEmpty()) { - - String currentId = null; - - for (Map row : rows) { - - String dimensionValues = (String) row.get("dimension_values"); - - if (dimensionValues != null && !dimensionValues.equals(currentId)) { - currentId = dimensionValues; - - Measurements tmp = new Measurements(); - tmp.setId(dimensionValues); - tmp.setName(name); - tmp.setDimensions(MetricQueries.combineGroupByAndValues(groupBy, dimensionValues)); - - results.put(dimensionValues, tmp); - } - - List measurement = parseRow(row); - - results.get(dimensionValues).addMeasurement(measurement); - - } - - } else { - - Measurements firstMeasurement = new Measurements(); - - firstMeasurement.setName(name); - - String firstDefDimsId = (String) rows.get(0).get("def_dims_id"); - - for (Map row : rows) { - - List measurement = parseRow(row); - - firstMeasurement.addMeasurement(measurement); - - } - - results.put(firstDefDimsId, firstMeasurement); - - if (!Boolean.TRUE.equals(mergeMetricsFlag)) { - firstMeasurement.setId(firstDefDimsId); - MetricQueries.addDefsToResults(results, h, this.dbHint); - } else { - if (dimensions == null) { - dimensions = new HashMap<>(); - } - firstMeasurement.setDimensions(dimensions); - } - - } - - List returnValue = new ArrayList<>(results.values()); - Collections.sort(returnValue); - - return returnValue; - } - } - - private List parseRow(Map row) { - - String timestamp = DATETIME_FORMATTER.print(((Timestamp) row.get("time_stamp")).getTime()); - - double value = (double) row.get("value"); - - String valueMetaString = (String) 
row.get("value_meta"); - - Map valueMetaMap = new HashMap<>(); - - if (valueMetaString != null && !valueMetaString.isEmpty()) { - - try { - - valueMetaMap = this.objectMapper.readValue(valueMetaString, VALUE_META_TYPE); - - } catch (IOException e) { - - logger.error("failed to parse value metadata: {}", valueMetaString); - } - - } - - return Arrays.asList(timestamp, value, valueMetaMap); - } -} diff --git a/java/src/main/java/monasca/api/infrastructure/persistence/vertica/MetricDefinitionVerticaRepoImpl.java b/java/src/main/java/monasca/api/infrastructure/persistence/vertica/MetricDefinitionVerticaRepoImpl.java deleted file mode 100644 index 56b11da52..000000000 --- a/java/src/main/java/monasca/api/infrastructure/persistence/vertica/MetricDefinitionVerticaRepoImpl.java +++ /dev/null @@ -1,282 +0,0 @@ -/* - * (C) Copyright 2014, 2016, 2017 Hewlett Packard Enterprise Development LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.infrastructure.persistence.vertica; - -import monasca.api.domain.model.metric.MetricDefinitionRepo; -import monasca.api.domain.model.metric.MetricName; -import monasca.api.resource.exception.Exceptions; -import monasca.api.ApiConfig; -import monasca.common.model.metric.MetricDefinition; - -import org.apache.commons.codec.DecoderException; -import org.apache.commons.codec.binary.Hex; -import org.joda.time.DateTime; -import org.skife.jdbi.v2.DBI; -import org.skife.jdbi.v2.Handle; -import org.skife.jdbi.v2.Query; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import javax.inject.Inject; -import javax.inject.Named; - -public class MetricDefinitionVerticaRepoImpl implements MetricDefinitionRepo { - - private static final Logger - logger = - LoggerFactory.getLogger(MetricDefinitionVerticaRepoImpl.class); - - private static final String METRIC_DEF_SUB_QUERY = - "SELECT TO_HEX(defDimsSub.id) " - + "FROM MonMetrics.Definitions defSub " - + "JOIN MonMetrics.DefinitionDimensions defDimsSub ON defSub.id = defDimsSub.definition_id " - + "%s " // possible measurements time join here - + "WHERE defSub.tenant_id = :tenantId " - + "%s " // Name goes here - + "%s " // Offset goes here - + "%s " // Dimensions and clause goes here - + "%s " // possible time and clause here - + "GROUP BY defDimsSub.id " - + "ORDER BY defDimsSub.id ASC " - + "%s "; // limit goes here - - private static final String FIND_METRIC_NAMES_SQL = - "SELECT %s distinct def.name " - + "FROM MonMetrics.Definitions def " - + "JOIN MonMetrics.DefinitionDimensions defDimsSub ON def.id = defDimsSub.definition_id " - + "WHERE def.tenant_id = :tenantId " // tenantId - + "%s " // Offset goes here - + "%s " // Dimensions and clause goes here - + "ORDER BY def.name ASC " - + "%s "; // Limit goes here. 
- - private static final String TABLE_TO_JOIN_ON = "defDimsSub"; - - private final DBI db; - - private final String dbHint; - - @Inject - public MetricDefinitionVerticaRepoImpl(@Named("vertica") DBI db, ApiConfig config) - { - this.db = db; - this.dbHint = config.vertica.dbHint; - } - - @Override - public List findNames( - String tenantId, Map dimensions, - String offset, - int limit) throws Exception { - - List> rows = executeMetricNamesQuery(tenantId, dimensions, offset, limit); - - List metricNameList = new ArrayList<>(rows.size()); - - for (Map row : rows) { - - String name = (String) row.get("name"); - - MetricName metricName = new MetricName(name); - - metricNameList.add(metricName); - - } - - return metricNameList; - - } - - private List> executeMetricNamesQuery( - String tenantId, - Map dimensions, - String offset, - int limit) { - - String offsetPart = ""; - - if (offset != null && !offset.isEmpty()) { - - offsetPart = " and def.name > '" + offset + "' "; - - } - - // Can't bind limit in a nested sub query. So, just tack on as String. - String limitPart = " limit " + Integer.toString(limit + 1); - - try (Handle h = db.open()) { - String sql = String.format( - FIND_METRIC_NAMES_SQL, - this.dbHint, - offsetPart, - MetricQueries.buildDimensionAndClause(dimensions, TABLE_TO_JOIN_ON), - limitPart); - - Query> query = h.createQuery(sql).bind("tenantId", tenantId); - - MetricQueries.bindDimensionsToQuery(query, dimensions); - - return query.list(); - - } - } - - @Override - public List find( - String tenantId, - String name, - Map dimensions, - DateTime startTime, - DateTime endTime, - String offset, - int limit) { - - List> - rows = - executeMetricDefsQuery(tenantId, name, dimensions, startTime, endTime, offset, limit); - - List metricDefs = new ArrayList<>(rows.size()); - - String currentDefDimId = null; - - Map dims = null; - - for (Map row : rows) { - - String defDimId = (String) row.get("defDimsId"); - - String metricName = (String) row.get("name"); - - String dimName = (String) row.get("dName"); - - String dimValue = (String) row.get("dValue"); - - if (defDimId == null || !defDimId.equals(currentDefDimId)) { - - currentDefDimId = defDimId; - - dims = new HashMap<>(); - - if (dimName != null && dimValue != null) { - - dims.put(dimName, dimValue); - - } - - MetricDefinition m = new MetricDefinition(metricName, dims); - m.setId(defDimId); - metricDefs.add(m); - - - } else { - - dims.put(dimName, dimValue); - - } - } - - return metricDefs; - } - - private List> executeMetricDefsQuery( - String tenantId, - String name, - Map dimensions, - DateTime startTime, - DateTime endTime, - String offset, - int limit) { - - String namePart = ""; - - if (name != null && !name.isEmpty()) { - - namePart = " and defSub.name = :name "; - - } - - String offsetPart = ""; - - if (offset != null && !offset.isEmpty()) { - - offsetPart = " and defDimsSub.id > :offset "; - - } - - String limitPart = ""; - - if (limit > 0) { - - limitPart = "limit " + Integer.toString(limit + 1); - - } - - try (Handle h = db.open()) { - - String sql = - String.format(MetricQueries.FIND_METRIC_DEFS_SQL, - this.dbHint, - String.format(METRIC_DEF_SUB_QUERY, - MetricQueries.buildTimeJoin(startTime), - namePart, - offsetPart, - MetricQueries.buildDimensionAndClause(dimensions, - TABLE_TO_JOIN_ON), - MetricQueries.buildTimeAndClause(startTime, endTime), - limitPart) - ); - - Query> query = h.createQuery(sql).bind("tenantId", tenantId); - - if (name != null && !name.isEmpty()) { - logger.debug("binding name: {}", name); - 
query.bind("name", name); - } - - if (startTime != null) { - query.bind("startTime", startTime); - } - - if (endTime != null) { - query.bind("endTime", endTime); - } - - if (offset != null && !offset.isEmpty()) { - - logger.debug("binding offset: {}", offset); - - try { - - query.bind("offset", Hex.decodeHex(offset.toCharArray())); - - } catch (DecoderException e) { - - throw Exceptions.badRequest("failed to decode offset " + offset, e); - } - - } - - MetricQueries.bindDimensionsToQuery(query, dimensions); - - return query.list(); - - } - } -} diff --git a/java/src/main/java/monasca/api/infrastructure/persistence/vertica/MetricQueries.java b/java/src/main/java/monasca/api/infrastructure/persistence/vertica/MetricQueries.java deleted file mode 100644 index 58894dbd1..000000000 --- a/java/src/main/java/monasca/api/infrastructure/persistence/vertica/MetricQueries.java +++ /dev/null @@ -1,356 +0,0 @@ -/* - * (C) Copyright 2014,2016 Hewlett Packard Enterprise Development LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ - -package monasca.api.infrastructure.persistence.vertica; - -import com.google.common.base.Splitter; -import com.google.common.base.Strings; - -import java.sql.Timestamp; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - - -import org.joda.time.DateTime; -import org.skife.jdbi.v2.Handle; -import org.skife.jdbi.v2.Query; - -import monasca.api.domain.exception.MultipleMetricsException; -import monasca.api.domain.model.measurement.Measurements; - -/** - * Vertica utilities for building metric queries. - */ -final class MetricQueries { - private static final Splitter BAR_SPLITTER = Splitter.on('|').omitEmptyStrings().trimResults(); - private static final Splitter UNDERSCORE_SPLITTER = Splitter.on('_').omitEmptyStrings().trimResults(); - private static final Splitter COMMA_SPLITTER = Splitter.on(',').omitEmptyStrings().trimResults(); - - static final String FIND_METRIC_DEFS_SQL = - "SELECT %s TO_HEX(defDims.id) as defDimsId, def.name, dims.name as dName, dims.value AS dValue " - + "FROM MonMetrics.Definitions def " - + "JOIN MonMetrics.DefinitionDimensions defDims ON def.id = defDims.definition_id " - // Outer join needed in case there are no dimensions for a definition. 
- + "LEFT OUTER JOIN MonMetrics.Dimensions dims ON dims.dimension_set_id = defDims" - + ".dimension_set_id " - + "WHERE TO_HEX(defDims.id) in (%s) " - + "ORDER BY defDims.id ASC"; - - static final String METRIC_DEF_SUB_SQL = - "SELECT TO_HEX(defDimsSub.id) as id " - + "FROM MonMetrics.Definitions as defSub " - + "JOIN MonMetrics.DefinitionDimensions as defDimsSub ON defDimsSub.definition_id = defSub.id " - + "%s " // possible measurements time join here - + "WHERE defSub.tenant_id = :tenantId " - + "%s " // metric name here - + "%s " // dimension and clause here - + "%s " // possible time and clause here - + "GROUP BY defDimsSub.id"; - - private static final String MEASUREMENT_AND_CLAUSE = - "AND time_stamp >= :startTime "; // start or start and end time here - - private static final String MEASUREMENT_JOIN = - "JOIN MonMetrics.Measurements AS meas ON defDimsSub.id = meas.definition_dimensions_id"; - - private static final String TABLE_TO_JOIN_ON = "defDimsSub"; - - private MetricQueries() {} - - static String buildMetricDefinitionSubSql(String name, Map dimensions, - DateTime startTime, DateTime endTime) { - - String namePart = ""; - - if (name != null && !name.isEmpty()) { - namePart = "AND defSub.name = :name "; - } - - return String.format(METRIC_DEF_SUB_SQL, - buildTimeJoin(startTime), - namePart, - buildDimensionAndClause(dimensions, TABLE_TO_JOIN_ON), - buildTimeAndClause(startTime, endTime)); - } - - static String buildDimensionAndClause(Map dimensions, - String tableToJoinName) { - - if (dimensions == null || dimensions.isEmpty()) { - return ""; - } - - StringBuilder sb = new StringBuilder(); - sb.append(" and ").append(tableToJoinName).append( - ".id in ( " - + "SELECT defDimsSub2.id FROM MonMetrics.Dimensions AS dimSub " + - "JOIN MonMetrics.DefinitionDimensions AS defDimsSub2 " + - "ON defDimsSub2.dimension_set_id = dimSub.dimension_set_id" + - " WHERE ("); - - int i = 0; - for (Iterator> it = dimensions.entrySet().iterator(); it.hasNext(); i++) { - Map.Entry entry = it.next(); - - sb.append("(name = :dname").append(i); - - String dim_value = entry.getValue(); - if (!Strings.isNullOrEmpty(dim_value)) { - List values = BAR_SPLITTER.splitToList(dim_value); - - if (values.size() > 1) { - sb.append(" and ( "); - - for (int j = 0; j < values.size(); j++) { - sb.append("value = :dvalue").append(i).append('_').append(j); - - if (j < values.size() - 1) { - sb.append(" or "); - } - } - sb.append(")"); - - } else { - sb.append(" and value = :dvalue").append(i); - } - } - sb.append(")"); - - if (it.hasNext()) { - sb.append(" or "); - } - } - - sb.append(") GROUP BY defDimsSub2.id,dimSub.dimension_set_id HAVING count(*) = ").append(dimensions.size()).append(") "); - - - return sb.toString(); - } - - static String buildTimeAndClause( - DateTime startTime, - DateTime endTime) - { - if (startTime == null) { - return ""; - } - - StringBuilder timeAndClause = new StringBuilder(); - - timeAndClause.append(MEASUREMENT_AND_CLAUSE); - - if (endTime != null) { - timeAndClause.append("AND time_stamp <= :endTime "); - } - - return timeAndClause.toString(); - } - - static String buildTimeJoin(DateTime startTime) - { - if (startTime == null) { - return ""; - } - - return MEASUREMENT_JOIN; - } - - static void bindDimensionsToQuery(Query query, Map dimensions) { - if (dimensions != null) { - int i = 0; - for (Iterator> it = dimensions.entrySet().iterator(); it.hasNext(); i++) { - Map.Entry entry = it.next(); - query.bind("dname" + i, entry.getKey()); - if (!Strings.isNullOrEmpty(entry.getValue())) { - 
List values = BAR_SPLITTER.splitToList(entry.getValue()); - if (values.size() > 1) { - for (int j = 0; j < values.size(); j++) { - query.bind("dvalue" + i + '_' + j, values.get(j)); - } - } - else { - query.bind("dvalue" + i, entry.getValue()); - } - } - } - } - } - - static void bindOffsetToQuery(Query> query, String offset) { - List offsets = UNDERSCORE_SPLITTER.splitToList(offset); - if (offsets.size() > 1) { - query.bind("offset_id", offsets.get(0)); - query.bind("offset_timestamp", - new Timestamp(DateTime.parse(offsets.get(1)).getMillis())); - } else { - query.bind("offset_timestamp", - new Timestamp(DateTime.parse(offsets.get(0)).getMillis())); - } - } - - static void checkForMultipleDefinitions(Handle h, String tenantId, String name, Map dimensions) - throws MultipleMetricsException { - - String namePart = ""; - if (name != null && !name.isEmpty()) { - namePart = "AND name = :name "; - } - - String sql = String.format(METRIC_DEF_SUB_SQL, - "", - namePart, - buildDimensionAndClause(dimensions, - TABLE_TO_JOIN_ON), - "") + " limit 2"; - - Query> query = h.createQuery(sql); - - query.bind("tenantId", tenantId); - - if (name != null) { - query.bind("name", name); - } - - bindDimensionsToQuery(query, dimensions); - - List> rows = query.list(); - - if (rows.size() > 1) { - throw new MultipleMetricsException(name, dimensions); - } - } - - static void addDefsToResults(Map results, Handle h, String dbHint) { - - StringBuilder sb = new StringBuilder(); - boolean first = true; - for (String id : results.keySet()) { - if (first) { - sb.append("'").append(id).append("'"); - first = false; - } else { - sb.append(',').append("'").append(id).append("'"); - } - } - - String defDimSql = String.format(MetricQueries.FIND_METRIC_DEFS_SQL, - dbHint, - sb.toString()); - - Query> query = h.createQuery(defDimSql); - - List> rows = query.list(); - - String currentDefDimId = null; - - Map dims = null; - - for (Map row : rows) { - - String defDimId = (String) row.get("defDimsId"); - - String defName = (String) row.get("name"); - - String dimName = (String) row.get("dName"); - - String dimValue = (String) row.get("dValue"); - - if (defDimId != null && !defDimId.equals(currentDefDimId)) { - - currentDefDimId = defDimId; - - dims = new HashMap<>(); - - if (dimName != null && dimValue != null) - dims.put(dimName, dimValue); - - results.get(defDimId).setId(defDimId); - - results.get(defDimId).setName(defName); - - results.get(defDimId).setDimensions(dims); - - } else { - - if (dimName != null && dimValue != null) - dims.put(dimName, dimValue); - - } - - } - } - - static Map combineGroupByAndValues(List groupBy, String valueStr) { - List values = COMMA_SPLITTER.splitToList(valueStr); - Map newDimensions = new HashMap<>(); - for (int i = 0; i < groupBy.size(); i++) { - newDimensions.put(groupBy.get(i), values.get(i)); - } - return newDimensions; - } - - static String buildGroupByConcatString(List groupBy) { - if (groupBy.isEmpty() || "*".equals(groupBy.get(0))) - return ""; - - String select = "("; - for (int i = 0; i < groupBy.size(); i++) { - if (i > 0) - select += " || ',' || "; - select += "gb" + i + ".value"; - } - select += ")"; - return select; - } - - static String buildGroupByCommaString(List groupBy) { - String result = ""; - if (!groupBy.contains("*")) { - for (int i = 0; i < groupBy.size(); i++) { - if (i > 0) { - result += ','; - } - result += "gb" + i + ".value"; - } - } - - return result; - } - - static String buildGroupBySql(List groupBy) { - if (groupBy.isEmpty() || "*".equals(groupBy.get(0))) - 
return ""; - - StringBuilder groupBySql = new StringBuilder( - " JOIN MonMetrics.DefinitionDimensions as dd on dd.id = mes.definition_dimensions_id "); - - for (int i = 0; i < groupBy.size(); i++) { - groupBySql.append("JOIN (SELECT dimension_set_id,value FROM MonMetrics.Dimensions WHERE name = "); - groupBySql.append(":groupBy").append(i).append(") as gb").append(i); - groupBySql.append(" ON gb").append(i).append(".dimension_set_id = dd.dimension_set_id "); - } - - return groupBySql.toString(); - } - - static void bindGroupBy(Query> query, List groupBy) { - int i = 0; - for (String value: groupBy) { - query.bind("groupBy" + i, value); - i++; - } - } -} diff --git a/java/src/main/java/monasca/api/infrastructure/persistence/vertica/StatisticVerticaRepoImpl.java b/java/src/main/java/monasca/api/infrastructure/persistence/vertica/StatisticVerticaRepoImpl.java deleted file mode 100644 index b546189a5..000000000 --- a/java/src/main/java/monasca/api/infrastructure/persistence/vertica/StatisticVerticaRepoImpl.java +++ /dev/null @@ -1,386 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.infrastructure.persistence.vertica; - -import monasca.api.domain.exception.MultipleMetricsException; -import monasca.api.domain.model.statistic.StatisticRepo; -import monasca.api.domain.model.statistic.Statistics; -import monasca.api.ApiConfig; - -import org.joda.time.DateTime; -import org.joda.time.format.DateTimeFormatter; -import org.joda.time.format.ISODateTimeFormat; -import org.skife.jdbi.v2.DBI; -import org.skife.jdbi.v2.Handle; -import org.skife.jdbi.v2.Query; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.Timestamp; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import javax.inject.Inject; -import javax.inject.Named; - -public class StatisticVerticaRepoImpl implements StatisticRepo { - - private static final Logger logger = - LoggerFactory.getLogger(StatisticVerticaRepoImpl.class); - - public static final DateTimeFormatter DATETIME_FORMATTER = - ISODateTimeFormat.dateTime().withZoneUTC(); - - private final DBI db; - private final String dbHint; - - @Inject - public StatisticVerticaRepoImpl(@Named("vertica") DBI db, - ApiConfig config) - { - this.db = db; - this.dbHint = config.vertica.dbHint; - } - - @Override - public List find( - String tenantId, - String name, - Map dimensions, - DateTime startTime, - DateTime endTime, - List statisticsCols, - int period, - String offset, - int limit, - Boolean mergeMetricsFlag, - List groupBy) throws MultipleMetricsException { - - Map statisticsMap = new HashMap<>(); - - // Sort the column names so that they match the order of the statistics in the results. 
- List statisticsColumns = createColumnsList(statisticsCols); - - try (Handle h = db.open()) { - - if (groupBy.isEmpty() && !Boolean.TRUE.equals(mergeMetricsFlag)) { - - MetricQueries.checkForMultipleDefinitions(h, tenantId, name, dimensions); - - } - - String sql = createQuery(name, dimensions, period, startTime, endTime, offset, - statisticsCols, mergeMetricsFlag, groupBy); - - logger.debug("vertica sql: {}", sql); - - Query> - query = - h.createQuery(sql) - .bind("tenantId", tenantId) - .bind("start_time", startTime) - .bind("end_time", endTime) - .bind("limit", limit + 1); - - if (name != null && !name.isEmpty()) { - query.bind("name", name); - } - - MetricQueries.bindDimensionsToQuery(query, dimensions); - - if (!groupBy.isEmpty()) { - MetricQueries.bindGroupBy(query, groupBy); - } - - if (offset != null && !offset.isEmpty()) { - logger.debug("binding offset: {}", offset); - - MetricQueries.bindOffsetToQuery(query, offset); - } - - List> rows = query.list(); - - if (rows.size() == 0) { - return new ArrayList<>(); - } - - if (!groupBy.isEmpty() && groupBy.contains("*")) { - - String currentDefId = null; - - for (Map row : rows) { - - List statisticsRow = parseRow(row); - - String defDimsId = (String) row.get("id"); - - if (defDimsId != null && !defDimsId.equals(currentDefId)) { - Statistics newStats = new Statistics(); - newStats.setColumns(statisticsColumns); - - statisticsMap.put(defDimsId, newStats); - currentDefId = defDimsId; - } - - statisticsMap.get(defDimsId).addMeasurement(statisticsRow); - - } - - MetricQueries.addDefsToResults(statisticsMap, h, this.dbHint); - - } else if (!groupBy.isEmpty()) { - - String currentId = null; - - for (Map row : rows) { - - String dimensionValues = (String) row.get("dimension_values"); - - if (dimensionValues != null && !dimensionValues.equals(currentId)) { - currentId = dimensionValues; - - Statistics tmp = new Statistics(); - tmp.setId(dimensionValues); - tmp.setName(name); - tmp.setDimensions(MetricQueries.combineGroupByAndValues(groupBy, dimensionValues)); - - statisticsMap.put(dimensionValues, tmp); - } - - List statisticsRow = parseRow(row); - - statisticsMap.get(dimensionValues).addMeasurement(statisticsRow); - - } - - } else { - - Statistics statistics = new Statistics(); - - statistics.setId(""); - - statistics.setName(name); - - statistics.setColumns(statisticsColumns); - - String firstDefId = (String) rows.get(0).get("id"); - - for (Map row : rows) { - - List statisticsRow = parseRow(row); - - statistics.addMeasurement(statisticsRow); - - } - - statisticsMap.put(firstDefId, statistics); - - if (!Boolean.TRUE.equals(mergeMetricsFlag)) { - statistics.setId(firstDefId); - MetricQueries.addDefsToResults(statisticsMap, h, this.dbHint); - } else { - if (dimensions == null) { - dimensions = new HashMap<>(); - } - statistics.setDimensions(dimensions); - } - } - - } - - List results = new ArrayList<>(statisticsMap.values()); - - Collections.sort(results); - - return results; - } - - private List parseRow(Map row) { - - List statisticsRow = new ArrayList<>(); - - Double sum = (Double) row.get("sum"); - Double average = (Double) row.get("avg"); - Double min = (Double) row.get("min"); - Double max = (Double) row.get("max"); - Long count = (Long) row.get("count"); - Timestamp time_stamp = (Timestamp) row.get("time_interval"); - - if (time_stamp != null) { - statisticsRow.add(DATETIME_FORMATTER.print(time_stamp.getTime())); - } - - if (average != null) { - statisticsRow.add(average); - } - - if (count != null) { - statisticsRow.add(count); - } - - 
if (max != null) { - statisticsRow.add(max); - } - - if (min != null) { - statisticsRow.add(min); - } - - if (sum != null) { - statisticsRow.add(sum); - } - - return statisticsRow; - } - - List createColumnsList( - List list) { - - List copy = new ArrayList<>(); - for (String string : list) { - copy.add(string); - } - Collections.sort(copy); - copy.add(0, "timestamp"); - - return copy; - } - - private String createQuery( - String name, - Map dimensions, - int period, - DateTime startTime, - DateTime endTime, - String offset, - List statistics, - Boolean mergeMetricsFlag, - List groupBy) { - - StringBuilder sb = new StringBuilder(); - - sb.append("SELECT ").append(this.dbHint).append(" "); - if (!groupBy.isEmpty() && !groupBy.contains("*")) { - - sb.append(MetricQueries.buildGroupByConcatString(groupBy)); - sb.append(" as dimension_values, "); - - } - sb.append(" max(to_hex(definition_dimensions_id)) AS id, "); - sb.append(createColumnsStr(statistics)); - - if (period >= 1) { - sb.append("Time_slice(time_stamp, ").append(period); - sb.append(", 'SECOND', 'START') AS time_interval"); - } - - sb.append(" FROM MonMetrics.Measurements as mes "); - if (!groupBy.isEmpty() && !groupBy.contains("*")) { - - sb.append(MetricQueries.buildGroupBySql(groupBy)); - - } - - sb.append("WHERE TO_HEX(definition_dimensions_id) IN (") - .append(MetricQueries.buildMetricDefinitionSubSql(name, dimensions, null, null)) - .append(") "); - sb.append(createWhereClause(startTime, endTime, offset, groupBy)); - - if (period >= 1) { - sb.append(" group by "); - if (!groupBy.isEmpty() && groupBy.contains("*")) { - - sb.append("definition_dimensions_id, "); - - } else if (!groupBy.isEmpty()) { - - for (int i = 0; i < groupBy.size(); i++) { - sb.append("gb").append(i).append(".value,"); - } - - } - sb.append("time_interval "); - - sb.append(" order by "); - if (!groupBy.isEmpty() && groupBy.contains("*")) { - - sb.append("to_hex(definition_dimensions_id),"); - - } else { - - sb.append(MetricQueries.buildGroupByCommaString(groupBy)); - if (!groupBy.isEmpty()) - sb.append(','); - - } - sb.append("time_interval "); - } - - sb.append(" limit :limit"); - - return sb.toString(); - } - - private String createWhereClause( - DateTime startTime, - DateTime endTime, - String offset, - List groupBy) { - - String s = ""; - - if (startTime != null && endTime != null) { - s = "AND time_stamp >= :start_time AND time_stamp <= :end_time "; - } else if (startTime != null) { - s = "AND time_stamp >= :start_time "; - } - - if (offset != null && !offset.isEmpty()) { - - if (!groupBy.isEmpty()) { - s += " AND (TO_HEX(definition_dimensions_id) > :offset_id " - + "OR (TO_HEX(definition_dimensions_id) = :offset_id AND time_stamp > :offset_timestamp)) "; - } else if (!groupBy.isEmpty()){ - - String concatGroupByString = MetricQueries.buildGroupByConcatString(groupBy); - - s += " AND (" + concatGroupByString + " > :offset_id" + - " OR (" + concatGroupByString + " = :offset_id AND mes.time_stamp > :offset_timestamp)) "; - - } else { - s += " AND time_stamp > :offset_timestamp "; - } - - } - - return s; - } - - private String createColumnsStr( - List statistics) { - - StringBuilder sb = new StringBuilder(); - - for (String statistic : statistics) { - - sb.append(statistic + "(mes.value) as " + statistic + ", "); - } - - return sb.toString(); - } - -} diff --git a/java/src/main/java/monasca/api/infrastructure/persistence/vertica/VerticaDataSourceFactory.java b/java/src/main/java/monasca/api/infrastructure/persistence/vertica/VerticaDataSourceFactory.java 
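A note on the column contract in StatisticVerticaRepoImpl above: createColumnsList() sorts the requested statistic names and prepends "timestamp", while parseRow() emits values in the fixed order timestamp, avg, count, max, min, sum, skipping statistics that were not requested. Because that fixed order is itself alphabetical, the sorted column list and each row line up by position. A small illustration of the contract (class name is illustrative):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    final class ColumnOrder {
        public static void main(String[] args) {
            // Mirrors createColumnsList(): sort, then prepend "timestamp".
            List<String> cols = new ArrayList<>(Arrays.asList("max", "avg", "count"));
            Collections.sort(cols);       // [avg, count, max]
            cols.add(0, "timestamp");     // [timestamp, avg, count, max]
            // parseRow() appends timestamp, then avg, count, max (alphabetical),
            // so row values pair with these column names by position.
            System.out.println(cols);
        }
    }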
deleted file mode 100644 index f50908ac0..000000000 --- a/java/src/main/java/monasca/api/infrastructure/persistence/vertica/VerticaDataSourceFactory.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright (c) 2016 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.infrastructure.persistence.vertica; - -import com.fasterxml.jackson.annotation.JsonProperty; -import io.dropwizard.db.DataSourceFactory; - -public class VerticaDataSourceFactory extends DataSourceFactory { - - @JsonProperty - String dbHint = ""; - - public String getDbHint() { - return dbHint; - } - -} diff --git a/java/src/main/java/monasca/api/infrastructure/servlet/MockAuthenticationFilter.java b/java/src/main/java/monasca/api/infrastructure/servlet/MockAuthenticationFilter.java deleted file mode 100644 index c774fae96..000000000 --- a/java/src/main/java/monasca/api/infrastructure/servlet/MockAuthenticationFilter.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.infrastructure.servlet; - -import java.io.IOException; -import java.util.Collections; -import java.util.Enumeration; -import java.util.List; - -import javax.servlet.Filter; -import javax.servlet.FilterChain; -import javax.servlet.FilterConfig; -import javax.servlet.ServletException; -import javax.servlet.ServletRequest; -import javax.servlet.ServletResponse; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletRequestWrapper; - -/** - * Mocks authentication by converting X-Auth-Token headers to X-Tenant-Ids. - */ -public class MockAuthenticationFilter implements Filter { - private static final String X_AUTH_TOKEN_HEADER = "X-Auth-Token"; - - @Override - public void destroy() {} - - @Override - public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) - throws IOException, ServletException { - final HttpServletRequest req = (HttpServletRequest) request; - HttpServletRequestWrapper wrapper = requestWrapperFor(req); - chain.doFilter(wrapper, response); - } - - @Override - public void init(FilterConfig filterConfig) throws ServletException {} - - /** - * Returns an HttpServletRequestWrapper that serves tenant id headers from request attributes. 
-   */
-  private HttpServletRequestWrapper requestWrapperFor(final HttpServletRequest request) {
-    return new HttpServletRequestWrapper(request) {
-      @Override
-      public Object getAttribute(String name) {
-        if (name.equalsIgnoreCase(PostAuthenticationFilter.X_TENANT_ID_HEADER)) {
-          String tenantId = request.getHeader(PostAuthenticationFilter.X_TENANT_ID_HEADER);
-          return tenantId == null ? request.getHeader(X_AUTH_TOKEN_HEADER) : tenantId;
-        }
-        if (name.equalsIgnoreCase(PostAuthenticationFilter.X_IDENTITY_STATUS_ATTRIBUTE))
-          return PostAuthenticationFilter.CONFIRMED_STATUS;
-        if (name.equalsIgnoreCase(PostAuthenticationFilter.X_ROLES_ATTRIBUTE))
-          return "user";
-        return super.getAttribute(name);
-      }
-
-      @Override
-      public String getHeader(String name) {
-        if (name.equalsIgnoreCase(PostAuthenticationFilter.X_TENANT_ID_HEADER))
-          return request.getHeader(X_AUTH_TOKEN_HEADER);
-        return super.getHeader(name);
-      }
-
-      @Override
-      public Enumeration<String> getHeaderNames() {
-        List<String> names = Collections.list(super.getHeaderNames());
-        names.add(PostAuthenticationFilter.X_TENANT_ID_HEADER);
-        return Collections.enumeration(names);
-      }
-
-      @Override
-      public Enumeration<String> getHeaders(String name) {
-        if (name.equalsIgnoreCase(PostAuthenticationFilter.X_TENANT_ID_HEADER)) {
-          String authToken = request.getHeader(X_AUTH_TOKEN_HEADER);
-          return authToken == null ? Collections.emptyEnumeration() : Collections
-              .enumeration(Collections.singleton(authToken));
-        }
-        return super.getHeaders(name);
-      }
-    };
-  }
-}
diff --git a/java/src/main/java/monasca/api/infrastructure/servlet/PostAuthenticationFilter.java b/java/src/main/java/monasca/api/infrastructure/servlet/PostAuthenticationFilter.java
deleted file mode 100644
index 24f1609c7..000000000
--- a/java/src/main/java/monasca/api/infrastructure/servlet/PostAuthenticationFilter.java
+++ /dev/null
@@ -1,215 +0,0 @@
-/*
- * Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package monasca.api.infrastructure.servlet;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Enumeration;
-import java.util.List;
-
-import javax.annotation.Nullable;
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletRequestWrapper;
-import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.core.MediaType;
-
-import monasca.api.infrastructure.servlet.PreAuthenticationFilter.ErrorCapturingServletResponseWrapper;
-
-/**
- * Authenticates requests using header information from the CsMiddleware. Provides the X-TENANT-ID
- * servlet attribute as a request header. Intended to be added to a servlet filter chain after the
- * CsMiddleware TokenAuth filter.
- */ -public class PostAuthenticationFilter implements Filter { - static final String CONFIRMED_STATUS = "CONFIRMED"; - static final String X_ROLES_ATTRIBUTE = "X-ROLES"; - static final String X_MONASCA_AGENT = "X-MONASCA_AGENT"; - static final String X_IDENTITY_STATUS_ATTRIBUTE = "X-IDENTITY-STATUS"; - private static final String X_TENANT_ID_ATTRIBUTE = "X-PROJECT-ID"; - static final String X_TENANT_ID_HEADER = "X-Tenant-Id"; - static final String X_ROLES_HEADER = "X-Roles"; - - private final List defaultAuthorizedRoles = new ArrayList(); - private final List agentAuthorizedRoles = new ArrayList(); - private final List readOnlyAuthorizedRoles = new ArrayList(); - - public PostAuthenticationFilter(List defaultAuthorizedRoles, - List agentAuthorizedRoles, - List readOnlyAuthorizedRoles) { - for (String defaultRole : defaultAuthorizedRoles) { - this.defaultAuthorizedRoles.add(defaultRole.toLowerCase()); - } - for (String agentRole : agentAuthorizedRoles) { - this.agentAuthorizedRoles.add(agentRole.toLowerCase()); - } - - // - // Check for null here so we can support backward compatibility - // of not setting readOnlyAuthorizedRoles in the config file. - // - if (null != readOnlyAuthorizedRoles) { - for (String readOnlyRole : readOnlyAuthorizedRoles) { - this.readOnlyAuthorizedRoles.add(readOnlyRole.toLowerCase()); - } - } - } - - @Override - public void destroy() {} - - @Override - public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) { - final HttpServletRequest req = (HttpServletRequest) request; - ErrorCapturingServletResponseWrapper res = (ErrorCapturingServletResponseWrapper) response; - String tenantIdStr = null; - - try { - // According to CORS spec OPTIONS method does not pass auth info - if (req.getMethod().equals("OPTIONS")) { - chain.doFilter(request, response); - return; - } - - Object tenantId = request.getAttribute(X_TENANT_ID_ATTRIBUTE); - - if (tenantId == null) { - sendAuthError(res, null, null, null); - return; - } - tenantIdStr = tenantId.toString(); - - boolean authenticated = isAuthenticated(req); - boolean authorized = isAuthorized(req); - - if (authenticated && authorized) { - HttpServletRequestWrapper wrapper = requestWrapperFor(req); - chain.doFilter(wrapper, response); - return; - } - - if (authorized) - sendAuthError(res, tenantIdStr, null, null); - else - sendAuthError(res, tenantIdStr, "Tenant is missing a required role to access this service", - null); - } catch (Exception e) { - try { - sendAuthError(res, tenantIdStr, null, e); - } catch (IOException ignore) { - } - } - } - - @Override - public void init(FilterConfig filterConfig) throws ServletException {} - - /** - * @return true if the request is authenticated else false - */ - private boolean isAuthenticated(HttpServletRequest request) { - Object identityStatus = request.getAttribute(X_IDENTITY_STATUS_ATTRIBUTE); - return identityStatus != null && CONFIRMED_STATUS.equalsIgnoreCase(identityStatus.toString()); - } - - /** - * @return true if the request is authorized else false - */ - private boolean isAuthorized(HttpServletRequest request) { - Object rolesFromKeystone = request.getAttribute(X_ROLES_ATTRIBUTE); - if (rolesFromKeystone == null) - return false; - - boolean agentUser = false; - boolean readOnlyUser = false; - - for (String role : rolesFromKeystone.toString().split(",")) { - String lowerCaseRole = role.toLowerCase(); - if ((defaultAuthorizedRoles != null) && defaultAuthorizedRoles.contains(lowerCaseRole)) { - return true; - } - if ((agentAuthorizedRoles != 
null) && agentAuthorizedRoles.contains(lowerCaseRole)) { - agentUser = true; - } - if ((readOnlyAuthorizedRoles != null) && readOnlyAuthorizedRoles.contains(lowerCaseRole)) { - readOnlyUser = true; - } - } - - if (agentUser) { - request.setAttribute(X_MONASCA_AGENT, true); - return true; - } - - if (readOnlyUser && request.getMethod().equals("GET")) { - return true; - } - - return false; - } - - /** - * Returns an HttpServletRequestWrapper that serves tenant id headers from request attributes. - */ - private HttpServletRequestWrapper requestWrapperFor(final HttpServletRequest request) { - return new HttpServletRequestWrapper(request) { - @Override - public String getHeader(String name) { - if (name.equalsIgnoreCase(X_TENANT_ID_HEADER)) - return request.getAttribute(X_TENANT_ID_ATTRIBUTE).toString(); - else if (name.equalsIgnoreCase(X_ROLES_HEADER)) - return request.getAttribute(X_ROLES_ATTRIBUTE).toString(); - return super.getHeader(name); - } - - @Override - public Enumeration getHeaderNames() { - List names = Collections.list(super.getHeaderNames()); - names.add(X_TENANT_ID_HEADER); - names.add(X_ROLES_HEADER); - return Collections.enumeration(names); - } - - @Override - public Enumeration getHeaders(String name) { - if (name.equalsIgnoreCase(X_TENANT_ID_HEADER)) - return Collections.enumeration(Collections.singleton(request.getAttribute( - X_TENANT_ID_ATTRIBUTE).toString())); - else if (name.equalsIgnoreCase(X_ROLES_HEADER)) - return Collections.enumeration(Collections.singleton(request.getAttribute( - X_ROLES_ATTRIBUTE).toString())); - return super.getHeaders(name); - } - }; - } - - private void sendAuthError(ErrorCapturingServletResponseWrapper response, - @Nullable String tenantId, @Nullable String message, @Nullable Exception exception) - throws IOException { - response.setContentType(MediaType.APPLICATION_JSON); - - if (message == null) - response.sendError(HttpServletResponse.SC_UNAUTHORIZED, - tenantId == null ? "Failed to authenticate request" - : "Failed to authenticate request for " + tenantId, exception); - else - response.sendError(HttpServletResponse.SC_UNAUTHORIZED, String.format(message, tenantId)); - } -} diff --git a/java/src/main/java/monasca/api/infrastructure/servlet/PreAuthenticationFilter.java b/java/src/main/java/monasca/api/infrastructure/servlet/PreAuthenticationFilter.java deleted file mode 100644 index 67aecc987..000000000 --- a/java/src/main/java/monasca/api/infrastructure/servlet/PreAuthenticationFilter.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
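The requestWrapperFor bridge in PostAuthenticationFilter above reduces to a small mapping: the Keystone middleware leaves X-PROJECT-ID and X-ROLES as servlet attributes, and the wrapper re-serves them as X-Tenant-Id and X-Roles request headers. A self-contained sketch of that mapping (the map-based stand-in for the servlet request is illustrative):

    import java.util.HashMap;
    import java.util.Map;

    public class HeaderBridgeSketch {
      static String header(Map<String, Object> attributes, String name) {
        if ("X-Tenant-Id".equalsIgnoreCase(name)) {
          return String.valueOf(attributes.get("X-PROJECT-ID"));
        } else if ("X-Roles".equalsIgnoreCase(name)) {
          return String.valueOf(attributes.get("X-ROLES"));
        }
        return null; // the real wrapper defers to the underlying request here
      }

      public static void main(String[] args) {
        Map<String, Object> attrs = new HashMap<>();
        attrs.put("X-PROJECT-ID", "abc123");
        attrs.put("X-ROLES", "monasca-user");
        System.out.println(header(attrs, "X-Tenant-Id")); // abc123
        System.out.println(header(attrs, "X-Roles"));     // monasca-user
      }
    }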
- */ -package monasca.api.infrastructure.servlet; - -import java.io.IOException; - -import javax.servlet.Filter; -import javax.servlet.FilterChain; -import javax.servlet.FilterConfig; -import javax.servlet.ServletException; -import javax.servlet.ServletOutputStream; -import javax.servlet.ServletRequest; -import javax.servlet.ServletResponse; -import javax.servlet.http.HttpServletResponse; -import javax.servlet.http.HttpServletResponseWrapper; -import javax.ws.rs.core.MediaType; - -import org.eclipse.jetty.server.Response; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import monasca.api.resource.exception.Exceptions; -import monasca.api.resource.exception.Exceptions.FaultType; - -/** - * Captures 401 and 500 errors raised further down the servlet filter chain (including those from - * the CsMiddleware TokenAuth filter) and rewrites them as JSON fault responses. Intended to be - * added to a servlet filter chain ahead of the CsMiddleware TokenAuth filter. - */ -public class PreAuthenticationFilter implements Filter { - private static final Logger LOG = LoggerFactory.getLogger(PreAuthenticationFilter.class); - - static class ErrorCapturingServletResponseWrapper extends HttpServletResponseWrapper { - private int statusCode; - private String errorMessage; - private Exception exception; - - public ErrorCapturingServletResponseWrapper(HttpServletResponse response) { - super(response); - } - - @Override - public void sendError(int statusCode) throws IOException { - this.statusCode = statusCode; - } - - @Override - public void sendError(int statusCode, String msg) throws IOException { - this.statusCode = statusCode; - errorMessage = msg; - } - - void sendError(int statusCode, String msg, Exception exception) throws IOException { - sendError(statusCode, msg); - this.exception = exception; - } - } - - @Override - public void destroy() {} - - @Override - public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) { - HttpServletResponse res = (HttpServletResponse) response; - ErrorCapturingServletResponseWrapper responseWrapper = - new ErrorCapturingServletResponseWrapper(res); - - boolean caughtException = false; - ServletOutputStream out = null; - try { - out = res.getOutputStream(); - chain.doFilter(request, responseWrapper); - if (responseWrapper.statusCode != 401 && responseWrapper.statusCode != 500) - return; - - } catch (Exception e) { - LOG.error("Error while executing pre authentication filter", e); - caughtException = true; - } - - try { - res.setContentType(MediaType.APPLICATION_JSON); - if (caughtException) { - res.setStatus(Response.SC_INTERNAL_SERVER_ERROR); - } - else { - res.setStatus(responseWrapper.statusCode); - FaultType faultType; - if (responseWrapper.statusCode == 500) { - faultType = FaultType.SERVER_ERROR; - } - else { - faultType = FaultType.UNAUTHORIZED; - } - String output = Exceptions.buildLoggedErrorMessage(faultType, responseWrapper.errorMessage, - null, responseWrapper.exception); - out.print(output); - } - } catch (IllegalArgumentException e) { - // CSMiddleware is throwing this error for invalid tokens. - // This problem appears to be fixed in other versions, but they are not approved yet.
- try { - String output = - Exceptions.buildLoggedErrorMessage(FaultType.UNAUTHORIZED, "invalid authToken", null, - responseWrapper.exception); - out.print(output); - } catch (Exception x) { - LOG.error("Error while writing failed authentication HTTP response", x); - } - } catch (Exception e) { - LOG.error("Error while writing failed authentication HTTP response", e); - } finally { - if (out != null) - try { - out.close(); - } catch (IOException ignore) { - } - } - } - - @Override - public void init(FilterConfig filterConfig) throws ServletException {} -} diff --git a/java/src/main/java/monasca/api/infrastructure/servlet/RoleAuthorizationFilter.java b/java/src/main/java/monasca/api/infrastructure/servlet/RoleAuthorizationFilter.java deleted file mode 100644 index 9405200f2..000000000 --- a/java/src/main/java/monasca/api/infrastructure/servlet/RoleAuthorizationFilter.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ - -package monasca.api.infrastructure.servlet; - -import monasca.api.resource.exception.Exceptions; -import monasca.common.middleware.AuthConstants; - -import com.sun.jersey.spi.container.ContainerRequest; -import com.sun.jersey.spi.container.ContainerRequestFilter; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.servlet.http.HttpServletRequest; -import javax.ws.rs.core.Context; - -import static monasca.api.infrastructure.servlet.PostAuthenticationFilter.X_MONASCA_AGENT; - -public class RoleAuthorizationFilter implements ContainerRequestFilter { - private static final Logger logger = LoggerFactory.getLogger - (RoleAuthorizationFilter.class); - @Context - private HttpServletRequest httpServletRequest; - private static final String[] VALID_MONASCA_AGENT_POST_PATHS = new String[] { "/v2.0/metrics" }; - private static final String[] VALID_MONASCA_AGENT_GET_PATHS = new String[] { "/", "/v2.0" }; - - @Override - public ContainerRequest filter(ContainerRequest containerRequest) { - String method = containerRequest.getMethod(); - Object isAgent = httpServletRequest.getAttribute(X_MONASCA_AGENT); - String pathInfo = httpServletRequest.getPathInfo(); - - // X_MONASCA_AGENT is only set if the only valid role for this user is an agent role - if (isAgent != null) { - if (!(method.equals("POST") && validPath(pathInfo, VALID_MONASCA_AGENT_POST_PATHS)) && - !(method.equals("GET") && validPath(pathInfo, VALID_MONASCA_AGENT_GET_PATHS))) { - logger.warn("User {} is missing a required role to {} on {}", - httpServletRequest.getAttribute(AuthConstants.AUTH_USER_NAME), - method, pathInfo); - throw Exceptions.badRequest("User is missing a required role to perform this request"); - } - } - return containerRequest; - } - - private boolean validPath(String pathInfo, String[] paths) { - // Make the comparison easier by getting rid of trailing slashes - while (!pathInfo.isEmpty() && !"/".equals(pathInfo) && pathInfo.endsWith("/")) { - pathInfo = pathInfo.substring(0,
pathInfo.length() - 1); - } - for (final String validPath : paths) { - if (validPath.equals(pathInfo)) { - return true; - } - } - return false; - } -} diff --git a/java/src/main/java/monasca/api/resource/AlarmDefinitionResource.java b/java/src/main/java/monasca/api/resource/AlarmDefinitionResource.java deleted file mode 100644 index 47180ec3d..000000000 --- a/java/src/main/java/monasca/api/resource/AlarmDefinitionResource.java +++ /dev/null @@ -1,182 +0,0 @@ -/* - * Copyright (c) 2014,2016 Hewlett Packard Enterprise Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.resource; - -import com.google.common.base.Strings; - -import com.codahale.metrics.annotation.Timed; -import com.fasterxml.jackson.databind.JsonMappingException; - -import java.io.UnsupportedEncodingException; -import java.net.URI; -import java.util.Arrays; -import java.util.List; -import java.util.Map; - -import javax.inject.Inject; -import javax.validation.Valid; -import javax.ws.rs.Consumes; -import javax.ws.rs.DELETE; -import javax.ws.rs.GET; -import javax.ws.rs.HeaderParam; -import javax.ws.rs.POST; -import javax.ws.rs.PUT; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.UriInfo; - -import monasca.api.app.AlarmDefinitionService; -import monasca.api.app.command.CreateAlarmDefinitionCommand; -import monasca.api.app.command.PatchAlarmDefinitionCommand; -import monasca.api.app.command.UpdateAlarmDefinitionCommand; -import monasca.api.app.validation.AlarmValidation; -import monasca.api.app.validation.Validation; -import monasca.api.domain.model.alarmdefinition.AlarmDefinition; -import monasca.api.domain.model.alarmdefinition.AlarmDefinitionRepo; -import monasca.api.infrastructure.persistence.PersistUtils; -import monasca.api.resource.annotation.PATCH; -import monasca.common.model.alarm.AlarmExpression; -import monasca.common.model.alarm.AlarmSeverity; - -/** - * Alarm definition resource implementation. 
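For orientation, a create request against the resource below might look like the following sketch. The host, port, and token are placeholders (a DevStack deployment typically serves the API on port 8070, but treat that as an assumption); the alarm expression uses the documented avg(metric{dimension=value}) > threshold form.

    import java.io.OutputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;

    public class CreateAlarmDefinitionSketch {
      public static void main(String[] args) throws Exception {
        URL url = new URL("http://localhost:8070/v2.0/alarm-definitions"); // assumed endpoint
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("POST");
        conn.setRequestProperty("Content-Type", "application/json");
        conn.setRequestProperty("X-Auth-Token", "TOKEN"); // placeholder token
        conn.setDoOutput(true);
        String body = "{\"name\": \"cpu_high\","
            + " \"expression\": \"avg(cpu.user_perc{hostname=web01}) > 90\","
            + " \"severity\": \"HIGH\"}";
        try (OutputStream out = conn.getOutputStream()) {
          out.write(body.getBytes(StandardCharsets.UTF_8));
        }
        System.out.println(conn.getResponseCode()); // 201 on success
      }
    }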
- */ -@Path(AlarmDefinitionResource.ALARM_DEFINITIONS_PATH) -public class AlarmDefinitionResource { - private final AlarmDefinitionService service; - private final AlarmDefinitionRepo repo; - private final PersistUtils persistUtils; - public final static String ALARM_DEFINITIONS = "alarm-definitions"; - public final static String ALARM_DEFINITIONS_PATH = "/v2.0/" + ALARM_DEFINITIONS; - private final static List ALLOWED_SORT_BY = Arrays.asList("id", "name", "severity", - "updated_at", "created_at"); - - @Inject - public AlarmDefinitionResource(AlarmDefinitionService service, - AlarmDefinitionRepo repo, - PersistUtils persistUtils) { - this.service = service; - this.repo = repo; - this.persistUtils = persistUtils; - } - - @POST - @Timed - @Consumes(MediaType.APPLICATION_JSON) - @Produces(MediaType.APPLICATION_JSON) - public Response create(@Context UriInfo uriInfo, @HeaderParam("X-Tenant-Id") String tenantId, - @Valid CreateAlarmDefinitionCommand command) { - command.validate(); - AlarmExpression alarmExpression = AlarmValidation.validateNormalizeAndGet(command.expression); - AlarmDefinition alarm = - Links.hydrate(service.create(tenantId, command.name, command.description, command.severity, - command.expression, alarmExpression, command.matchBy, command.alarmActions, - command.okActions, command.undeterminedActions), uriInfo, false); - return Response.created(URI.create(alarm.getId())).entity(alarm).build(); - } - - @GET - @Timed - @Produces(MediaType.APPLICATION_JSON) - public Object list(@Context UriInfo uriInfo, - @HeaderParam("X-Tenant-Id") String tenantId, @QueryParam("name") String name, - @QueryParam("dimensions") String dimensionsStr, - @QueryParam("severity") String severityStr, - @QueryParam("sort_by") String sortByStr, - @QueryParam("offset") String offset, - @QueryParam("limit") String limit) throws UnsupportedEncodingException { - Map dimensions = - Strings.isNullOrEmpty(dimensionsStr) ? 
null : Validation - .parseAndValidateDimensions(dimensionsStr); - - List sortByList = Validation.parseAndValidateSortBy(sortByStr, ALLOWED_SORT_BY); - if (!Strings.isNullOrEmpty(offset)) { - Validation.parseAndValidateNumber(offset, "offset"); - } - - List severityList = Validation.parseAndValidateSeverity(severityStr); - - final int paging_limit = this.persistUtils.getLimit(limit); - final List resources = repo.find(tenantId, - name, - dimensions, - severityList, - sortByList, - offset, - paging_limit - ); - return Links.paginateAlarming(paging_limit, Links.hydrate(resources, uriInfo), uriInfo); - } - - @GET - @Timed - @Path("/{alarm_definition_id}") - @Produces(MediaType.APPLICATION_JSON) - public AlarmDefinition get( - @Context UriInfo uriInfo, - @HeaderParam("X-Tenant-Id") String tenantId, - @PathParam("alarm_definition_id") String alarmDefinitionId) { - return Links.hydrate(repo.findById(tenantId, alarmDefinitionId), uriInfo, true); - } - - @PUT - @Timed - @Path("/{alarm_definition_id}") - @Consumes(MediaType.APPLICATION_JSON) - @Produces(MediaType.APPLICATION_JSON) - public AlarmDefinition update(@Context UriInfo uriInfo, - @HeaderParam("X-Tenant-Id") String tenantId, - @PathParam("alarm_definition_id") String alarmDefinitionId, - @Valid UpdateAlarmDefinitionCommand command) { - command.validate(); - AlarmExpression alarmExpression = AlarmValidation.validateNormalizeAndGet(command.expression); - return Links.hydrate(service.update(tenantId, alarmDefinitionId, alarmExpression, command), - uriInfo, true); - } - - @PATCH - @Timed - @Path("/{alarm_definition_id}") - @Consumes(MediaType.APPLICATION_JSON) - @Produces(MediaType.APPLICATION_JSON) - public AlarmDefinition patch(@Context UriInfo uriInfo, - @HeaderParam("X-Tenant-Id") String tenantId, - @PathParam("alarm_definition_id") String alarmDefinitionId, - @Valid PatchAlarmDefinitionCommand command) throws JsonMappingException { - command.validate(); - AlarmExpression alarmExpression = - command.expression == null ? null : AlarmValidation - .validateNormalizeAndGet(command.expression); - - return Links.hydrate(service.patch(tenantId, alarmDefinitionId, command.name, - command.description, command.severity, command.expression, - alarmExpression, command.matchBy, command.actionsEnabled, - command.alarmActions, command.okActions, - command.undeterminedActions), - uriInfo, true); - } - - @DELETE - @Timed - @Path("/{alarm_definition_id}") - public void delete(@HeaderParam("X-Tenant-Id") String tenantId, - @PathParam("alarm_definition_id") String alarmDefinitionId) { - service.delete(tenantId, alarmDefinitionId); - } -} diff --git a/java/src/main/java/monasca/api/resource/AlarmResource.java b/java/src/main/java/monasca/api/resource/AlarmResource.java deleted file mode 100644 index 6208807e6..000000000 --- a/java/src/main/java/monasca/api/resource/AlarmResource.java +++ /dev/null @@ -1,313 +0,0 @@ -/* - * Copyright (c) 2014-2016 Hewlett Packard Enterprise Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
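The dimensions query parameter used throughout the listing endpoints above takes a comma-separated list of name:value pairs. A simplified parse (the real validation in Validation.parseAndValidateDimensions also enforces character and length rules, which this sketch omits):

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class DimensionFilterSketch {
      static Map<String, String> parse(String dimensionsStr) {
        Map<String, String> dims = new LinkedHashMap<>();
        for (String pair : dimensionsStr.split(",")) {
          String[] kv = pair.split(":", 2);
          dims.put(kv[0], kv.length > 1 ? kv[1] : null); // a bare name matches any value
        }
        return dims;
      }

      public static void main(String[] args) {
        System.out.println(parse("hostname:web01,service:monitoring"));
        // {hostname=web01, service=monitoring}
      }
    }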
- */ -package monasca.api.resource; - -import com.google.common.base.Splitter; -import com.google.common.base.Strings; -import com.google.common.collect.Lists; - -import com.codahale.metrics.annotation.Timed; -import com.fasterxml.jackson.databind.JsonMappingException; - -import org.hibernate.validator.constraints.NotEmpty; -import org.joda.time.DateTime; - -import java.util.Arrays; -import java.util.List; -import java.util.Map; - -import javax.inject.Inject; -import javax.validation.Valid; -import javax.ws.rs.Consumes; -import javax.ws.rs.DELETE; -import javax.ws.rs.GET; -import javax.ws.rs.HeaderParam; -import javax.ws.rs.PUT; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.UriInfo; - -import monasca.api.app.AlarmService; -import monasca.api.app.command.UpdateAlarmCommand; -import monasca.api.app.validation.MetricNameValidation; -import monasca.api.app.validation.Validation; -import monasca.api.domain.model.alarm.Alarm; -import monasca.api.domain.model.alarm.AlarmCount; -import monasca.api.domain.model.alarm.AlarmRepo; -import monasca.api.domain.model.alarmstatehistory.AlarmStateHistory; -import monasca.api.domain.model.alarmstatehistory.AlarmStateHistoryRepo; -import monasca.api.infrastructure.persistence.PersistUtils; -import monasca.api.resource.annotation.PATCH; -import monasca.api.resource.exception.Exceptions; -import monasca.common.model.alarm.AlarmSeverity; -import monasca.common.model.alarm.AlarmState; - -/** - * Alarm resource implementation. - */ -@Path("/v2.0/alarms") -public class AlarmResource { - private final AlarmService service; - private final AlarmRepo repo; - private final PersistUtils persistUtils; - private final AlarmStateHistoryRepo stateHistoryRepo; - - private final static List ALLOWED_GROUP_BY = Arrays.asList("alarm_definition_id", "name", - "state", "severity", "link", - "lifecycle_state", - "metric_name", "dimension_name", - "dimension_value"); - private final static List ALLOWED_SORT_BY = Arrays.asList("alarm_id", "alarm_definition_id", - "alarm_definition_name", "state", - "severity", "lifecycle_state", "link", - "state_updated_timestamp", "updated_timestamp", - "created_timestamp"); - - @Inject - public AlarmResource(AlarmService service, AlarmRepo repo, - AlarmStateHistoryRepo stateHistoryRepo, - PersistUtils persistUtils) { - this.service = service; - this.repo = repo; - this.stateHistoryRepo = stateHistoryRepo; - this.persistUtils = persistUtils; - } - - @DELETE - @Timed - @Path("/{alarm_id}") - public void delete(@HeaderParam("X-Tenant-Id") String tenantId, - @PathParam("alarm_id") String alarmId) { - service.delete(tenantId, alarmId); - } - - @GET - @Timed - @Path("/{alarm_id}") - @Produces(MediaType.APPLICATION_JSON) - public Alarm get( - @Context UriInfo uriInfo, - @HeaderParam("X-Tenant-Id") String tenantId, @PathParam("alarm_id") String alarm_id) { - return fixAlarmLinks(uriInfo, repo.findById(tenantId, alarm_id)); - } - - private Alarm fixAlarmLinks(UriInfo uriInfo, Alarm alarm) { - Links.hydrate(alarm.getAlarmDefinition(), uriInfo, - AlarmDefinitionResource.ALARM_DEFINITIONS_PATH); - return Links.hydrate(alarm, uriInfo, true); - } - - @GET - @Timed - @Path("/{alarm_id}/state-history") - @Produces(MediaType.APPLICATION_JSON) - public Object getStateHistory(@Context UriInfo uriInfo, - @HeaderParam("X-Tenant-Id") String tenantId, @PathParam("alarm_id") String alarmId, - 
@QueryParam("offset") String offset, - @QueryParam("limit") String limit) - throws Exception { - final int paging_limit = this.persistUtils.getLimit(limit); - final List resource = stateHistoryRepo.findById(tenantId, - alarmId, - offset, - paging_limit - ); - return Links.paginate(paging_limit, resource, uriInfo); - } - - @GET - @Timed - @Path("/state-history") - @Produces(MediaType.APPLICATION_JSON) - public Object listStateHistory( - @Context UriInfo uriInfo, - @HeaderParam("X-Tenant-Id") String tenantId, - @QueryParam("dimensions") String dimensionsStr, - @QueryParam("start_time") String startTimeStr, - @QueryParam("end_time") String endTimeStr, - @QueryParam("offset") String offset, - @QueryParam("limit") String limit) - throws Exception { - - // Validate query parameters - DateTime startTime = Validation.parseAndValidateDate(startTimeStr, "start_time", false); - - DateTime endTime = Validation.parseAndValidateDate(endTimeStr, "end_time", false); - - Validation.parseAndValidateDate(offset, "offset", false); - - if (startTime != null) { - Validation.validateTimes(startTime, endTime); - } - - Map dimensions = - Strings.isNullOrEmpty(dimensionsStr) ? null : Validation - .parseAndValidateDimensions(dimensionsStr); - - final int paging_limit = this.persistUtils.getLimit(limit); - final List resources = stateHistoryRepo.find(tenantId, - dimensions, - startTime, - endTime, - offset, - paging_limit - ); - return Links.paginate(paging_limit, resources, uriInfo); - } - - @GET - @Timed - @Produces(MediaType.APPLICATION_JSON) - public Object list(@Context UriInfo uriInfo, @HeaderParam("X-Tenant-Id") String tenantId, - @QueryParam("alarm_definition_id") String alarmDefId, - @QueryParam("metric_name") String metricName, - @QueryParam("metric_dimensions") String metricDimensionsStr, - @QueryParam("state") AlarmState state, - @QueryParam("severity") String severity, - @QueryParam("lifecycle_state") String lifecycleState, - @QueryParam("link") String link, - @QueryParam("state_updated_start_time") String stateUpdatedStartStr, - @QueryParam("sort_by") String sortBy, - @QueryParam("offset") String offset, - @QueryParam("limit") String limit) - throws Exception { - - Map metricDimensions = - Strings.isNullOrEmpty(metricDimensionsStr) ? 
null : Validation - .parseAndValidateDimensions(metricDimensionsStr); - MetricNameValidation.validate(metricName, false); - DateTime stateUpdatedStart = - Validation.parseAndValidateDate(stateUpdatedStartStr, - "state_updated_start_time", false); - - List sortByList = Validation.parseAndValidateSortBy(sortBy, ALLOWED_SORT_BY); - if (!Strings.isNullOrEmpty(offset)) { - Validation.parseAndValidateNumber(offset, "offset"); - } - List severityList = Validation.parseAndValidateSeverity(severity); - - final int paging_limit = this.persistUtils.getLimit(limit); - final List alarms = repo.find(tenantId, alarmDefId, metricName, metricDimensions, state, - severityList, lifecycleState, link, stateUpdatedStart, sortByList, - offset, paging_limit, true); - for (final Alarm alarm : alarms) { - Links.hydrate( - alarm.getAlarmDefinition(), - uriInfo, - AlarmDefinitionResource.ALARM_DEFINITIONS_PATH - ); - } - return Links.paginateAlarming(paging_limit, Links.hydrate(alarms, uriInfo), uriInfo); - } - - - - @PATCH - @Timed - @Path("/{alarm_id}") - @Consumes(MediaType.APPLICATION_JSON) - @Produces(MediaType.APPLICATION_JSON) - public Alarm patch(@Context UriInfo uriInfo, @HeaderParam("X-Tenant-Id") String tenantId, - @PathParam("alarm_id") String alarmId, @NotEmpty Map fields) - throws JsonMappingException { - String stateStr = fields.get("state"); - String lifecycleState = fields.get("lifecycle_state"); - String link = fields.get("link"); - AlarmState state = - stateStr == null ? null : Validation.parseAndValidate(AlarmState.class, stateStr); - Validation.validateLifecycleState(lifecycleState); - Validation.validateLink(link); - - return fixAlarmLinks(uriInfo, service.patch(tenantId, alarmId, state, lifecycleState, link)); - } - - @PUT - @Timed - @Path("/{alarm_id}") - @Consumes(MediaType.APPLICATION_JSON) - @Produces(MediaType.APPLICATION_JSON) - public Alarm update(@Context UriInfo uriInfo, @HeaderParam("X-Tenant-Id") String tenantId, - @PathParam("alarm_id") String alarmId, @Valid UpdateAlarmCommand command) { - - Validation.validateLifecycleState(command.lifecycleState); - Validation.validateLink(command.link); - - return fixAlarmLinks(uriInfo, service.update(tenantId, alarmId, command)); - } - - @GET - @Timed - @Path("/count") - @Produces(MediaType.APPLICATION_JSON) - public Object getCount(@Context UriInfo uriInfo, - @HeaderParam("X-Tenant-Id") String tenantId, @PathParam("alarm_id") String alarmId, - @QueryParam("alarm_definition_id") String alarmDefId, - @QueryParam("metric_name") String metricName, - @QueryParam("metric_dimensions") String metricDimensionsStr, - @QueryParam("state") AlarmState state, - @QueryParam("severity") String severity, - @QueryParam("lifecycle_state") String lifecycleState, - @QueryParam("link") String link, - @QueryParam("state_updated_start_time") String stateUpdatedStartStr, - @QueryParam("group_by") String groupByStr, - @QueryParam("offset") String offset, - @QueryParam("limit") String limit) - throws Exception { - Map metricDimensions = - Strings.isNullOrEmpty(metricDimensionsStr) ? null : Validation - .parseAndValidateDimensions(metricDimensionsStr); - MetricNameValidation.validate(metricName, false); - DateTime stateUpdatedStart = - Validation.parseAndValidateDate(stateUpdatedStartStr, - "state_updated_start_time", false); - List severityList = Validation.parseAndValidateSeverity(severity); - List groupBy = (Strings.isNullOrEmpty(groupByStr)) ? 
null : parseAndValidateGroupBy( - groupByStr); - - if (offset != null) { - Validation.parseAndValidateNumber(offset, "offset"); - } - - final int paging_limit = this.persistUtils.getLimit(limit); - final AlarmCount resource = repo.getAlarmsCount(tenantId, - alarmDefId, - metricName, - metricDimensions, - state, - severityList, - lifecycleState, - link, - stateUpdatedStart, - groupBy, - offset, - paging_limit); - Links.paginateAlarmCount(resource, paging_limit, uriInfo); - return resource; - } - - private List parseAndValidateGroupBy(String groupByStr) { - List groupBy = null; - if (!Strings.isNullOrEmpty(groupByStr)) { - groupBy = Lists.newArrayList(Splitter.on(',').omitEmptyStrings().trimResults().split(groupByStr)); - if (!ALLOWED_GROUP_BY.containsAll(groupBy)) { - throw Exceptions.unprocessableEntity("Unprocessable Entity", "Invalid group_by field"); - } - } - return groupBy; - } -} diff --git a/java/src/main/java/monasca/api/resource/DimensionResource.java b/java/src/main/java/monasca/api/resource/DimensionResource.java deleted file mode 100644 index f22f72ac8..000000000 --- a/java/src/main/java/monasca/api/resource/DimensionResource.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * (C) Copyright 2016 Hewlett Packard Enterprise Development LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.resource; - -import static monasca.api.app.validation.Validation.DEFAULT_ADMIN_ROLE; - - -import com.codahale.metrics.annotation.Timed; - -import java.util.List; - -import javax.inject.Inject; -import javax.ws.rs.GET; -import javax.ws.rs.HeaderParam; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.UriInfo; - -import monasca.api.ApiConfig; -import monasca.api.app.validation.Validation; -import monasca.api.domain.model.dimension.DimensionName; -import monasca.api.domain.model.dimension.DimensionRepo; -import monasca.api.domain.model.dimension.DimensionValue; -import monasca.api.infrastructure.persistence.PersistUtils; - -/** - * Dimension resource implementation. - */ -@Path("/v2.0/metrics/dimensions") -public class DimensionResource { - - private final DimensionRepo repo; - private final PersistUtils persistUtils; - private final String adminRole; - - @Inject - public DimensionResource(ApiConfig config, DimensionRepo repo, PersistUtils persistUtils) { - this.adminRole = (config.middleware == null || config.middleware.adminRole == null) - ? 
DEFAULT_ADMIN_ROLE : config.middleware.adminRole; - this.repo = repo; - this.persistUtils = persistUtils; - } - - @GET - @Path("/names/values") - @Timed - @Produces(MediaType.APPLICATION_JSON) - public Object getDimensionValues( - @Context UriInfo uriInfo, - @HeaderParam("X-Tenant-Id") String tenantId, - @HeaderParam("X-Roles") String roles, - @QueryParam("limit") String limit, - @QueryParam("dimension_name") String dimensionName, - @QueryParam("metric_name") String metricName, - @QueryParam("offset") String offset, - @QueryParam("tenant_id") String crossTenantId) throws Exception - { - Validation.validateNotNullOrEmpty(dimensionName, "dimension_name"); - final int pagingLimit = this.persistUtils.getLimit(limit); - String queryTenantId = Validation.getQueryProject(roles, crossTenantId, tenantId, adminRole); - List dimValues = repo.findValues(metricName, queryTenantId, dimensionName, offset, pagingLimit); - return Links.paginate(pagingLimit, dimValues, uriInfo); - } - - @GET - @Path("/names") - @Timed - @Produces(MediaType.APPLICATION_JSON) - public Object getDimensionNames( - @Context UriInfo uriInfo, - @HeaderParam("X-Tenant-Id") String tenantId, - @HeaderParam("X-Roles") String roles, - @QueryParam("limit") String limit, - @QueryParam("metric_name") String metricName, - @QueryParam("offset") String offset, - @QueryParam("tenant_id") String crossTenantId) throws Exception - { - final int paging_limit = this.persistUtils.getLimit(limit); - String queryTenantId = Validation.getQueryProject(roles, crossTenantId, tenantId, adminRole); - List dimNames = repo.findNames(metricName, queryTenantId, offset, paging_limit); - return Links.paginate(paging_limit, dimNames, uriInfo); - } -} diff --git a/java/src/main/java/monasca/api/resource/Links.java b/java/src/main/java/monasca/api/resource/Links.java deleted file mode 100644 index 5edefbdb4..000000000 --- a/java/src/main/java/monasca/api/resource/Links.java +++ /dev/null @@ -1,363 +0,0 @@ -/* - * (C) Copyright 2014-2016 Hewlett Packard Enterprise Development LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.resource; - -import java.io.UnsupportedEncodingException; -import java.net.URLEncoder; -import java.util.ArrayList; -import java.util.List; - -import javax.ws.rs.core.MultivaluedMap; -import javax.ws.rs.core.UriInfo; - -import com.google.common.base.Preconditions; - -import monasca.api.ApiConfig; -import monasca.api.domain.model.alarm.AlarmCount; -import monasca.api.domain.model.common.Paged; -import monasca.api.domain.model.dimension.DimensionBase; -import monasca.api.domain.model.measurement.Measurements; -import monasca.common.model.domain.common.AbstractEntity; -import monasca.api.domain.model.common.Link; -import monasca.api.domain.model.common.Linked; -import monasca.common.util.Injector; - -/** - * Utilities for working with links. 
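The next-link construction at the heart of this class can be sketched in isolation: start from the request path, set the new offset, then copy every query parameter except the stale offset. Names below are illustrative; the sketch mirrors the getNextLink helper further down.

    import java.net.URLEncoder;
    import java.util.Map;

    public class NextLinkSketch {
      static String nextHref(String absolutePath, String offset,
                             Map<String, String> queryParams) throws Exception {
        StringBuilder href = new StringBuilder(absolutePath)
            .append("?offset=").append(URLEncoder.encode(offset, "UTF-8"));
        for (Map.Entry<String, String> e : queryParams.entrySet()) {
          if (!"offset".equalsIgnoreCase(e.getKey())) { // drop the old offset
            href.append('&').append(URLEncoder.encode(e.getKey(), "UTF-8"))
                .append('=').append(URLEncoder.encode(e.getValue(), "UTF-8"));
          }
        }
        return href.toString();
      }

      public static void main(String[] args) throws Exception {
        System.out.println(nextHref("http://api/v2.0/metrics", "42",
            java.util.Collections.singletonMap("name", "cpu.user_perc")));
        // http://api/v2.0/metrics?offset=42&name=cpu.user_perc
      }
    }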
- */ -public final class Links { - static boolean accessedViaHttps; - - static { - ApiConfig config = Injector.getInstance(ApiConfig.class); - if (config != null && config.accessedViaHttps != null) - accessedViaHttps = config.accessedViaHttps; - } - - /** - * Hydrates the {@code resources} with links for the {@code uriInfo}. - * - * @throws NullPointerException if {@code resource} is null - */ - public static List hydrate(List resources, - UriInfo uriInfo, String... children) { - Preconditions.checkNotNull(resources, "resources"); - - // Safe since this path should not be specific to a resource - String absolutePath = prefixForHttps(uriInfo.getAbsolutePath().toString()); - for (T resource : resources) - hydrate(resource, absolutePath, false, children); - return resources; - } - - /** - * Hydrates the {@code resource} with links for the {@code uriInfo}. - * - * @param resource to obtain id from - * @param uriInfo to obtain path from - * @throws NullPointerException if {@code resource} is null - */ - public static T hydrate(T resource, UriInfo uriInfo) { - return hydrate(resource, prefixForHttps(uriInfo.getAbsolutePath().toString()), false); - } - - /** - * Hydrates the {@code resource} with links for the {@code uriInfo}. - * - * @param resource to obtain id from - * @param uriInfo to obtain base path from - * @param resourcePath path to type of resource - * @throws NullPointerException if {@code resource} is null - */ - public static T hydrate(T resource, UriInfo uriInfo, String resourcePath) { - return hydrate(resource, concatPaths(uriInfo.getBaseUri().toString(), resourcePath) + "/", false); - } - - private static String concatPaths(final String first, final String second) { - // Check if this would cause two slashes in a row or a slash at the start - if ((first.isEmpty() || first.endsWith("/")) && !second.isEmpty() && second.startsWith("/")) { - return first + second.substring(1); - } - else { - return first + second; - } - } - - /** - * Hydrates the {@code resource} with links for the {@code uriInfo}. - * - * @param resource to obtain id from - * @param uriInfo to obtain path from - * @param uriInfoForSpecificResource whether the uriInfo is for a specific resource - * @param children child link elements to create - * @throws NullPointerException if {@code resource} is null - */ - public static T hydrate(T resource, UriInfo uriInfo, - boolean uriInfoForSpecificResource, String... children) { - return hydrate(resource, prefixForHttps(uriInfo.getAbsolutePath().toString()), - uriInfoForSpecificResource, children); - } - - /** - * Returns a string that is prefixed for prefixForHttp if https is being used. - */ - static String prefixForHttps(String path) { - if (accessedViaHttps && !path.toLowerCase().startsWith("https")) - path = "https" + path.substring(path.indexOf("://")); - return path; - } - - /** - * Hydrates the {@code resource} with links for the {@code path}. - * - * @throws NullPointerException if {@code resource} is null - */ - private static T hydrate(T resource, String path, - boolean pathForSpecificResource, String... 
children) { - Preconditions.checkNotNull(resource, "resource"); - - List links = new ArrayList<>(children.length + 1); - if (!pathForSpecificResource) { - boolean pathEndsInSlash = path.length() > 0 && path.charAt(path.length() - 1) == '/'; - if (!pathEndsInSlash) - path += "/"; - path += resource.getId(); - } - - links.add(new Link("self", path)); - for (String child : children) - links.add(new Link(child, path + "/" + child)); - - resource.setLinks(links); - return resource; - } - - - /** - * This method handles the case that the elements list size is one greater than the - * limit. The next link will be created automatically. - * - * This method also handles the case that the element size is the limit. The next - * link will not be created. - * - * The convention is for methods that query the DB to request limit + 1 elements. - * - * Only limit number of elements will be returned. - * - * @param limit - * @param elements - * @param uriInfo - * @return - */ - public static Paged paginate(int limit, List elements, UriInfo uriInfo) - throws UnsupportedEncodingException { - - // Check for paging turned off. Happens if maxQueryLimit is not set or is set to zero. - if (limit == 0) { - Paged paged = new Paged(); - paged.elements = elements != null ? elements : new ArrayList<>(); - return paged; - } - - Paged paged = new Paged(); - - paged.links.add(getSelfLink(uriInfo)); - - if (elements != null) { - - if (elements.size() > limit) { - - String offset = elements.get(limit - 1).getId(); - - paged.links.add(getNextLink(offset, uriInfo)); - - // Truncate the list. Normally this will just truncate one extra element. - elements = elements.subList(0, limit); - } - - paged.elements = elements; - - } else { - - paged.elements = new ArrayList<>(); - - } - - return paged; - - } - - public static Object paginateAlarming(int limit, List elements, UriInfo uriInfo) - throws UnsupportedEncodingException { - - // Check for paging turned off. Happens if maxQueryLimit is not set or is set to zero. - if (limit == 0) { - Paged paged = new Paged(); - paged.elements = elements != null ? elements : new ArrayList<>(); - return paged; - } - - Paged paged = new Paged(); - - paged.links.add(getSelfLink(uriInfo)); - - if (elements != null) { - - if (elements.size() > limit) { - - MultivaluedMap queryParams = uriInfo.getQueryParameters(); - int offset = 0; - if (queryParams.containsKey("offset")) { - offset = Integer.parseInt(queryParams.get("offset").get(0)); - } - - String nextOffset = String.valueOf(limit + offset); - - paged.links.add(getNextLink(nextOffset, uriInfo)); - - // Truncate the list. Normally this will just truncate one extra element. - elements = elements.subList(0, limit); - } - - paged.elements = elements; - - } else { - - paged.elements = new ArrayList<>(); - - } - - return paged; - - } - - public static Object paginateMeasurements(int limit, List elements, UriInfo uriInfo) - throws UnsupportedEncodingException { - - // Check for paging turned off. Happens if maxQueryLimit is not set or is set to zero. - if (limit == 0) { - Paged paged = new Paged(); - paged.elements = elements != null ? 
elements : new ArrayList<>(); - return paged; - } - - Paged paged = new Paged(); - - paged.links.add(getSelfLink(uriInfo)); - - if (elements != null && !elements.isEmpty()) { - - int remaining_limit = limit; - - for (int i = 0; i < elements.size(); i++) { - - Measurements s = elements.get(i); - - if (s != null) { - - List> l = s.getMeasurements(); - - if (l.size() >= remaining_limit) { - - String offset = s.getId(); - - if (offset != null) { - offset += '_' + (String) l.get(remaining_limit - 1).get(0); - } else { - offset = (String) l.get(remaining_limit - 1).get(0); - } - - paged.links.add(getNextLink(offset, uriInfo)); - - // Truncate the measurement list. Normally this will just truncate one extra element. - l = l.subList(0, remaining_limit); - s.setMeasurements(l); - - // Truncate the elements list - elements = elements.subList(0, i + 1); - - } else { - remaining_limit -= l.size(); - } - - paged.elements = elements; - - } else { - - paged.elements = new ArrayList<>(); - - } - } - - } else { - - paged.elements = new ArrayList<>(); - } - - return paged; - - } - - - - private static Link getSelfLink(UriInfo uriInfo) { - - Link selfLink = new Link(); - selfLink.rel = "self"; - selfLink.href = prefixForHttps(uriInfo.getRequestUri().toString()); - return selfLink; - } - - private static Link getNextLink(String offset, UriInfo uriInfo) - throws UnsupportedEncodingException { - - Link nextLink = new Link(); - nextLink.rel = "next"; - - // Create a new URL with the new offset. - nextLink.href = prefixForHttps(uriInfo.getAbsolutePath().toString() - + "?offset=" + URLEncoder.encode(offset, "UTF-8")); - - // Add the query parms back to the URL without the original offset. - for (String parmKey : uriInfo.getQueryParameters().keySet()) { - - if (!parmKey.equalsIgnoreCase("offset")) { - - List parmValList = uriInfo.getQueryParameters().get(parmKey); - for (String parmVal : parmValList) { - - nextLink.href += - "&" + URLEncoder.encode(parmKey, "UTF-8") + "=" + URLEncoder.encode(parmVal, "UTF-8"); - - } - } - } - - return nextLink; - } - - public static void paginateAlarmCount(AlarmCount alarmCount, int limit, UriInfo uriInfo) - throws UnsupportedEncodingException { - List links = new ArrayList<>(); - links.add(getSelfLink(uriInfo)); - if (alarmCount.getCounts().size() > limit) { - alarmCount.getCounts().remove(alarmCount.getCounts().size()-1); - String offset = String.valueOf(limit); - links.add(getNextLink(offset, uriInfo)); - } - - alarmCount.setLinks(links); - } - -} diff --git a/java/src/main/java/monasca/api/resource/MeasurementResource.java b/java/src/main/java/monasca/api/resource/MeasurementResource.java deleted file mode 100644 index 491e5efba..000000000 --- a/java/src/main/java/monasca/api/resource/MeasurementResource.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright (c) 2014, 2016 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
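The paging convention spelled out in the Links javadoc above (repositories fetch limit + 1 rows; an extra row means another page exists) reduces to a few lines. This sketch simplifies the offset bookkeeping away entirely, where the real code derives the next offset from the last returned element:

    import java.util.Arrays;
    import java.util.List;

    public class LimitPlusOneSketch {
      static <T> List<T> page(List<T> fetched, int limit, boolean[] hasNext) {
        hasNext[0] = fetched.size() > limit; // the extra row signals another page
        return hasNext[0] ? fetched.subList(0, limit) : fetched;
      }

      public static void main(String[] args) {
        boolean[] hasNext = new boolean[1];
        System.out.println(page(Arrays.asList("a", "b", "c"), 2, hasNext)); // [a, b]
        System.out.println(hasNext[0] ? "emit next link" : "last page");    // emit next link
      }
    }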
- */ -package monasca.api.resource; - -import static monasca.api.app.validation.Validation.DEFAULT_ADMIN_ROLE; - -import com.google.common.base.Strings; - -import com.codahale.metrics.annotation.Timed; - -import org.joda.time.DateTime; - -import java.util.List; -import java.util.Map; - -import javax.inject.Inject; -import javax.ws.rs.GET; -import javax.ws.rs.HeaderParam; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.UriInfo; - -import monasca.api.app.validation.MetricNameValidation; -import monasca.api.ApiConfig; -import monasca.api.app.validation.Validation; -import monasca.api.domain.model.measurement.MeasurementRepo; -import monasca.api.domain.model.measurement.Measurements; -import monasca.api.infrastructure.persistence.PersistUtils; - -/** - * Measurement resource implementation. - */ -@Path("/v2.0/metrics/measurements") -public class MeasurementResource { - - private final MeasurementRepo repo; - private final PersistUtils persistUtils; - private final String admin_role; - - @Inject - public MeasurementResource(ApiConfig config, MeasurementRepo repo, PersistUtils persistUtils) { - this.admin_role = (config.middleware == null || config.middleware.adminRole == null) - ? DEFAULT_ADMIN_ROLE : config.middleware.adminRole; - this.repo = repo; - this.persistUtils = persistUtils; - } - - @GET - @Timed - @Produces(MediaType.APPLICATION_JSON) - public Object get( - @Context UriInfo uriInfo, - @HeaderParam("X-Tenant-Id") String tenantId, - @HeaderParam("X-Roles") String roles, - @QueryParam("name") String name, - @QueryParam("dimensions") String dimensionsStr, - @QueryParam("start_time") String startTimeStr, - @QueryParam("end_time") String endTimeStr, - @QueryParam("offset") String offset, - @QueryParam("limit") String limit, - @QueryParam("tenant_id") String crossTenantId, - @QueryParam("merge_metrics") String mergeMetricsFlag, - @QueryParam("group_by") String groupByStr) throws Exception { - - // Validate query parameters - DateTime startTime = Validation.parseAndValidateDate(startTimeStr, "start_time", true); - DateTime endTime = Validation.parseAndValidateDate(endTimeStr, "end_time", false); - Validation.validateTimes(startTime, endTime); - Map - dimensions = - Strings.isNullOrEmpty(dimensionsStr) ? null : Validation - .parseAndValidateDimensions(dimensionsStr); - MetricNameValidation.validate(name, true); - Boolean mergeMetricsFlagBool = Validation.validateAndParseMergeMetricsFlag(mergeMetricsFlag); - List groupBy = Validation.parseAndValidateMetricsGroupBy(groupByStr); - - String queryTenantId = Validation.getQueryProject(roles, crossTenantId, tenantId, admin_role); - - final int paging_limit = this.persistUtils.getLimit(limit); - final List resources = repo.find(queryTenantId, - name, - dimensions, - startTime, - endTime, - offset, - paging_limit, - mergeMetricsFlagBool, - groupBy - ); - return Links.paginateMeasurements(paging_limit, resources, uriInfo); - } - -} diff --git a/java/src/main/java/monasca/api/resource/MetricResource.java b/java/src/main/java/monasca/api/resource/MetricResource.java deleted file mode 100644 index ba6635b1c..000000000 --- a/java/src/main/java/monasca/api/resource/MetricResource.java +++ /dev/null @@ -1,195 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.resource; - -import static monasca.api.app.validation.Validation.DEFAULT_ADMIN_ROLE; - -import com.google.common.base.Splitter; -import com.google.common.base.Strings; - -import com.codahale.metrics.annotation.Timed; - -import org.joda.time.DateTime; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -import javax.inject.Inject; -import javax.validation.Valid; -import javax.ws.rs.Consumes; -import javax.ws.rs.GET; -import javax.ws.rs.HeaderParam; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.UriInfo; - -import monasca.api.ApiConfig; -import monasca.api.app.MetricService; -import monasca.api.app.command.CreateMetricCommand; -import monasca.api.app.validation.MetricNameValidation; -import monasca.api.app.validation.Validation; -import monasca.api.domain.model.metric.MetricDefinitionRepo; -import monasca.api.domain.model.metric.MetricName; -import monasca.api.infrastructure.persistence.PersistUtils; -import monasca.api.resource.exception.Exceptions; -import monasca.common.model.Services; -import monasca.common.model.metric.Metric; -import monasca.common.model.metric.MetricDefinition; - -/** - * Metric resource implementation. - */ -@Path("/v2.0/metrics") -public class MetricResource { - - private static final Splitter COMMA_SPLITTER = Splitter.on(',').omitEmptyStrings().trimResults(); - - private final String monitoring_delegate_role; - private final String admin_role; - private final MetricService service; - private final MetricDefinitionRepo metricRepo; - private final PersistUtils persistUtils; - - @Inject - public MetricResource(ApiConfig config, MetricService service, MetricDefinitionRepo metricRepo, - PersistUtils persistUtils) { - - this.monitoring_delegate_role = (config.middleware == null || config.middleware.delegateAuthorizedRole == null) - ? "monitoring-delegate" : config.middleware.delegateAuthorizedRole; - - this.admin_role = (config.middleware == null || config.middleware.adminRole == null) - ? 
DEFAULT_ADMIN_ROLE : config.middleware.adminRole; - - this.service = service; - this.metricRepo = metricRepo; - this.persistUtils = persistUtils; - } - - @POST - @Timed - @Consumes(MediaType.APPLICATION_JSON) - public void create(@Context UriInfo uriInfo, @HeaderParam("X-Tenant-Id") String tenantId, - @HeaderParam("X-Roles") String roles, - @QueryParam("tenant_id") String crossTenantId, - @Valid CreateMetricCommand[] commands) { - boolean - isDelegate = - !Strings.isNullOrEmpty(roles) && COMMA_SPLITTER.splitToList(roles) - .contains(monitoring_delegate_role); - boolean - isAdmin = - !Strings.isNullOrEmpty(roles) && COMMA_SPLITTER.splitToList(roles) - .contains(admin_role); - List metrics = new ArrayList<>(commands.length); - for (CreateMetricCommand command : commands) { - if (!isDelegate) { - if (command.dimensions != null) { - String service = command.dimensions.get(Services.SERVICE_DIMENSION); - if (service != null && Services.isReserved(service)) { - throw Exceptions - .forbidden("Project %s cannot POST metrics for the hpcs service", tenantId); - } - } - if (Validation.isCrossProjectRequest(crossTenantId, tenantId)) { - throw Exceptions.forbidden("Project %s cannot POST cross tenant metrics", tenantId); - } - } - command.validate(!isAdmin); - - metrics.add(command.toMetric()); - } - - service.create(metrics, tenantId, crossTenantId); - } - - @GET - @Timed - @Produces(MediaType.APPLICATION_JSON) - public Object getMetrics(@Context UriInfo uriInfo, @HeaderParam("X-Tenant-Id") String tenantId, - @HeaderParam("X-Roles") String roles, - @QueryParam("name") String name, - @QueryParam("dimensions") String dimensionsStr, - @QueryParam("offset") String offset, - @QueryParam("limit") String limit, - @QueryParam("start_time") String startTimeStr, - @QueryParam("end_time") String endTimeStr, - @QueryParam("tenant_id") String crossTenantId) throws Exception - { - Map - dimensions = - Strings.isNullOrEmpty(dimensionsStr) ? null : Validation - .parseAndValidateDimensions(dimensionsStr); - MetricNameValidation.validate(name, false); - - DateTime startTime = Validation.parseAndValidateDate(startTimeStr, "start_time", false); - DateTime endTime = Validation.parseAndValidateDate(endTimeStr, "end_time", false); - - if ((startTime != null) && (endTime != null)) { - // - // If both times are specified, make sure start is before end - // - Validation.validateTimes(startTime, endTime); - } - - final String queryTenantId = Validation.getQueryProject(roles, crossTenantId, tenantId, - admin_role); - final int paging_limit = this.persistUtils.getLimit(limit); - final List resources = metricRepo.find( - queryTenantId, - name, - dimensions, - startTime, - endTime, - offset, - paging_limit - ); - - return Links.paginate(paging_limit, resources, uriInfo); - } - - @GET - @Path("/names") - @Timed - @Produces(MediaType.APPLICATION_JSON) - public Object getMetricNames(@Context UriInfo uriInfo, - @HeaderParam("X-Tenant-Id") String tenantId, - @HeaderParam("X-Roles") String roles, - @QueryParam("dimensions") String dimensionsStr, - @QueryParam("offset") String offset, - @QueryParam("limit") String limit, - @QueryParam("tenant_id") String crossTenantId) throws Exception - { - Map - dimensions = - Strings.isNullOrEmpty(dimensionsStr) ? 
null : Validation - .parseAndValidateDimensions(dimensionsStr); - - String queryTenantId = Validation.getQueryProject(roles, crossTenantId, tenantId, admin_role); - - final int paging_limit = this.persistUtils.getLimit(limit); - final List resources = metricRepo.findNames( - queryTenantId, - dimensions, - offset, - paging_limit - ); - return Links.paginate(paging_limit, resources, uriInfo); - } - -} diff --git a/java/src/main/java/monasca/api/resource/NotificationMethodResource.java b/java/src/main/java/monasca/api/resource/NotificationMethodResource.java deleted file mode 100644 index f855464a2..000000000 --- a/java/src/main/java/monasca/api/resource/NotificationMethodResource.java +++ /dev/null @@ -1,175 +0,0 @@ -/* - * (C) Copyright 2014-2016 Hewlett Packard Enterprise Development LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.resource; - -import com.codahale.metrics.annotation.Timed; -import com.google.common.base.Strings; - -import java.io.UnsupportedEncodingException; -import java.net.URI; -import java.util.Arrays; -import java.util.List; - -import javax.inject.Inject; -import javax.validation.Valid; -import javax.ws.rs.Consumes; -import javax.ws.rs.DELETE; -import javax.ws.rs.GET; -import javax.ws.rs.HeaderParam; -import javax.ws.rs.POST; -import javax.ws.rs.PUT; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.UriInfo; - -import monasca.api.ApiConfig; -import monasca.api.app.command.CreateNotificationMethodCommand; -import monasca.api.app.command.PatchNotificationMethodCommand; -import monasca.api.app.command.UpdateNotificationMethodCommand; -import monasca.api.app.validation.NotificationMethodValidation; -import monasca.api.app.validation.Validation; -import monasca.api.domain.model.notificationmethod.NotificationMethod; -import monasca.api.domain.model.notificationmethod.NotificationMethodRepo; -import monasca.api.infrastructure.persistence.PersistUtils; -import monasca.api.resource.annotation.PATCH; - -/** - * Notification Method resource implementation. - */ -@Path("/v2.0/notification-methods") -public class NotificationMethodResource { - private final NotificationMethodRepo repo; - private final PersistUtils persistUtils; - private final static List ALLOWED_SORT_BY = Arrays.asList("id", "name", "type", - "address", "updated_at", - "created_at"); - - private final List validPeriods; - - - @Inject - public NotificationMethodResource(ApiConfig config, NotificationMethodRepo repo, - PersistUtils persistUtils) { - this.repo = repo; - this.persistUtils = persistUtils; - this.validPeriods = config.validNotificationPeriods == null ? 
Arrays.asList(0, 60): - config.validNotificationPeriods; - - } - - @POST - @Timed - @Consumes(MediaType.APPLICATION_JSON) - @Produces(MediaType.APPLICATION_JSON) - public Response create(@Context UriInfo uriInfo, @HeaderParam("X-Tenant-Id") String tenantId, - @Valid CreateNotificationMethodCommand command) { - command.validate(this.validPeriods); - - NotificationMethod notificationMethod = - Links.hydrate(repo.create(tenantId, command.name, command.type, - command.address, command.getConvertedPeriod()), uriInfo, - false); - return Response.created(URI.create(notificationMethod.getId())).entity(notificationMethod) - .build(); - } - - @GET - @Timed - @Produces(MediaType.APPLICATION_JSON) - public Object list(@Context UriInfo uriInfo, @HeaderParam("X-Tenant-Id") String tenantId, - @QueryParam("sort_by") String sortByStr, - @QueryParam("offset") String offset, - @QueryParam("limit") String limit) throws UnsupportedEncodingException { - - List sortByList = Validation.parseAndValidateSortBy(sortByStr, ALLOWED_SORT_BY); - if (!Strings.isNullOrEmpty(offset)) { - Validation.parseAndValidateNumber(offset, "offset"); - } - - final int paging_limit = this.persistUtils.getLimit(limit); - final List resources = repo.find(tenantId, sortByList, offset, - paging_limit); - return Links.paginate(paging_limit, - Links.hydrate(resources, uriInfo), - uriInfo); - - } - - @GET - @Timed - @Path("/{notification_method_id}") - @Produces(MediaType.APPLICATION_JSON) - public NotificationMethod get(@Context UriInfo uriInfo, - @HeaderParam("X-Tenant-Id") String tenantId, - @PathParam("notification_method_id") String notificationMethodId) { - return Links.hydrate(repo.findById(tenantId, notificationMethodId), uriInfo, true); - } - - @PUT - @Timed - @Path("/{notification_method_id}") - @Consumes(MediaType.APPLICATION_JSON) - @Produces(MediaType.APPLICATION_JSON) - public NotificationMethod update(@Context UriInfo uriInfo, - @HeaderParam("X-Tenant-Id") String tenantId, - @PathParam("notification_method_id") String notificationMethodId, - @Valid UpdateNotificationMethodCommand command) { - command.validate(this.validPeriods); - - return Links.hydrate( - repo.update(tenantId, notificationMethodId, command.name, command.type, - command.address, command.getConvertedPeriod()), - uriInfo, true); - } - - @PATCH - @Timed - @Path("/{notification_method_id}") - @Consumes(MediaType.APPLICATION_JSON) - @Produces(MediaType.APPLICATION_JSON) - public NotificationMethod patch(@Context UriInfo uriInfo, - @HeaderParam("X-Tenant-Id") String tenantId, - @PathParam("notification_method_id") String notificationMethodId, - @Valid PatchNotificationMethodCommand command) { - NotificationMethod originalNotificationMethod = repo.findById(tenantId, notificationMethodId); - String name = command.name == null ? originalNotificationMethod.getName() - : command.name; - String type = command.type == null ? originalNotificationMethod.getType() - : command.type; - String address = command.address == null ? originalNotificationMethod.getAddress() - : command.address; - int period = command.period == null ? 
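- // PATCH semantics: any field omitted from the request keeps the value already stored for this notification method.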
originalNotificationMethod.getPeriod() - : command.getConvertedPeriod(); - - NotificationMethodValidation.validate(type, address, period, this.validPeriods); - - return Links.hydrate( - repo.update(tenantId, notificationMethodId, name, type, - address, period), - uriInfo, true); - } - - @DELETE - @Timed - @Path("/{notification_method_id}") - public void delete(@HeaderParam("X-Tenant-Id") String tenantId, - @PathParam("notification_method_id") String notificationMethodId) { - repo.deleteById(tenantId, notificationMethodId); - } -} diff --git a/java/src/main/java/monasca/api/resource/NotificationMethodTypesResource.java b/java/src/main/java/monasca/api/resource/NotificationMethodTypesResource.java deleted file mode 100644 index 707f9b0ea..000000000 --- a/java/src/main/java/monasca/api/resource/NotificationMethodTypesResource.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * (C) Copyright 2016 Hewlett Packard Enterprise Development LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.resource; - -import java.io.UnsupportedEncodingException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -import javax.inject.Inject; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.UriInfo; - -import com.codahale.metrics.annotation.Timed; - -import monasca.api.ApiConfig; -import monasca.api.domain.model.notificationmethod.NotificationMethodType; -import monasca.api.domain.model.notificationmethod.NotificationMethodTypesRepo; -import monasca.api.infrastructure.persistence.PersistUtils; - - -/** - * Notification Method resource implementation. 
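- * Lists the notification method types supported by the configured repository.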
- */ -@Path("/v2.0/notification-methods/types") -public class NotificationMethodTypesResource { - - - NotificationMethodTypesRepo repo = null; - private final PersistUtils persistUtils; - - - @Inject - public NotificationMethodTypesResource(ApiConfig config, NotificationMethodTypesRepo repo, - PersistUtils persistUtils) { - this.repo = repo; - this.persistUtils = persistUtils; - } - - @GET - @Timed - @Produces(MediaType.APPLICATION_JSON) - public Object list(@Context UriInfo uriInfo, @QueryParam("sort_by") String sortByStr, - @QueryParam("offset") String offset, - @QueryParam("limit") String limit) throws UnsupportedEncodingException { - - List resources = new ArrayList(); - for (String method_type: repo.listNotificationMethodTypes()){ - resources.add(new NotificationMethodType(method_type)); - } - - final int paging_limit = this.persistUtils.getLimit(limit); - return Links.paginate(paging_limit, resources, uriInfo); - } -} diff --git a/java/src/main/java/monasca/api/resource/StatisticResource.java b/java/src/main/java/monasca/api/resource/StatisticResource.java deleted file mode 100644 index ba6896ed0..000000000 --- a/java/src/main/java/monasca/api/resource/StatisticResource.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright (c) 2014,2016 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.resource; - -import static monasca.api.app.validation.Validation.DEFAULT_ADMIN_ROLE; - -import com.google.common.base.Splitter; -import com.google.common.base.Strings; - -import com.codahale.metrics.annotation.Timed; - -import org.joda.time.DateTime; - -import java.util.List; -import java.util.Map; - -import javax.inject.Inject; -import javax.ws.rs.DefaultValue; -import javax.ws.rs.GET; -import javax.ws.rs.HeaderParam; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.UriInfo; - -import monasca.api.app.validation.MetricNameValidation; -import monasca.api.ApiConfig; -import monasca.api.app.validation.Validation; -import monasca.api.domain.model.statistic.StatisticRepo; -import monasca.api.infrastructure.persistence.PersistUtils; - -// import monasca.common.util.stats.Statistics; - -/** - * Statistics resource implementation. - */ -@Path("/v2.0/metrics/statistics") -public class StatisticResource { - private static final Splitter COMMA_SPLITTER = Splitter.on(',').omitEmptyStrings().trimResults(); - private final String admin_role; - private final StatisticRepo repo; - private final PersistUtils persistUtils; - - @Inject - public StatisticResource(ApiConfig config, StatisticRepo repo, PersistUtils persistUtils) { - this.admin_role = (config.middleware == null || config.middleware.adminRole == null) - ? 
DEFAULT_ADMIN_ROLE : config.middleware.adminRole; - this.repo = repo; - this.persistUtils = persistUtils; - } - - @GET - @Timed - @Produces(MediaType.APPLICATION_JSON) - - public Object get( - @Context UriInfo uriInfo, - @HeaderParam("X-Tenant-Id") String tenantId, - @HeaderParam("X-Roles") String roles, - @QueryParam("name") String name, - @QueryParam("dimensions") String dimensionsStr, - @QueryParam("start_time") String startTimeStr, - @QueryParam("end_time") String endTimeStr, - @QueryParam("statistics") String statisticsStr, - @DefaultValue("300") @QueryParam("period") String periodStr, - @QueryParam("offset") String offset, - @QueryParam("limit") String limit, - @QueryParam("tenant_id") String crossTenantId, - @QueryParam("merge_metrics") String mergeMetricsFlag, - @QueryParam("group_by") String groupByStr) throws Exception { - - // Validate query parameters - Validation.validateNotNullOrEmpty(name, "name"); - DateTime startTime = Validation.parseAndValidateDate(startTimeStr, "start_time", true); - DateTime endTime = Validation.parseAndValidateDate(endTimeStr, "end_time", false); - Validation.validateTimes(startTime, endTime); - Validation.validateNotNullOrEmpty(statisticsStr, "statistics"); - int period = Validation.parseAndValidateNumber(periodStr, "period"); - List statistics = - Validation.parseValidateAndNormalizeStatistics(COMMA_SPLITTER.split(statisticsStr)); - Map dimensions = - Strings.isNullOrEmpty(dimensionsStr) ? null : Validation - .parseAndValidateDimensions(dimensionsStr); - MetricNameValidation.validate(name, true); - Boolean mergeMetricsFlagBool = Validation.validateAndParseMergeMetricsFlag(mergeMetricsFlag); - List groupBy = Validation.parseAndValidateMetricsGroupBy(groupByStr); - - String queryTenantId = Validation.getQueryProject(roles, crossTenantId, tenantId, admin_role); - - return Links.paginateMeasurements(this.persistUtils.getLimit(limit), - repo.find(queryTenantId, name, dimensions, startTime, endTime, - statistics, period, offset, - this.persistUtils.getLimit(limit), - mergeMetricsFlagBool, groupBy), - uriInfo); - } - -} diff --git a/java/src/main/java/monasca/api/resource/VersionResource.java b/java/src/main/java/monasca/api/resource/VersionResource.java deleted file mode 100644 index 7d79f9761..000000000 --- a/java/src/main/java/monasca/api/resource/VersionResource.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
- */ -package monasca.api.resource; - -import com.codahale.metrics.annotation.Timed; - -import java.io.UnsupportedEncodingException; - -import javax.inject.Inject; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.UriInfo; - -import monasca.api.domain.model.version.Version; -import monasca.api.domain.model.version.VersionRepo; -import monasca.api.infrastructure.persistence.PersistUtils; - -/** - * Version resource implementation. - */ -@Path("/") -@Produces(MediaType.APPLICATION_JSON) -public class VersionResource { - private final VersionRepo repository; - private final PersistUtils persistUtils; - - @Inject - public VersionResource(VersionRepo repository, - PersistUtils persistUtils) { - this.repository = repository; - this.persistUtils = persistUtils; - } - - @GET - @Timed - public Object list(@Context UriInfo uriInfo, - @QueryParam("offset") String offset, - @QueryParam("limit") String limit) throws UnsupportedEncodingException { - - return Links.paginate(this.persistUtils.getLimit(limit), - Links.hydrate(repository.find(), uriInfo), uriInfo); - } - - @GET - @Timed - @Path("{version_id}") - public Version get(@Context UriInfo uriInfo, @PathParam("version_id") String versionId) { - return Links.hydrate(repository.findById(versionId), uriInfo, true); - } -} diff --git a/java/src/main/java/monasca/api/resource/annotation/PATCH.java b/java/src/main/java/monasca/api/resource/annotation/PATCH.java deleted file mode 100644 index 60c012939..000000000 --- a/java/src/main/java/monasca/api/resource/annotation/PATCH.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.resource.annotation; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -import javax.ws.rs.HttpMethod; - -@Target({ElementType.METHOD}) -@Retention(RetentionPolicy.RUNTIME) -@HttpMethod("PATCH") -public @interface PATCH { -} diff --git a/java/src/main/java/monasca/api/resource/exception/ConstraintViolationExceptionMapper.java b/java/src/main/java/monasca/api/resource/exception/ConstraintViolationExceptionMapper.java deleted file mode 100644 index 84591ce01..000000000 --- a/java/src/main/java/monasca/api/resource/exception/ConstraintViolationExceptionMapper.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.resource.exception; - -import io.dropwizard.jersey.validation.ValidationErrorMessage; - -import javax.validation.ConstraintViolationException; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import javax.ws.rs.ext.ExceptionMapper; -import javax.ws.rs.ext.Provider; - -import monasca.api.resource.exception.Exceptions.FaultType; - -@Provider -public class ConstraintViolationExceptionMapper implements - ExceptionMapper { - private static final int UNPROCESSABLE_ENTITY = 422; - - @Override - public Response toResponse(ConstraintViolationException exception) { - final ValidationErrorMessage message = - new ValidationErrorMessage(exception.getConstraintViolations()); - String msg = - message.getErrors().isEmpty() ? exception.getMessage() : message.getErrors().toString(); - return Response.status(UNPROCESSABLE_ENTITY).type(MediaType.APPLICATION_JSON) - .entity(Exceptions.buildLoggedErrorMessage(FaultType.UNPROCESSABLE_ENTITY, msg)).build(); - } -} diff --git a/java/src/main/java/monasca/api/resource/exception/EntityExistsExceptionMapper.java b/java/src/main/java/monasca/api/resource/exception/EntityExistsExceptionMapper.java deleted file mode 100644 index 4868a0080..000000000 --- a/java/src/main/java/monasca/api/resource/exception/EntityExistsExceptionMapper.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.resource.exception; - -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.Response.Status; -import javax.ws.rs.ext.ExceptionMapper; -import javax.ws.rs.ext.Provider; - -import monasca.api.domain.exception.EntityExistsException; -import monasca.api.resource.exception.Exceptions.FaultType; - -@Provider -public class EntityExistsExceptionMapper implements ExceptionMapper { - @Override - public Response toResponse(EntityExistsException e) { - return Response.status(Status.CONFLICT).type(MediaType.APPLICATION_JSON) - .entity(Exceptions.buildLoggedErrorMessage(FaultType.CONFLICT, e.getMessage())).build(); - } -} diff --git a/java/src/main/java/monasca/api/resource/exception/EntityNotFoundExceptionMapper.java b/java/src/main/java/monasca/api/resource/exception/EntityNotFoundExceptionMapper.java deleted file mode 100644 index bd3b8d911..000000000 --- a/java/src/main/java/monasca/api/resource/exception/EntityNotFoundExceptionMapper.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.resource.exception; - -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.Response.Status; -import javax.ws.rs.ext.ExceptionMapper; -import javax.ws.rs.ext.Provider; - -import monasca.api.domain.exception.EntityNotFoundException; -import monasca.api.resource.exception.Exceptions.FaultType; - -@Provider -public class EntityNotFoundExceptionMapper implements ExceptionMapper { - @Override - public Response toResponse(EntityNotFoundException e) { - return Response.status(Status.NOT_FOUND).type(MediaType.APPLICATION_JSON) - .entity(Exceptions.buildLoggedErrorMessage(FaultType.NOT_FOUND, e.getMessage())).build(); - } -} diff --git a/java/src/main/java/monasca/api/resource/exception/ErrorMessage.java b/java/src/main/java/monasca/api/resource/exception/ErrorMessage.java deleted file mode 100644 index 1b5a7160c..000000000 --- a/java/src/main/java/monasca/api/resource/exception/ErrorMessage.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.resource.exception; - -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.base.Preconditions; - -public class ErrorMessage { - public int code; - public String message; - public String details; - @JsonProperty("internal_code") - public String internalCode; - - ErrorMessage() {} - - public ErrorMessage(int code, String message, String details, String internalCode) { - Preconditions.checkNotNull(internalCode, "internalCode"); - - this.code = code; - this.message = message == null ? "" : message; - this.details = details == null ? "" : details; - this.internalCode = internalCode; - } -} diff --git a/java/src/main/java/monasca/api/resource/exception/Exceptions.java b/java/src/main/java/monasca/api/resource/exception/Exceptions.java deleted file mode 100644 index bf1e90301..000000000 --- a/java/src/main/java/monasca/api/resource/exception/Exceptions.java +++ /dev/null @@ -1,169 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.resource.exception; - -import java.util.Random; - -import javax.annotation.Nullable; -import javax.ws.rs.WebApplicationException; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.Response.Status; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.PropertyNamingStrategy; -import com.google.common.base.Splitter; - -/** - * Exception factory methods. - */ -public final class Exceptions { - private static final Logger LOG = LoggerFactory.getLogger(Exceptions.class); - private static final ObjectMapper OBJECT_MAPPER; - private static final Splitter LINE_SPLITTER = Splitter.on("\n").trimResults(); - private static final Random RANDOM = new Random(); - - static { - OBJECT_MAPPER = new ObjectMapper(); - OBJECT_MAPPER - .setPropertyNamingStrategy(PropertyNamingStrategy.CAMEL_CASE_TO_LOWER_CASE_WITH_UNDERSCORES); - } - - public enum FaultType { - - SERVER_ERROR(Status.INTERNAL_SERVER_ERROR, true), - BAD_REQUEST(Status.BAD_REQUEST, true), - UNAUTHORIZED(Status.UNAUTHORIZED, false), - NOT_FOUND(Status.NOT_FOUND, true), - CONFLICT(Status.CONFLICT, true), - UNPROCESSABLE_ENTITY(422, true), - FORBIDDEN(Status.FORBIDDEN, true); - - public final int statusCode; - public final boolean loggable; - - FaultType(int statusCode, boolean loggable) { - this.statusCode = statusCode; - this.loggable = loggable; - } - - FaultType(Status status, boolean loggable) { - this.statusCode = status.getStatusCode(); - this.loggable = loggable; - } - - @Override - public String toString() { - return name().toLowerCase(); - } - } - - private static class WebAppException extends WebApplicationException { - private static final long serialVersionUID = 1L; - - public WebAppException(FaultType faultType, String message) { - super(Response.status(faultType.statusCode).entity(message).type(MediaType.APPLICATION_JSON) - .build()); - } - } - - private Exceptions() {} - - public static WebApplicationException badRequest(String msg, Object... args) { - return new WebAppException(FaultType.BAD_REQUEST, buildLoggedErrorMessage( - FaultType.BAD_REQUEST, msg, args)); - } - - /** - * Builds and returns an error message containing an error code, and logs the message with the - * corresponding error code. - */ - public static String buildLoggedErrorMessage(FaultType faultType, String message, Object... args) { - return buildLoggedErrorMessage(faultType, - args == null || args.length == 0 ? message : String.format(message, args), null, null); - } - - /** - * Builds and returns an error message containing an error code, and logs the message with the - * corresponding error code. 
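- * The returned string is a JSON object keyed by the fault type name.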
- */ - public static String buildLoggedErrorMessage(FaultType faultType, String message, - @Nullable String details, @Nullable Throwable exception) { - String errorCode = Long.toHexString(RANDOM.nextLong()); - - if (faultType.loggable) { - String withoutDetails = "{} {} - {}"; - String withDetails = "{} {} - {} {}"; - - if (details == null) { - if (exception == null) - LOG.error(withoutDetails, faultType.name(), errorCode, message); - else - LOG.error(withoutDetails, faultType.name(), errorCode, message, exception); - } else { - if (exception == null) - LOG.error(withDetails, faultType.name(), errorCode, message, details); - else - LOG.error(withDetails, faultType.name(), errorCode, message, details, exception); - } - } - - try { - StringBuilder str = new StringBuilder("{\""); - str.append(faultType.toString()); - str.append("\":"); - str.append(OBJECT_MAPPER.writeValueAsString(new ErrorMessage(faultType.statusCode, message, - details, errorCode))); - str.append("}"); - return str.toString(); - } catch (JsonProcessingException bestEffort) { - return null; - } - } - - public static WebApplicationException forbidden(String msg, Object... args) { - return new WebAppException(FaultType.FORBIDDEN, buildLoggedErrorMessage(FaultType.FORBIDDEN, - msg, args)); - } - - /** - * Returns the first line of a stacktrace message. - */ - public static String stripLocationFromStacktrace(String message) { - for (String s : LINE_SPLITTER.split(message)) - return s; - return message; - } - - /** - * Indicates that the content of a POSTed request entity is invalid. - */ - public static WebApplicationException unprocessableEntity(String msg, Object... args) { - return new WebAppException(FaultType.UNPROCESSABLE_ENTITY, buildLoggedErrorMessage( - FaultType.UNPROCESSABLE_ENTITY, msg, args)); - } - - /** - * Indicates that the content of a POSTed request entity is invalid. - */ - public static WebApplicationException unprocessableEntityDetails(String msg, String details, - Exception exception) { - return new WebAppException(FaultType.UNPROCESSABLE_ENTITY, buildLoggedErrorMessage( - FaultType.UNPROCESSABLE_ENTITY, msg, details, exception)); - } -} diff --git a/java/src/main/java/monasca/api/resource/exception/IllegalArgumentExceptionMapper.java b/java/src/main/java/monasca/api/resource/exception/IllegalArgumentExceptionMapper.java deleted file mode 100644 index d2adb712f..000000000 --- a/java/src/main/java/monasca/api/resource/exception/IllegalArgumentExceptionMapper.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License.
- */ -package monasca.api.resource.exception; - -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.Response.Status; -import javax.ws.rs.ext.ExceptionMapper; -import javax.ws.rs.ext.Provider; - -import monasca.api.resource.exception.Exceptions.FaultType; - -@Provider -public class IllegalArgumentExceptionMapper implements ExceptionMapper { - @Override - public Response toResponse(IllegalArgumentException e) { - return Response.status(Status.BAD_REQUEST).type(MediaType.APPLICATION_JSON) - .entity(Exceptions.buildLoggedErrorMessage(FaultType.BAD_REQUEST, e.getMessage())).build(); - } -} diff --git a/java/src/main/java/monasca/api/resource/exception/InvalidEntityExceptionMapper.java b/java/src/main/java/monasca/api/resource/exception/InvalidEntityExceptionMapper.java deleted file mode 100644 index 6792582ca..000000000 --- a/java/src/main/java/monasca/api/resource/exception/InvalidEntityExceptionMapper.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.resource.exception; - -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import javax.ws.rs.ext.ExceptionMapper; -import javax.ws.rs.ext.Provider; - -import monasca.api.domain.exception.InvalidEntityException; -import monasca.api.resource.exception.Exceptions.FaultType; - -@Provider -public class InvalidEntityExceptionMapper implements ExceptionMapper { - @Override - public Response toResponse(InvalidEntityException e) { - return Response.status(FaultType.BAD_REQUEST.statusCode).type(MediaType.APPLICATION_JSON) - .entity(Exceptions.buildLoggedErrorMessage(FaultType.BAD_REQUEST, e.getMessage())).build(); - } -} diff --git a/java/src/main/java/monasca/api/resource/exception/JsonMappingExceptionManager.java b/java/src/main/java/monasca/api/resource/exception/JsonMappingExceptionManager.java deleted file mode 100644 index 43d8b6cad..000000000 --- a/java/src/main/java/monasca/api/resource/exception/JsonMappingExceptionManager.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
- */ -package monasca.api.resource.exception; - -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import javax.ws.rs.ext.ExceptionMapper; -import javax.ws.rs.ext.Provider; - -import com.fasterxml.jackson.databind.JsonMappingException; -import monasca.api.resource.exception.Exceptions.FaultType; - -/** - * Adapted from Dropwizard's JsonMappingExceptionManager. - */ -@Provider -public class JsonMappingExceptionManager implements ExceptionMapper<JsonMappingException> { - @Override - public Response toResponse(JsonMappingException exception) { - return Response - .status(FaultType.UNPROCESSABLE_ENTITY.statusCode) - .type(MediaType.APPLICATION_JSON) - .entity( - Exceptions.buildLoggedErrorMessage(FaultType.UNPROCESSABLE_ENTITY, - "Unable to process the provided JSON", - Exceptions.stripLocationFromStacktrace(exception.getMessage()), null)).build(); - } -} diff --git a/java/src/main/java/monasca/api/resource/exception/JsonProcessingExceptionMapper.java b/java/src/main/java/monasca/api/resource/exception/JsonProcessingExceptionMapper.java deleted file mode 100644 index 35a113533..000000000 --- a/java/src/main/java/monasca/api/resource/exception/JsonProcessingExceptionMapper.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.resource.exception; - -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.Response.Status; -import javax.ws.rs.ext.ExceptionMapper; -import javax.ws.rs.ext.Provider; - -import com.fasterxml.jackson.core.JsonGenerationException; -import com.fasterxml.jackson.core.JsonProcessingException; -import monasca.api.resource.exception.Exceptions.FaultType; - -/** - * Adapted from Dropwizard's JsonProcessingExceptionMapper. - */ -@Provider -public class JsonProcessingExceptionMapper implements ExceptionMapper<JsonProcessingException> { - @Override - public Response toResponse(JsonProcessingException exception) { - /* - * If the error is in the JSON generation, it's a server error. - */ - if (exception instanceof JsonGenerationException) - return Response - .status(Status.INTERNAL_SERVER_ERROR) - .type(MediaType.APPLICATION_JSON) - .entity( - Exceptions.buildLoggedErrorMessage(FaultType.SERVER_ERROR, "Error generating JSON", - null, exception)).build(); - - final String message = exception.getMessage(); - - /* - * If we can't deserialize the JSON because someone forgot a no-arg constructor, it's a server - * error and we should inform the developer. - */ - if (message.startsWith("No suitable constructor found")) - return Response - .status(Status.INTERNAL_SERVER_ERROR) - .type(MediaType.APPLICATION_JSON) - .entity( - Exceptions.buildLoggedErrorMessage(FaultType.SERVER_ERROR, - "Unable to deserialize the provided JSON", null, exception)).build(); - - /* - * Otherwise, it's those pesky users.
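- * The request body itself is malformed, so this maps to 400 Bad Request.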
- */ - return Response - .status(Status.BAD_REQUEST) - .type(MediaType.APPLICATION_JSON) - .entity( - Exceptions.buildLoggedErrorMessage(FaultType.BAD_REQUEST, - "Unable to process the provided JSON", - Exceptions.stripLocationFromStacktrace(message), exception)).build(); - } -} diff --git a/java/src/main/java/monasca/api/resource/exception/MultipleMetricsExceptionMapper.java b/java/src/main/java/monasca/api/resource/exception/MultipleMetricsExceptionMapper.java deleted file mode 100644 index 8b64a335b..000000000 --- a/java/src/main/java/monasca/api/resource/exception/MultipleMetricsExceptionMapper.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright (c) 2015 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.resource.exception; - -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import javax.ws.rs.ext.ExceptionMapper; -import javax.ws.rs.ext.Provider; - -import monasca.api.domain.exception.MultipleMetricsException; - -@Provider -public class MultipleMetricsExceptionMapper implements ExceptionMapper { - - private static final String - MULTIPLE_METRICS_ERROR_MSG = - "Found multiple metrics matching metric name and dimensions. " - + "Please refine your search criteria using a unique metric name or additional dimensions. " - + "Alternatively, you may specify 'merge_metrics=true' as a query param to combine " - + "all metrics matching search criteria into a single series."; - - - @Override - public Response toResponse(MultipleMetricsException exception) { - - String details = String.format("search criteria: {metric name: %s, dimensions: %s}", - exception.getMetricName(), exception.getDimensions()); - - return Response.status(Response.Status.CONFLICT).type(MediaType.APPLICATION_JSON).entity( - Exceptions - .buildLoggedErrorMessage(Exceptions.FaultType.CONFLICT, MULTIPLE_METRICS_ERROR_MSG, - details, null)).build(); - - } -} diff --git a/java/src/main/java/monasca/api/resource/exception/ThrowableExceptionMapper.java b/java/src/main/java/monasca/api/resource/exception/ThrowableExceptionMapper.java deleted file mode 100644 index 9f1a9ce61..000000000 --- a/java/src/main/java/monasca/api/resource/exception/ThrowableExceptionMapper.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
- */ -package monasca.api.resource.exception; - -import javax.ws.rs.WebApplicationException; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.Response.Status; -import javax.ws.rs.ext.ExceptionMapper; -import javax.ws.rs.ext.Provider; - -import monasca.api.resource.exception.Exceptions.FaultType; - -/** - * Adapted from Dropwizard's LoggingExceptionMapper. - * - * @param <E> Exception type - */ -@Provider -public class ThrowableExceptionMapper<E extends Throwable> implements ExceptionMapper<E> { - @Override - public Response toResponse(E exception) { - if (exception instanceof WebApplicationException) - return ((WebApplicationException) exception).getResponse(); - - return Response - .status(Status.INTERNAL_SERVER_ERROR) - .type(MediaType.APPLICATION_JSON) - .entity( - Exceptions.buildLoggedErrorMessage(FaultType.SERVER_ERROR, - "An internal server error occurred", null, exception)).build(); - } -} diff --git a/java/src/main/java/monasca/api/resource/serialization/SubAlarmExpressionSerializer.java b/java/src/main/java/monasca/api/resource/serialization/SubAlarmExpressionSerializer.java deleted file mode 100644 index 7836c2649..000000000 --- a/java/src/main/java/monasca/api/resource/serialization/SubAlarmExpressionSerializer.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.resource.serialization; - -import java.io.IOException; -import java.util.Collections; - -import com.fasterxml.jackson.core.JsonGenerator; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonSerializer; -import com.fasterxml.jackson.databind.SerializerProvider; -import monasca.common.model.alarm.AlarmSubExpression; - -public class SubAlarmExpressionSerializer extends JsonSerializer<AlarmSubExpression> { - @Override - public void serialize(AlarmSubExpression value, JsonGenerator jgen, SerializerProvider provider) - throws IOException, JsonProcessingException { - jgen.writeStartObject(); - jgen.writeStringField("function", value.getFunction().name()); - jgen.writeStringField("metric_name", value.getMetricDefinition().name); - jgen.writeObjectField( - "dimensions", - value.getMetricDefinition().dimensions == null ?
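- // A null dimensions map is serialized as an empty JSON object so clients always receive an object here.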
Collections.emptyMap() : value - .getMetricDefinition().dimensions); - jgen.writeStringField("operator", value.getOperator().name()); - jgen.writeNumberField("threshold", value.getThreshold()); - jgen.writeNumberField("period", value.getPeriod()); - jgen.writeNumberField("periods", value.getPeriods()); - jgen.writeEndObject(); - } - - @Override - public Class<AlarmSubExpression> handledType() { - return AlarmSubExpression.class; - } -} diff --git a/java/src/main/resources/api-config.yml b/java/src/main/resources/api-config.yml deleted file mode 100644 index e9b0c4f71..000000000 --- a/java/src/main/resources/api-config.yml +++ /dev/null @@ -1,150 +0,0 @@ -# The region for which all metrics passing through this server will be persisted -region: useast - -# Whether this server is running on a secure port -accessedViaHttps: false - -# Topic for publishing metrics to -metricsTopic: metrics - -# Topic for publishing domain events to -eventsTopic: events - -# Maximum rows (Mysql and Vertica) or points (Influxdb) to return when listing elements. -# If not set or set to any value less than or equal to 0, then defaults to 10,000. -maxQueryLimit: 10000 - -# Valid periods for notification -validNotificationPeriods: - - 60 - -kafka: - brokerUris: - - 192.168.10.6:9092 - zookeeperUris: - - 192.168.10.6:2181 - healthCheckTopic: healthcheck - -mysql: - driverClass: com.mysql.jdbc.Driver - url: jdbc:mysql://192.168.10.6:3306/mon?connectTimeout=5000&autoReconnect=true&useLegacyDatetimeCode=false&characterEncoding=utf8 - user: monapi - password: password - maxWaitForConnection: 1s - validationQuery: "/* MyService Health Check */ SELECT 1" - minSize: 8 - maxSize: 32 - checkConnectionWhileIdle: false - checkConnectionOnBorrow: true - -databaseConfiguration: -# databaseType can be (vertica | influxdb) - databaseType: influxdb - -# Uncomment if databaseType is influxDB -influxDB: - maxHttpConnections: 100 - name: mon - replicationFactor: 1 - url: http://192.168.10.6:8086 - user: mon_api - password: password - -# Uncomment if databaseType is vertica -# As of 7/10 there is a bug in the monasca-api that requires this section even if databaseType is set to influxdb -vertica: - driverClass: com.vertica.jdbc.Driver - url: jdbc:vertica://192.168.10.6/mon - user: dbadmin - password: password - maxWaitForConnection: 1s - validationQuery: "/* MyService Health Check */ SELECT 1" - minSize: 4 - maxSize: 32 - checkConnectionWhileIdle: false - # - # vertica database hint to be added to SELECT - # statements. For example, the hint below is used - # to tell vertica that the query can be satisfied - # locally (replicated projection). - # - # dbHint: "/*+KV(01)*/" - dbHint: "" - -middleware: - enabled: true - serverVIP: 192.168.10.6 - serverPort: 5000 - connTimeout: 2000 - connSSLClientAuth: false - connPoolMaxActive: 3 - connPoolMaxIdle: 3 - connPoolEvictPeriod: 600000 - connPoolMinIdleTime: 600000 - connRetryTimes: 2 - connRetryInterval: 50 - defaultAuthorizedRoles: [user, domainuser, domainadmin, monasca-user] - agentAuthorizedRoles: [monasca-agent] - adminAuthMethod: password - adminUser: admin - adminPassword: secretadmin - adminProjectId: - adminProjectName: admin - adminUserDomainId: - adminUserDomainName: - adminToken: - timeToCacheToken: 600 - maxTokenCacheSize: 1048576 - -server: - applicationConnectors: - - type: http - maxRequestHeaderSize: 16KiB # Allow large headers used by keystone tokens - -# Logging settings. -logging: - - # The default level of all loggers. Can be OFF, ERROR, WARN, INFO, DEBUG, TRACE, or ALL.
- level: debug - - # Logger-specific levels. - loggers: - - # Sets the level for 'com.example.app' to DEBUG. - com.example.app: DEBUG - - appenders: - - type: console - threshold: ALL - timeZone: UTC - target: stdout - logFormat: # TODO - - - type: file - currentLogFilename: /var/log/monasca/monasca-api.log - threshold: ALL - archive: true - archivedLogFilenamePattern: /var/log/monasca/monasca-api-%d.log.gz - archivedFileCount: 5 - timeZone: UTC - logFormat: # TODO - - - type: syslog - host: 192.168.10.4 - port: 514 - facility: local0 - threshold: ALL - logFormat: # TODO - -hibernate: - supportEnabled: false - providerClass: com.zaxxer.hikari.hibernate.HikariConnectionProvider - dataSourceClassName: org.postgresql.ds.PGPoolingDataSource - serverName: localhost - portNumber: 5432 - databaseName: mon - user: mon - password: mon - initialConnections: 25 - maxConnections: 100 - autoConfig: validate diff --git a/java/src/test/java/monasca/api/app/AlarmDefinitionServiceTest.java b/java/src/test/java/monasca/api/app/AlarmDefinitionServiceTest.java deleted file mode 100644 index ed78c48e6..000000000 --- a/java/src/test/java/monasca/api/app/AlarmDefinitionServiceTest.java +++ /dev/null @@ -1,611 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
- */ - -package monasca.api.app; - -import com.google.common.collect.BiMap; -import com.google.common.collect.HashBiMap; - -import static org.mockito.Matchers.any; -import static org.mockito.Matchers.anyString; -import static org.mockito.Matchers.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotEquals; -import static org.testng.Assert.assertTrue; -import static org.testng.Assert.fail; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import javax.ws.rs.WebApplicationException; - -import kafka.javaapi.producer.Producer; -import kafka.producer.KeyedMessage; - -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Test; - -import monasca.api.ApiConfig; -import monasca.api.app.AlarmDefinitionService.SubExpressions; -import monasca.api.app.command.UpdateAlarmDefinitionCommand; -import monasca.common.model.alarm.AlarmExpression; -import monasca.common.model.alarm.AlarmSubExpression; -import monasca.common.model.event.AlarmDefinitionUpdatedEvent; -import monasca.common.util.Serialization; -import monasca.api.domain.model.alarm.AlarmRepo; -import monasca.api.domain.model.alarmdefinition.AlarmDefinition; -import monasca.api.domain.model.alarmdefinition.AlarmDefinitionRepo; -import monasca.api.domain.model.notificationmethod.NotificationMethodRepo; -import monasca.api.domain.exception.EntityExistsException; - -@Test -public class AlarmDefinitionServiceTest { - private static final String EXPR2 = "avg(bar{instance_id=777}) > 80"; - - private static final String EXPR1 = "avg(foo{instance_id=123}) > 90"; - - final static String TENANT_ID = "bob"; - - AlarmDefinitionService service; - ApiConfig config; - Producer producer; - AlarmDefinitionRepo repo; - NotificationMethodRepo notificationMethodRepo; - - @BeforeMethod - @SuppressWarnings("unchecked") - protected void beforeMethod() { - config = new ApiConfig(); - producer = mock(Producer.class); - repo = mock(AlarmDefinitionRepo.class); - notificationMethodRepo = mock(NotificationMethodRepo.class); - AlarmRepo alarmRepo = mock(AlarmRepo.class); - service = new AlarmDefinitionService(config, producer, repo, alarmRepo, notificationMethodRepo); - - when( - repo.create(anyString(), anyString(), anyString(), anyString(), anyString(), anyString(), - any(Map.class), any(List.class), any(List.class), any(List.class), any(List.class))) - .thenAnswer(new Answer() { - @Override - public AlarmDefinition answer(InvocationOnMock invocation) throws Throwable { - Object[] args = invocation.getArguments(); - return new AlarmDefinition((String) args[0], (String) args[2], (String) args[3], - (String) args[4], (String) args[5], (List) args[7], true, - (List) args[8], (List) args[9], (List) args[10]); - } - }); - } - - @SuppressWarnings("unchecked") - public void shouldCreate() { - String exprStr = "avg(cpu_utilization{service=hpcs.compute, instance_id=123}) > 90"; - List matchBy = Arrays.asList("service", "instance_id"); - List alarmActions = Arrays.asList("1", "2", "3"); - List okActions = Arrays.asList("2", "3"); - List undeterminedActions = Arrays.asList("3"); - - when(notificationMethodRepo.exists(eq(TENANT_ID), anyString())).thenReturn(true); - - AlarmDefinition 
alarm =
-        service.create(TENANT_ID, "90% CPU", "foo", "LOW", exprStr, AlarmExpression.of(exprStr),
-            matchBy, alarmActions, okActions, undeterminedActions);
-
-    AlarmDefinition expected =
-        new AlarmDefinition(alarm.getId(), "90% CPU", "foo", "LOW", exprStr, matchBy, true,
-            alarmActions, okActions, undeterminedActions);
-    assertEquals(expected, alarm);
-    verify(repo).create(eq(TENANT_ID), anyString(), eq("90% CPU"), eq("foo"), eq("LOW"), eq(exprStr),
-        any(Map.class), eq(matchBy), eq(alarmActions), eq(okActions), eq(undeterminedActions));
-    verify(producer).send(any(KeyedMessage.class));
-  }
-
-  public void updateFailsDueToMatchBy() {
-    final List<String> matchBy = Arrays.asList("hostname", "service");
-    final AlarmDefinition oldAlarmDef = setupInitialAlarmDefinition(matchBy);
-
-    final List<String> newMatchBy = Arrays.asList("service");
-    verifyChangeFails(oldAlarmDef.getId(), oldAlarmDef.getName(), oldAlarmDef.getDescription(),
-        oldAlarmDef.getExpression(), newMatchBy, oldAlarmDef.getSeverity(), oldAlarmDef.isActionsEnabled(),
-        oldAlarmDef.getAlarmActions(), oldAlarmDef.getOkActions(),
-        oldAlarmDef.getUndeterminedActions(), "match_by");
-  }
-
-  public void changeFailsDueToDeletedExpression() {
-    final List<String> matchBy = Arrays.asList("hostname", "service");
-    final AlarmDefinition oldAlarmDef = setupInitialAlarmDefinition(matchBy);
-
-    verifyChangeFails(oldAlarmDef.getId(), oldAlarmDef.getName(), oldAlarmDef.getDescription(),
-        EXPR1, matchBy, oldAlarmDef.getSeverity(), oldAlarmDef.isActionsEnabled(),
-        oldAlarmDef.getAlarmActions(), oldAlarmDef.getOkActions(),
-        oldAlarmDef.getUndeterminedActions(), "subexpressions");
-  }
-
-  public void changeFailsDueToAddedExpression() {
-    final List<String> matchBy = Arrays.asList("hostname", "service");
-    final AlarmDefinition oldAlarmDef = setupInitialAlarmDefinition(matchBy);
-
-    final String newExpression = EXPR1 + " or " + EXPR2 + " or " + "min(cpu.idle) < 10";
-    verifyChangeFails(oldAlarmDef.getId(), oldAlarmDef.getName(), oldAlarmDef.getDescription(),
-        newExpression, matchBy, oldAlarmDef.getSeverity(), oldAlarmDef.isActionsEnabled(),
-        oldAlarmDef.getAlarmActions(), oldAlarmDef.getOkActions(),
-        oldAlarmDef.getUndeterminedActions(), "subexpressions");
-  }
-
-  public void changeFailsDueToChangedMetricName() {
-    final List<String> matchBy = Arrays.asList("hostname", "service");
-    final AlarmDefinition oldAlarmDef = setupInitialAlarmDefinition(matchBy);
-
-    final String newExpression = EXPR1 + " or " + EXPR2.replace("bar", "barz");
-    verifyChangeFails(oldAlarmDef.getId(), oldAlarmDef.getName(), oldAlarmDef.getDescription(),
-        newExpression, matchBy, oldAlarmDef.getSeverity(), oldAlarmDef.isActionsEnabled(),
-        oldAlarmDef.getAlarmActions(), oldAlarmDef.getOkActions(),
-        oldAlarmDef.getUndeterminedActions(), "metric");
-  }
-
-  public void changeFailsDueToChangedMetricDimension() {
-    final List<String> matchBy = Arrays.asList("hostname", "service");
-    final AlarmDefinition oldAlarmDef = setupInitialAlarmDefinition(matchBy);
-
-    final String newExpression = EXPR1 + " or " + EXPR2.replace("777", "888");
-    verifyChangeFails(oldAlarmDef.getId(), oldAlarmDef.getName(), oldAlarmDef.getDescription(),
-        newExpression, matchBy, oldAlarmDef.getSeverity(), oldAlarmDef.isActionsEnabled(),
-        oldAlarmDef.getAlarmActions(), oldAlarmDef.getOkActions(),
-        oldAlarmDef.getUndeterminedActions(), "metric");
-  }
-
-  private void verifyChangeFails(final String id, String name, String description,
-      final String expression, List<String> matchBy, String severity, boolean actionsEnabled,
-      List<String> alarmActions, List<String> okActions, List<String> undeterminedActions,
-      String expected) {
-    UpdateAlarmDefinitionCommand command =
-        new UpdateAlarmDefinitionCommand(name, description, expression, matchBy, severity,
-            actionsEnabled, alarmActions, okActions, undeterminedActions);
-    try {
-      service.update(TENANT_ID, id, AlarmExpression.of(command.expression), command);
-      fail("Update of AlarmDefinition succeeded when it should have failed");
-    }
-    catch (WebApplicationException e) {
-      assertEquals(e.getResponse().getStatus(), 422);
-      assertTrue(e.getResponse().getEntity().toString().contains(expected));
-    }
-
-    try {
-      service.patch(TENANT_ID, id, name, description, severity, expression,
-          AlarmExpression.of(expression), matchBy, actionsEnabled, alarmActions, okActions,
-          undeterminedActions);
-      fail("Patch of AlarmDefinition succeeded when it should have failed");
-    }
-    catch (WebApplicationException e) {
-      assertEquals(e.getResponse().getStatus(), 422);
-      assertTrue(e.getResponse().getEntity().toString().contains(expected));
-    }
-  }
-
-  public void shouldChange() {
-    final List<String> matchBy = Arrays.asList("hostname", "service");
-    final AlarmDefinition oldAlarmDef = setupInitialAlarmDefinition(matchBy);
-
-    final String newExprStr = oldAlarmDef.getExpression().replace("90", "75").replace(" or ", " and ");
-    final List<String> newAlarmActions = Arrays.asList("5", "6", "7");
-    final List<String> newOkActions = Arrays.asList("6", "7");
-    final List<String> newUndeterminedActions = Arrays.asList("7");
-    final String newSeverity = "HIGH";
-    final String newName = "foo bar baz";
-    final String newDescription = "foo bar baz";
-    final boolean newEnabled = false;
-    UpdateAlarmDefinitionCommand command =
-        new UpdateAlarmDefinitionCommand(newName, newDescription, newExprStr, matchBy, newSeverity,
-            newEnabled, newAlarmActions, newOkActions, newUndeterminedActions);
-
-    final AlarmDefinition expected =
-        new AlarmDefinition(oldAlarmDef.getId(), newName, newDescription, newSeverity, newExprStr, matchBy,
-            newEnabled, newAlarmActions, newOkActions, newUndeterminedActions);
-
-    final AlarmDefinition updatedAlarmDef =
-        service.update(TENANT_ID, oldAlarmDef.getId(), AlarmExpression.of(command.expression),
-            command);
-    assertEquals(updatedAlarmDef, expected);
-
-    final AlarmDefinition patchedAlarmDef =
-        service.patch(TENANT_ID, oldAlarmDef.getId(), newName, newDescription, newSeverity,
-            newExprStr, AlarmExpression.of(newExprStr), matchBy, newEnabled, newAlarmActions,
-            newOkActions, newUndeterminedActions);
-    assertEquals(patchedAlarmDef, expected);
-
-    final Map<String, AlarmSubExpression> emptyMap = new HashMap<>();
-    final Map<String, AlarmSubExpression> changedSubExpressions = new HashMap<>();
-    final Map<String, AlarmSubExpression> unchangedSubExpressions = new HashMap<>();
-    changedSubExpressions.put("444", AlarmSubExpression.of("avg(foo{instance_id=123}) > 75"));
-    unchangedSubExpressions.put("555", AlarmSubExpression.of(EXPR2));
-    final AlarmDefinitionUpdatedEvent event =
-        new AlarmDefinitionUpdatedEvent(TENANT_ID, oldAlarmDef.getId(), newName, newDescription,
-            newExprStr, matchBy, newEnabled, newSeverity, emptyMap, changedSubExpressions,
-            unchangedSubExpressions, emptyMap);
-    verify(producer).send(
-        new KeyedMessage(config.eventsTopic,
-            String.valueOf(service.eventCount - 1), Serialization.toJson(event)));
-  }
-
-  public void shouldPatchExpression() {
-    final List<String> matchBy = Arrays.asList("hostname", "service");
-    final AlarmDefinition oldAlarmDef = setupInitialAlarmDefinition(matchBy);
-
-    final String newExprStr = oldAlarmDef.getExpression().replace("90", "75").replace(" or ", " and ");
-    assertNotEquals(newExprStr, oldAlarmDef.getExpression());
-
-    final Map<String, AlarmSubExpression> changedSubExpressions = new HashMap<>();
-    final Map<String, AlarmSubExpression> unchangedSubExpressions = new HashMap<>();
-    changedSubExpressions.put("444", AlarmSubExpression.of(EXPR1.replace("90", "75")));
-    unchangedSubExpressions.put("555", AlarmSubExpression.of(EXPR2));
-
-    patchExpression(newExprStr, oldAlarmDef, changedSubExpressions, unchangedSubExpressions);
-
-    final String newExprStr2 = EXPR2.replace("avg", "min") + " and " + EXPR1.replace("avg", "max");
-    assertNotEquals(newExprStr2, oldAlarmDef.getExpression());
-    changedSubExpressions.clear();
-    unchangedSubExpressions.clear();
-    changedSubExpressions.put("444", AlarmSubExpression.of(EXPR1.replace("avg", "max")));
-    changedSubExpressions.put("555", AlarmSubExpression.of(EXPR2.replace("avg", "min")));
-
-    patchExpression(newExprStr2, oldAlarmDef, changedSubExpressions, unchangedSubExpressions);
-  }
-
-  private void patchExpression(final String newExprStr, final AlarmDefinition oldAlarmDef,
-      final Map<String, AlarmSubExpression> changedSubExpressions,
-      final Map<String, AlarmSubExpression> unchangedSubExpressions) {
-    BiMap<String, AlarmSubExpression> oldExpressions =
-        HashBiMap.create(new HashMap<String, AlarmSubExpression>());
-    final Set<String> oldSubAlarmIds = oldExpressions.keySet();
-    Map<String, AlarmSubExpression> newSubAlarms = new HashMap<>();
-    final AlarmDefinition patchedAlarmDef =
-        service.patch(TENANT_ID, oldAlarmDef.getId(), null, null, null,
-            newExprStr, AlarmExpression.of(newExprStr), null, null, null,
-            null, null);
-
-    final AlarmDefinition expected =
-        new AlarmDefinition(oldAlarmDef.getId(), oldAlarmDef.getName(), oldAlarmDef.getDescription(),
-            oldAlarmDef.getSeverity(), newExprStr, oldAlarmDef.getMatchBy(),
-            oldAlarmDef.isActionsEnabled(), oldAlarmDef.getAlarmActions(),
-            oldAlarmDef.getOkActions(), oldAlarmDef.getUndeterminedActions());
-    assertEquals(patchedAlarmDef, expected);
-
-    final Map<String, AlarmSubExpression> emptyMap = new HashMap<>();
-    final AlarmDefinitionUpdatedEvent event =
-        new AlarmDefinitionUpdatedEvent(TENANT_ID, oldAlarmDef.getId(), oldAlarmDef.getName(),
-            oldAlarmDef.getDescription(), newExprStr, oldAlarmDef.getMatchBy(),
-            oldAlarmDef.isActionsEnabled(), oldAlarmDef.getSeverity(), emptyMap,
-            changedSubExpressions, unchangedSubExpressions, emptyMap);
-    verify(producer).send(
-        new KeyedMessage(config.eventsTopic,
-            String.valueOf(service.eventCount - 1), Serialization.toJson(event)));
-    verify(repo).update(TENANT_ID, oldAlarmDef.getId(), true, oldAlarmDef.getName(),
-        oldAlarmDef.getDescription(), newExprStr, oldAlarmDef.getMatchBy(),
-        oldAlarmDef.getSeverity(), oldAlarmDef.isActionsEnabled(), oldSubAlarmIds,
-        changedSubExpressions, newSubAlarms, null, null, null);
-  }
-
-  public void shouldPatchIndividual() {
-    final List<String> matchBy = Arrays.asList("hostname", "service");
-    final AlarmDefinition oldAlarmDef = setupInitialAlarmDefinition(matchBy);
-
-    final List<String> newAlarmActions = Arrays.asList("5", "6", "7");
-    final List<String> newOkActions = Arrays.asList("6", "7");
-    final List<String> newUndeterminedActions = Arrays.asList("7");
-    final String newSeverity = "HIGH";
-    final String newName = "foo bar baz";
-    final String newDescription = "foo bar baz";
-    final boolean newEnabled = false;
-
-    doPatch(matchBy, oldAlarmDef, newName, newName, null, oldAlarmDef.getDescription(), null,
-        oldAlarmDef.getSeverity(), null, oldAlarmDef.isActionsEnabled(), null,
-        oldAlarmDef.getAlarmActions(), null,
-        oldAlarmDef.getOkActions(), null,
-        oldAlarmDef.getUndeterminedActions());
-
-    doPatch(matchBy, oldAlarmDef, null, oldAlarmDef.getName(), newDescription, newDescription,
-        null, oldAlarmDef.getSeverity(), null, oldAlarmDef.isActionsEnabled(), null,
-        oldAlarmDef.getAlarmActions(), null,
-        oldAlarmDef.getOkActions(), null,
-        oldAlarmDef.getUndeterminedActions());
-
-    doPatch(matchBy, oldAlarmDef, null, oldAlarmDef.getName(), null, oldAlarmDef.getDescription(),
-        newSeverity, newSeverity, null, oldAlarmDef.isActionsEnabled(), null,
-        oldAlarmDef.getAlarmActions(), null,
-        oldAlarmDef.getOkActions(), null,
-        oldAlarmDef.getUndeterminedActions());
-
-    doPatch(matchBy, oldAlarmDef, null, oldAlarmDef.getName(), null, oldAlarmDef.getDescription(),
-        null, oldAlarmDef.getSeverity(), newEnabled, newEnabled, null,
-        oldAlarmDef.getAlarmActions(), null, oldAlarmDef.getOkActions(), null,
-        oldAlarmDef.getUndeterminedActions());
-
-    doPatch(matchBy, oldAlarmDef, null, oldAlarmDef.getName(), null, oldAlarmDef.getDescription(),
-        null, oldAlarmDef.getSeverity(), null, oldAlarmDef.isActionsEnabled(),
-        newAlarmActions, newAlarmActions,
-        null, oldAlarmDef.getOkActions(), null,
-        oldAlarmDef.getUndeterminedActions());
-
-    doPatch(matchBy, oldAlarmDef, null, oldAlarmDef.getName(), null, oldAlarmDef.getDescription(),
-        null, oldAlarmDef.getSeverity(), newEnabled, newEnabled, null,
-        oldAlarmDef.getAlarmActions(), newOkActions, newOkActions, null,
-        oldAlarmDef.getUndeterminedActions());
-
-    doPatch(matchBy, oldAlarmDef, null, oldAlarmDef.getName(), null, oldAlarmDef.getDescription(),
-        null, oldAlarmDef.getSeverity(), newEnabled, newEnabled, null,
-        oldAlarmDef.getAlarmActions(), null, oldAlarmDef.getOkActions(), newUndeterminedActions,
-        newUndeterminedActions);
-
-    final List<String> emptyActionList = new ArrayList<>();
-    doPatch(matchBy, oldAlarmDef, null, oldAlarmDef.getName(), null, oldAlarmDef.getDescription(),
-        null, oldAlarmDef.getSeverity(), newEnabled, newEnabled, emptyActionList,
-        emptyActionList, null, oldAlarmDef.getOkActions(), null,
-        oldAlarmDef.getUndeterminedActions());
-
-    doPatch(matchBy, oldAlarmDef, null, oldAlarmDef.getName(), null, oldAlarmDef.getDescription(),
-        null, oldAlarmDef.getSeverity(), newEnabled, newEnabled, null,
-        oldAlarmDef.getAlarmActions(), emptyActionList, emptyActionList, null,
-        oldAlarmDef.getUndeterminedActions());
-
-    doPatch(matchBy, oldAlarmDef, null, oldAlarmDef.getName(), null, oldAlarmDef.getDescription(),
-        null, oldAlarmDef.getSeverity(), newEnabled, newEnabled, null,
-        oldAlarmDef.getAlarmActions(), null, oldAlarmDef.getOkActions(), emptyActionList,
-        emptyActionList);
-  }
-
-  private void doPatch(final List<String> matchBy, final AlarmDefinition oldAlarmDef,
-      final String newName, final String expectedName, String newDescription,
-      String expectedDescription, final String newSeverity, final String expectedSeverity,
-      final Boolean actionsEnabled, final boolean expectedActionsEnabled,
-      final List<String> newAlarmActions, final List<String> expectedAlarmActions,
-      final List<String> newOkActions, final List<String> expectedOkActions,
-      final List<String> newUndeterminedActions, final List<String> expectedUndeterminedActions) {
-    final Map<String, AlarmSubExpression> emptyMap = new HashMap<>();
-    final Map<String, AlarmSubExpression> changedSubExpressions = new HashMap<>();
-    final Map<String, AlarmSubExpression> unchangedSubExpressions = new HashMap<>();
-    unchangedSubExpressions.put("444", AlarmSubExpression.of(EXPR1));
-    unchangedSubExpressions.put("555", AlarmSubExpression.of(EXPR2));
-
-    BiMap<String, AlarmSubExpression> oldExpressions =
-        HashBiMap.create(new HashMap<String, AlarmSubExpression>());
-    final Set<String> oldSubAlarmIds = oldExpressions.keySet();
-    Map<String, AlarmSubExpression> changedSubAlarms = new HashMap<>();
-    Map<String, AlarmSubExpression> newSubAlarms = new HashMap<>();
-    final AlarmDefinition patchedAlarmDef =
-        service.patch(TENANT_ID, oldAlarmDef.getId(), newName, newDescription, newSeverity,
-            null, null, null, actionsEnabled, newAlarmActions,
-            newOkActions, newUndeterminedActions);
-
-    final AlarmDefinition expected =
-        new AlarmDefinition(oldAlarmDef.getId(), expectedName, expectedDescription,
-            expectedSeverity, oldAlarmDef.getExpression(), matchBy,
-            expectedActionsEnabled, expectedAlarmActions,
-            expectedOkActions, expectedUndeterminedActions);
-    assertEquals(patchedAlarmDef, expected);
-
-    final AlarmDefinitionUpdatedEvent event =
-        new AlarmDefinitionUpdatedEvent(TENANT_ID, oldAlarmDef.getId(), expectedName,
-            expectedDescription, oldAlarmDef.getExpression(), matchBy,
-            expectedActionsEnabled, expectedSeverity, emptyMap,
-            changedSubExpressions, unchangedSubExpressions, emptyMap);
-    verify(producer).send(
-        new KeyedMessage(config.eventsTopic,
-            String.valueOf(service.eventCount - 1), Serialization.toJson(event)));
-    verify(repo).update(TENANT_ID, oldAlarmDef.getId(), true, expectedName,
-        expectedDescription, oldAlarmDef.getExpression(), matchBy,
-        expectedSeverity, expectedActionsEnabled, oldSubAlarmIds,
-        changedSubAlarms, newSubAlarms, newAlarmActions, newOkActions, newUndeterminedActions);
-  }
-
-  private AlarmDefinition setupInitialAlarmDefinition(final List<String> matchBy) {
-    final String alarmDefId = "123";
-    String exprStr = EXPR1 + " or " + EXPR2;
-    List<String> alarmActions = Arrays.asList("1", "2", "3");
-    List<String> okActions = Arrays.asList("2", "3");
-    List<String> undeterminedActions = Arrays.asList("3");
-    AlarmDefinition oldAlarmDef =
-        new AlarmDefinition(alarmDefId, "foo bar", "foo bar", "LOW", exprStr, matchBy, true, alarmActions,
-            okActions, undeterminedActions);
-    Map<String, AlarmSubExpression> oldSubExpressions = new HashMap<>();
-    oldSubExpressions.put("444", AlarmSubExpression.of(EXPR1));
-    oldSubExpressions.put("555", AlarmSubExpression.of(EXPR2));
-
-    when(repo.findById(eq(TENANT_ID), eq(alarmDefId))).thenReturn(oldAlarmDef);
-    when(repo.findSubExpressions(eq(alarmDefId))).thenReturn(oldSubExpressions);
-    when(notificationMethodRepo.exists(eq(TENANT_ID), anyString())).thenReturn(true);
-    return oldAlarmDef;
-  }
-
-  public void testOldAndNewSubExpressionsFor() {
-    Map<String, AlarmSubExpression> oldSubExpressions = new HashMap<>();
-    oldSubExpressions.put("111", AlarmSubExpression.of("avg(foo{instance_id=123}) > 1"));
-    oldSubExpressions.put("222", AlarmSubExpression.of("avg(foo{instance_id=456}) > 2"));
-    oldSubExpressions.put("333", AlarmSubExpression.of("avg(foo{instance_id=789}) > 3"));
-
-    String newExprStr =
-        "avg(foo{instance_id=123}) > 1 or avg(foo{instance_id=456}) <= 22 or avg(foo{instance_id=444}) > 4";
-    AlarmExpression newExpr = AlarmExpression.of(newExprStr);
-
-    SubExpressions expressions = service.subExpressionsFor(oldSubExpressions, newExpr);
-
-    // Assert old expressions
-    assertEquals(expressions.oldAlarmSubExpressions,
-        Collections.singletonMap("333", AlarmSubExpression.of("avg(foo{instance_id=789}) > 3")));
-
-    // Assert changed expressions
-    assertEquals(expressions.changedSubExpressions,
-        Collections.singletonMap("222", AlarmSubExpression.of("avg(foo{instance_id=456}) <= 22")));
-
-    // Assert unchanged expressions
-    assertEquals(expressions.unchangedSubExpressions,
-        Collections.singletonMap("111", AlarmSubExpression.of("avg(foo{instance_id=123}) > 1")));
-
-    // Assert new expressions
-    assertTrue(expressions.newAlarmSubExpressions.containsValue(AlarmSubExpression
-        .of("avg(foo{instance_id=444}) > 4")));
-  }
-
-  public void testSubExpressionsForUnchanged() {
-    Map<String, AlarmSubExpression> oldSubExpressions = new HashMap<>();
-    final String expr1 = "avg(foo{instance_id=123}) > 1";
-    oldSubExpressions.put("111", AlarmSubExpression.of(expr1));
-    final String expr2 = "avg(foo{instance_id=123}) < 4";
-    oldSubExpressions.put("222", AlarmSubExpression.of(expr2));
-
-    String newExprStr = expr2 + " and " + expr1;
-    AlarmExpression newExpr = AlarmExpression.of(newExprStr);
-
-    SubExpressions expressions = service.subExpressionsFor(oldSubExpressions, newExpr);
-
-    // Assert old expressions
-    assertTrue(expressions.oldAlarmSubExpressions.isEmpty());
-
-    // Assert changed expressions
-    assertTrue(expressions.changedSubExpressions.isEmpty());
-
-    // Assert unchanged expressions
-    assertEquals(expressions.unchangedSubExpressions.size(), 2);
-    assertEquals(expressions.unchangedSubExpressions.get("111"), AlarmSubExpression.of(expr1));
-    assertEquals(expressions.unchangedSubExpressions.get("222"), AlarmSubExpression.of(expr2));
-
-    // Assert new expressions
-    assertTrue(expressions.newAlarmSubExpressions.isEmpty());
-  }
-
-  @Test(expectedExceptions = EntityExistsException.class)
-  public void testPatchSameName() {
-    String exprStr = EXPR1 + " or " + EXPR2;
-    List<String> alarmActions = Arrays.asList("1", "2", "3");
-    List<String> okActions = Arrays.asList("2", "3");
-    List<String> undeterminedActions = Arrays.asList("3");
-    List<String> matchBy = Arrays.asList("service", "instance_id");
-    AlarmDefinition firstAlarmDef =
-        new AlarmDefinition("123", "91% CPU", "description1", "LOW", exprStr, matchBy, true, alarmActions,
-            okActions, undeterminedActions);
-
-    AlarmDefinition secondAlarmDef =
-        new AlarmDefinition("234", "92% CPU", "description2", "LOW", exprStr, matchBy, true, alarmActions,
-            okActions, undeterminedActions);
-
-    when(repo.findById(TENANT_ID, secondAlarmDef.getId())).thenReturn(secondAlarmDef);
-    when(repo.findById(TENANT_ID, firstAlarmDef.getId())).thenReturn(firstAlarmDef);
-    when(repo.exists(TENANT_ID, "91% CPU")).thenReturn("123");
-    when(notificationMethodRepo.exists(eq(TENANT_ID), anyString())).thenReturn(true);
-    service.patch(TENANT_ID, secondAlarmDef.getId(), firstAlarmDef.getName(), "foo", "LOW", exprStr, null,
-        matchBy, true, alarmActions, okActions, undeterminedActions);
-
-  }
-
-  public void testPatchExistingAlarmName() {
-    String exprStr = EXPR1 + " or " + EXPR2;
-    List<String> alarmActions = Arrays.asList("1", "2", "3");
-    List<String> okActions = Arrays.asList("2", "3");
-    List<String> undeterminedActions = Arrays.asList("3");
-    List<String> matchBy = Arrays.asList("service", "instance_id");
-    AlarmDefinition firstAlarmDef =
-        new AlarmDefinition("123", "91% CPU", "description1", "LOW", exprStr, matchBy, true, alarmActions,
-            okActions, undeterminedActions);
-
-    AlarmDefinition secondAlarmDef =
-        new AlarmDefinition("234", "92% CPU", "description2", "LOW", exprStr, matchBy, true, alarmActions,
-            okActions, undeterminedActions);
-
-    when(repo.findById(TENANT_ID, secondAlarmDef.getId())).thenReturn(secondAlarmDef);
-    when(repo.findById(TENANT_ID, firstAlarmDef.getId())).thenReturn(firstAlarmDef);
-    when(repo.exists(TENANT_ID, "92% CPU")).thenReturn(secondAlarmDef.getId());
-
-    Map<String, AlarmSubExpression> oldSubExpressions = new HashMap<>();
-    oldSubExpressions.put("444", AlarmSubExpression.of(EXPR1));
-    oldSubExpressions.put("555", AlarmSubExpression.of(EXPR2));
-    when(repo.findSubExpressions(eq("234"))).thenReturn(oldSubExpressions);
-
-    when(notificationMethodRepo.exists(eq(TENANT_ID), anyString())).thenReturn(true);
-    AlarmDefinition alarmPatched = service.patch(TENANT_ID, secondAlarmDef.getId(), "92% CPU", "foo", "LOW", exprStr, null,
-        matchBy, true, alarmActions, okActions, undeterminedActions);
-    assertEquals(alarmPatched.getName(), "92% CPU");
-
-  }
-
-  public void testUpdateExistingAlarmName() {
-    String exprStr = EXPR1 + " or " + EXPR2;
-    List<String> alarmActions = Arrays.asList("1", "2", "3");
-    List<String> okActions = Arrays.asList("2", "3");
-    List<String> undeterminedActions = Arrays.asList("3");
-    List<String> matchBy = Arrays.asList("service", "instance_id");
-    AlarmDefinition firstAlarmDef =
-        new AlarmDefinition("123", "91% CPU", "description1", "LOW", exprStr, matchBy, true,
-            alarmActions, okActions, undeterminedActions);
-
-    AlarmDefinition secondAlarmDef =
-        new AlarmDefinition("234", "92% CPU", "description2", "LOW", exprStr, matchBy, true,
-            alarmActions, okActions, undeterminedActions);
-
-    UpdateAlarmDefinitionCommand updateCommand =
-        new UpdateAlarmDefinitionCommand("92% CPU", "Description1", exprStr, matchBy, "LOW", true,
-            alarmActions, okActions, undeterminedActions);
-
-    when(repo.findById(TENANT_ID, secondAlarmDef.getId())).thenReturn(secondAlarmDef);
-    when(repo.findById(TENANT_ID, firstAlarmDef.getId())).thenReturn(firstAlarmDef);
-    when(repo.exists(TENANT_ID, "92% CPU")).thenReturn(secondAlarmDef.getId());
-
-    Map<String, AlarmSubExpression> oldSubExpressions = new HashMap<>();
-    oldSubExpressions.put("444", AlarmSubExpression.of(EXPR1));
-    oldSubExpressions.put("555", AlarmSubExpression.of(EXPR2));
-    when(repo.findSubExpressions(eq("234"))).thenReturn(oldSubExpressions);
-    AlarmExpression alarmExpression = new AlarmExpression(exprStr);
-
-    when(notificationMethodRepo.exists(eq(TENANT_ID), anyString())).thenReturn(true);
-    AlarmDefinition alarmPatched =
-        service.update(TENANT_ID, secondAlarmDef.getId(), alarmExpression, updateCommand);
-    assertEquals(alarmPatched.getName(), "92% CPU");
-  }
-
-  @Test(expectedExceptions = EntityExistsException.class)
-  public void testUpdateSameName() {
-    String exprStr = EXPR1 + " or " + EXPR2;
-    List<String> alarmActions = Arrays.asList("1", "2", "3");
-    List<String> okActions = Arrays.asList("2", "3");
-    List<String> undeterminedActions = Arrays.asList("3");
-    List<String> matchBy = Arrays.asList("service", "instance_id");
-
-    AlarmDefinition firstAlarmDef =
-        new AlarmDefinition("123", "91% CPU", "description1", "LOW", exprStr, matchBy, true, alarmActions,
-            okActions, undeterminedActions);
-
-    AlarmDefinition secondAlarmDef =
-        new AlarmDefinition("234", "92% CPU", "description2", "LOW", exprStr, matchBy, true, alarmActions,
-            okActions, undeterminedActions);
-
-    UpdateAlarmDefinitionCommand updateCommand = new UpdateAlarmDefinitionCommand(
-        "91% CPU", "Description1", exprStr, matchBy, "LOW", true,
-        alarmActions, okActions, undeterminedActions);
-
-    when(repo.findById(TENANT_ID, secondAlarmDef.getId())).thenReturn(secondAlarmDef);
-    when(repo.findById(TENANT_ID, firstAlarmDef.getId())).thenReturn(firstAlarmDef);
-    when(repo.exists(TENANT_ID, "91% CPU")).thenReturn("123");
-    when(notificationMethodRepo.exists(eq(TENANT_ID), anyString())).thenReturn(true);
-
-    AlarmExpression alarmExpression = new AlarmExpression(exprStr);
-    service.update(TENANT_ID, secondAlarmDef.getId(), alarmExpression, updateCommand);
-
-  }
-}
diff --git a/java/src/test/java/monasca/api/app/command/CreateAlarmDefinitionCommandTest.java b/java/src/test/java/monasca/api/app/command/CreateAlarmDefinitionCommandTest.java
deleted file mode 100644
index 484e79b58..000000000
--- a/java/src/test/java/monasca/api/app/command/CreateAlarmDefinitionCommandTest.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * (C) Copyright 2014,2016 Hewlett Packard Enterprise Development LP
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-
-package monasca.api.app.command;
-
-import static monasca.common.dropwizard.JsonHelpers.jsonFixture;
-import static org.testng.Assert.assertEquals;
-
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.testng.annotations.Test;
-
-import monasca.api.domain.model.AbstractModelTest;
-
-@Test
-public class CreateAlarmDefinitionCommandTest extends AbstractModelTest {
-  public void shouldDeserializeFromJson() throws Exception {
-    Map<String, String> dimensions = new HashMap<String, String>();
-    dimensions.put("instanceId", "392633");
-    /** todo: Check the null value to get works **/
-    CreateAlarmDefinitionCommand newAlarm =
-        new CreateAlarmDefinitionCommand("Disk Exceeds 1k Operations", null,
-            "avg(hpcs.compute:cpu:1:{instance_id=5}) > 5", null, null, Arrays.asList("123345345",
-                "23423"), null, null);
-
-    String json = jsonFixture("fixtures/newAlarm.json");
-    CreateAlarmDefinitionCommand alarm = fromJson(json, CreateAlarmDefinitionCommand.class);
-    assertEquals(alarm, newAlarm);
-  }
-}
diff --git a/java/src/test/java/monasca/api/app/command/CreateNotificationMethodTest.java b/java/src/test/java/monasca/api/app/command/CreateNotificationMethodTest.java
deleted file mode 100644
index efc869c03..000000000
--- a/java/src/test/java/monasca/api/app/command/CreateNotificationMethodTest.java
+++ /dev/null
@@ -1,237 +0,0 @@
-/*
- * (C) Copyright 2014-2016 Hewlett Packard Enterprise Development LP
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-
-package monasca.api.app.command;
-
-import static monasca.common.dropwizard.JsonHelpers.jsonFixture;
-import static org.testng.Assert.assertEquals;
-
-import java.util.Arrays;
-import java.util.List;
-import java.util.Set;
-
-import javax.validation.ConstraintViolation;
-import javax.validation.Validation;
-import javax.validation.Validator;
-import javax.validation.ValidatorFactory;
-import javax.ws.rs.WebApplicationException;
-
-import org.apache.commons.lang3.StringUtils;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-import com.fasterxml.jackson.databind.JsonMappingException;
-
-import monasca.api.app.command.CreateNotificationMethodCommand;
-import monasca.api.domain.model.AbstractModelTest;
-
-
-@Test
-public class CreateNotificationMethodTest extends AbstractModelTest {
-
-  private static final String NOTIFICATION_METHOD_WEBHOOK = "WEBHOOK";
-  private static final String NOTIFICATION_METHOD_EMAIL = "EMAIL";
-  private static final String NOTIFICATION_METHOD_PAGERDUTY = "PAGERDUTY";
-
-  private static Validator validator;
-  private List<Integer> validPeriods = Arrays.asList(0, 60);
-
-  @BeforeClass
-  public static void setUp() {
-    ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
-    validator = factory.getValidator();
-  }
-
-  public void shouldDeserializeFromJson() throws Exception {
-    CreateNotificationMethodCommand newNotificationMethod =
-        new CreateNotificationMethodCommand("MyEmail", NOTIFICATION_METHOD_EMAIL, "a@b", "0");
-
-    String json = jsonFixture("fixtures/newNotificationMethod.json");
-    CreateNotificationMethodCommand other = fromJson(json, CreateNotificationMethodCommand.class);
-    assertEquals(other, newNotificationMethod);
-  }
-
-  public void shouldDeserializeFromJsonLowerCaseEnum() throws Exception {
-    CreateNotificationMethodCommand newNotificationMethod =
-        new CreateNotificationMethodCommand("MyEmail", NOTIFICATION_METHOD_EMAIL, "a@b", "0");
-
-    String json = jsonFixture("fixtures/newNotificationMethodWithLowercaseEnum.json");
-    CreateNotificationMethodCommand other = fromJson(json, CreateNotificationMethodCommand.class);
-    assertEquals(other, newNotificationMethod);
-  }
-
-  public void shouldDeserializeFromJsonDefinedPeriod() throws Exception {
-    CreateNotificationMethodCommand newNotificationMethod =
-        new CreateNotificationMethodCommand("MyWebhook", NOTIFICATION_METHOD_WEBHOOK, "http://somedomain.com", "60");
-
-    String json = jsonFixture("fixtures/newNotificationMethodWithPeriod.json");
-    CreateNotificationMethodCommand other = fromJson(json, CreateNotificationMethodCommand.class);
-    assertEquals(other, newNotificationMethod);
-  }
-
-  @Test(expectedExceptions = java.lang.AssertionError.class)
-  public void shouldDeserializeFromJsonEnumError() throws Exception {
-    CreateNotificationMethodCommand newNotificationMethod =
-        new CreateNotificationMethodCommand("MyEmail", NOTIFICATION_METHOD_EMAIL, "a@b", "0");
-
-    String json = jsonFixture("fixtures/newNotificationMethodWithInvalidEnum.json");
-    CreateNotificationMethodCommand other = fromJson(json, CreateNotificationMethodCommand.class);
-    assertEquals(other, newNotificationMethod);
-  }
-
-  public void testValidationForEmail() {
-    CreateNotificationMethodCommand newNotificationMethod =
-        new CreateNotificationMethodCommand("MyEmail", NOTIFICATION_METHOD_EMAIL, "name@domain.com", "0");
-    newNotificationMethod.validate(validPeriods);
-  }
-
-  @Test(expectedExceptions = WebApplicationException.class)
-  public void testValidationExceptionForEmail() throws Exception {
-    CreateNotificationMethodCommand newNotificationMethod =
-        new CreateNotificationMethodCommand("MyEmail", NOTIFICATION_METHOD_EMAIL, "name@domain.", "0");
-    newNotificationMethod.validate(validPeriods);
-  }
-
-  @Test(expectedExceptions = WebApplicationException.class)
-  public void testValidationExceptionForNonZeroPeriodForEmail() {
-    CreateNotificationMethodCommand newNotificationMethod =
-        new CreateNotificationMethodCommand("MyEmail", NOTIFICATION_METHOD_EMAIL, "name@domain.", "60");
-    newNotificationMethod.validate(validPeriods);
-  }
-
-  public void testValidationForWebhook() {
-    CreateNotificationMethodCommand newNotificationMethod =
-        new CreateNotificationMethodCommand("MyWebhook", NOTIFICATION_METHOD_WEBHOOK, "http://somedomain.com", "0");
-    newNotificationMethod.validate(validPeriods);
-  }
-
-  public void testValidationNonZeroPeriodForWebhook() {
-    CreateNotificationMethodCommand newNotificationMethod =
-        new CreateNotificationMethodCommand("MyWebhook", NOTIFICATION_METHOD_WEBHOOK, "http://somedomain.com", "60");
-    newNotificationMethod.validate(validPeriods);
-  }
-
-  public void testValidationTestDomainForWebhook() {
-    CreateNotificationMethodCommand newNotificationMethod =
-        new CreateNotificationMethodCommand("MyWebhook", NOTIFICATION_METHOD_WEBHOOK, "http://test.test", "60");
-    newNotificationMethod.validate(validPeriods);
-  }
-
-  public void testValidationTestDomainWithPortForWebhook() {
-    CreateNotificationMethodCommand newNotificationMethod =
-        new CreateNotificationMethodCommand("MyWebhook", NOTIFICATION_METHOD_WEBHOOK, "http://test.test:4522", "60");
-    newNotificationMethod.validate(validPeriods);
-  }
-
-  @Test(expectedExceptions = WebApplicationException.class)
-  public void testValidationInvalidTestDomainForWebhook() {
-    CreateNotificationMethodCommand newNotificationMethod =
-        new CreateNotificationMethodCommand("MyWebhook", NOTIFICATION_METHOD_WEBHOOK, "http://test.invalid:4522", "60");
-    newNotificationMethod.validate(validPeriods);
-  }
-
-  @Test(expectedExceptions = WebApplicationException.class)
-  public void testValidationTestDomainWithInvalidPortForWebhook() {
-    CreateNotificationMethodCommand newNotificationMethod =
-        new CreateNotificationMethodCommand("MyWebhook", NOTIFICATION_METHOD_WEBHOOK, "http://test.test:4522AA/mywebhook", "60");
-    newNotificationMethod.validate(validPeriods);
-  }
-
-  @Test(expectedExceptions = WebApplicationException.class)
-  public void testValidationTestDomainWithInvalidMultiplePortsForWebhook() {
-    CreateNotificationMethodCommand newNotificationMethod =
-        new CreateNotificationMethodCommand("MyWebhook", NOTIFICATION_METHOD_WEBHOOK, "http://test.test:4522:33/mywebhook", "60");
-    newNotificationMethod.validate(validPeriods);
-  }
-
-  @Test(expectedExceptions = WebApplicationException.class)
-  public void testValidationInvalidDomainForWebhook() {
-    CreateNotificationMethodCommand newNotificationMethod =
-        new CreateNotificationMethodCommand("MyWebhook", NOTIFICATION_METHOD_WEBHOOK, "http://test.fred", "60");
-    newNotificationMethod.validate(validPeriods);
-  }
-
-  @Test(expectedExceptions = WebApplicationException.class)
-  public void testValidationExceptionForWebhook() throws Exception {
-    CreateNotificationMethodCommand newNotificationMethod =
-        new CreateNotificationMethodCommand("MyWebhook", NOTIFICATION_METHOD_WEBHOOK, "ftp://localhost", "0");
-    newNotificationMethod.validate(validPeriods);
-  }
-
-  public void testValidationForPagerduty() {
-    CreateNotificationMethodCommand newNotificationMethod =
-        new CreateNotificationMethodCommand("MyPagerduty", NOTIFICATION_METHOD_PAGERDUTY, "nzH2LVRdMzun11HNC2oD", "0");
-    newNotificationMethod.validate(validPeriods);
-  }
-
-  @Test(expectedExceptions = WebApplicationException.class)
-  public void testValidationExceptionForNonZeroPeriodForPagerDuty() {
-    CreateNotificationMethodCommand newNotificationMethod =
-        new CreateNotificationMethodCommand("MyPagerduty", NOTIFICATION_METHOD_PAGERDUTY, "nzH2LVRdMzun11HNC2oD", "60");
-    newNotificationMethod.validate(validPeriods);
-  }
-
-  public void testValidationForMaxNameAddress() {
-    String name = StringUtils.repeat("A", 250);
-    assertEquals(name.length(), 250);
-    String address = "http://" + StringUtils.repeat("A", 502) + ".io";
-    assertEquals(address.length(), 512);
-    CreateNotificationMethodCommand newNotificationMethod =
-        new CreateNotificationMethodCommand(name, NOTIFICATION_METHOD_WEBHOOK, address, "0");
-    Set<ConstraintViolation<CreateNotificationMethodCommand>> constraintViolations =
-        validator.validate(newNotificationMethod);
-
-    assertEquals(constraintViolations.size(), 0);
-  }
-
-  public void testValidationExceptionForExceededNameLength() {
-    String name = StringUtils.repeat("A", 251);
-    assertEquals(name.length(), 251);
-    CreateNotificationMethodCommand newNotificationMethod =
-        new CreateNotificationMethodCommand(name, NOTIFICATION_METHOD_WEBHOOK, "http://somedomain.com", "0");
-    Set<ConstraintViolation<CreateNotificationMethodCommand>> constraintViolations =
-        validator.validate(newNotificationMethod);
-
-    assertEquals(constraintViolations.size(), 1);
-    assertEquals(constraintViolations.iterator().next().getMessage(),
-        "size must be between 1 and 250");
-  }
-
-  public void testValidationExceptionForExceededAddressLength() {
-    String address = "http://" + StringUtils.repeat("A", 503) + ".io";
-    assertEquals(address.length(), 513);
-    CreateNotificationMethodCommand newNotificationMethod =
-        new CreateNotificationMethodCommand("MyWebhook", NOTIFICATION_METHOD_WEBHOOK, address, "0");
-    Set<ConstraintViolation<CreateNotificationMethodCommand>> constraintViolations =
-        validator.validate(newNotificationMethod);
-
-    assertEquals(constraintViolations.size(), 1);
-    assertEquals(constraintViolations.iterator().next().getMessage(),
-        "size must be between 1 and 512");
-  }
-
-  @Test(expectedExceptions = WebApplicationException.class)
-  public void testValidationExceptionForNonIntPeriod() {
-    CreateNotificationMethodCommand newNotificationMethod =
-        new CreateNotificationMethodCommand("MyEmail", NOTIFICATION_METHOD_EMAIL, "name@domain.com", "interval");
-    newNotificationMethod.validate(validPeriods);
-  }
-
-  @Test(expectedExceptions = WebApplicationException.class)
-  public void testValidationExceptionForInvalidPeriod() {
-    CreateNotificationMethodCommand newNotificationMethod =
-        new CreateNotificationMethodCommand("MyWebhook", NOTIFICATION_METHOD_WEBHOOK, "http://somedomain.com", "10");
-    newNotificationMethod.validate(validPeriods);
-  }
-}
diff --git a/java/src/test/java/monasca/api/app/validation/AlarmExpressionsTest.java b/java/src/test/java/monasca/api/app/validation/AlarmExpressionsTest.java
deleted file mode 100644
index 1775ae124..000000000
--- a/java/src/test/java/monasca/api/app/validation/AlarmExpressionsTest.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-
-package monasca.api.app.validation;
-
-import static org.testng.Assert.assertEquals;
-
-import javax.ws.rs.WebApplicationException;
-
-import org.testng.annotations.Test;
-
-import com.google.common.collect.ImmutableMap;
-import monasca.common.model.alarm.AlarmExpression;
-import monasca.common.model.metric.MetricDefinition;
-
-@Test
-public class AlarmExpressionsTest {
-  public void shouldNormalizeFields() {
-    AlarmExpression expr =
-        AlarmValidation
-            .validateNormalizeAndGet("avg(hpcs.compute.net_out_bytes{instance_id=5, instance_uuid=0ff588fc-d298-482f-bb11-4b52d56801a4, az=1}) > 4");
-    MetricDefinition metricDef = expr.getSubExpressions().get(0).getMetricDefinition();
-
-    assertEquals(metricDef.name, "hpcs.compute.net_out_bytes");
-    assertEquals(
-        metricDef.dimensions,
-        ImmutableMap.<String, String>builder().put("instance_id", "5")
-            .put("instance_uuid", "0ff588fc-d298-482f-bb11-4b52d56801a4").put("az", "1").build());
-  }
-
-  @Test(expectedExceptions = WebApplicationException.class)
-  public void shouldThrowOnInvalidOperator() throws Exception {
-    AlarmValidation.validateNormalizeAndGet("avg(hpcs.compute.net_out_bytes) & abc");
-  }
-
-  @Test(expectedExceptions = WebApplicationException.class)
-  public void shouldThrowOnDuplicateDimensions() throws Exception {
-    AlarmValidation
-        .validateNormalizeAndGet("avg(hpcs.compute.net_out_bytes{instance_id=5, instance_id=4}) > 4");
-  }
-
-  @Test(expectedExceptions = WebApplicationException.class)
-  public void shouldThrowOnInvalidThreshold() throws Exception {
-    AlarmValidation.validateNormalizeAndGet("avg(hpcs.compute.net_out_bytes) > abc");
-  }
-
-  @Test(expectedExceptions = WebApplicationException.class)
-  public void shouldThrowOnMalformedDeterministicKeyword() throws Exception {
-    AlarmValidation.validateNormalizeAndGet("avg(hpcs.compute.net_out_bytes,determ) > 1");
-  }
-}
diff --git a/java/src/test/java/monasca/api/app/validation/DimensionsTest.java b/java/src/test/java/monasca/api/app/validation/DimensionsTest.java
deleted file mode 100644
index 9ef5805aa..000000000
--- a/java/src/test/java/monasca/api/app/validation/DimensionsTest.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Copyright (c) 2014,2016 Hewlett Packard Enterprise Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-
-package monasca.api.app.validation;
-
-import static org.testng.Assert.assertEquals;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import javax.ws.rs.WebApplicationException;
-
-import org.apache.commons.lang3.StringUtils;
-import org.testng.annotations.Test;
-
-@Test
-public class DimensionsTest {
-  @SuppressWarnings("serial")
-  public void shouldNormalize() {
-    Map<String, String> dimensions = new HashMap<String, String>();
-    dimensions.put(" abc ", " 1 2 3 ");
-    dimensions.put(" ezaz", "do re mi ");
-    dimensions.put(" ", " ");
-
-    assertEquals(DimensionValidation.normalize(dimensions), new HashMap<String, String>() {
-      {
-        put("abc", "1 2 3");
-        put("ezaz", "do re mi");
-        put(null, null);
-      }
-    });
-  }
-
-  @Test(expectedExceptions = WebApplicationException.class)
-  @SuppressWarnings("serial")
-  public void shouldThrowOnEmptyDimensionValue() {
-    DimensionValidation.validate(new HashMap<String, String>() {
-      {
-        put("abc", "1 2 3");
-        put("ezaz", "do re mi");
-        put("abc", null);
-      }
-    });
-  }
-
-  public void shouldValidateKey() {
-    DimensionValidation.validateName("this.is_a.valid-key");
-  }
-
-  @Test(expectedExceptions = WebApplicationException.class)
-  public void shouldErrorOnValidateKeyWithEmptyKey() {
-    DimensionValidation.validateName("");
-  }
-
-  @Test(expectedExceptions = WebApplicationException.class)
-  public void shouldErrorOnValidateKeyWithLongKey() {
-    String key = StringUtils.repeat("A", 256);
-    DimensionValidation.validateName(key);
-  }
-
-  @Test(expectedExceptions = WebApplicationException.class)
-  public void shouldErrorOnValidateKeyWithStartingUnderscore() {
-    DimensionValidation.validateName("_key");
-  }
-
-  @Test(expectedExceptions = WebApplicationException.class)
-  public void shouldErrorOnValidateKeyWithInvalidCharKey() {
-    DimensionValidation.validateName("this{}that");
-  }
-
-  public void shouldValidateValue() {
-    DimensionValidation.validateValue("this.is_a.valid-value", "valid_name");
-  }
-
-  @Test(expectedExceptions = WebApplicationException.class)
-  public void shouldErrorOnValidateValueWithEmptyValue() {
-    DimensionValidation.validateValue("", "valid_name");
-  }
-
-  @Test(expectedExceptions = WebApplicationException.class)
-  public void shouldErrorOnValidateValueWithLongValue() {
-    String value = StringUtils.repeat("A", 256);
-    DimensionValidation.validateValue(value, "valid_name");
-  }
-
-  @Test(expectedExceptions = WebApplicationException.class)
-  public void shouldErrorOnValidateValueWithInvalidCharValue() {
-    DimensionValidation.validateValue("this{}that", "valid_name");
-  }
-}
diff --git a/java/src/test/java/monasca/api/app/validation/ValidationTest.java b/java/src/test/java/monasca/api/app/validation/ValidationTest.java
deleted file mode 100644
index 7968d6c65..000000000
--- a/java/src/test/java/monasca/api/app/validation/ValidationTest.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-
-package monasca.api.app.validation;
-
-import static org.testng.Assert.assertEquals;
-
-import org.testng.annotations.Test;
-
-import java.util.Map;
-
-@Test
-public class ValidationTest {
-  public void testSimpleParseAndValidateDimensions() {
-    final Map<String, String> dimensions = Validation.parseAndValidateDimensions("aa:bb,cc:dd");
-    assertEquals(dimensions.size(), 2);
-    assertEquals(dimensions.get("aa"), "bb");
-    assertEquals(dimensions.get("cc"), "dd");
-  }
-
-  public void testParseAndValidateDimensionsWithColon() {
-    final Map<String, String> dimensions = Validation.parseAndValidateDimensions("aa:bb,url:http://localhost:8081/healthcheck");
-    assertEquals(dimensions.size(), 2);
-    assertEquals(dimensions.get("aa"), "bb");
-    assertEquals(dimensions.get("url"), "http://localhost:8081/healthcheck");
-  }
-}
diff --git a/java/src/test/java/monasca/api/app/validation/ValueMetaValidationTest.java b/java/src/test/java/monasca/api/app/validation/ValueMetaValidationTest.java
deleted file mode 100644
index 90a9de83d..000000000
--- a/java/src/test/java/monasca/api/app/validation/ValueMetaValidationTest.java
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
- * Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-
-package monasca.api.app.validation;
-
-import static org.testng.Assert.assertEquals;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import javax.ws.rs.WebApplicationException;
-
-import org.testng.annotations.Test;
-
-@Test
-public class ValueMetaValidationTest {
-  @Test
-  @SuppressWarnings("serial")
-  public void shouldNormalize() {
-    Map<String, String> valueMeta = new HashMap<String, String>();
-    final String value1_with_spaces = " 1 2 3 ";
-    valueMeta.put(" abc ", value1_with_spaces);
-    final String value2_with_spaces = "do re mi ";
-    valueMeta.put(" ezaz", value2_with_spaces);
-
-    assertEquals(ValueMetaValidation.normalize(valueMeta), new HashMap<String, String>() {
-      {
-        put("abc", value1_with_spaces);
-        put("ezaz", value2_with_spaces);
-      }
-    });
-  }
-
-  @SuppressWarnings("serial")
-  public void emptyOk() {
-    ValueMetaValidation.validate(new HashMap<String, String>() {
-      {
-      }
-    });
-  }
-
-  public void maxOk() {
-    final Map<String, String> valueMeta = new HashMap<String, String>();
-    for (int i = 0; i < 16; i++) {
-      //
-      // All 16 name/value pairs (converted to json) must fit in 2048
-      // chars. Test that we can fit 1/16th of 2048 in each pair (128 chars):
-      //
-      // {"name":"value"}, ...
-      //  ^^      ^^^  ^^^   <-- extra chars (8 per pair)
-      //
-      valueMeta.put(makeString(i, 10), makeString(i, (128 - (10+8))));
-    }
-    ValueMetaValidation.validate(valueMeta);
-  }
-
-  @SuppressWarnings("serial")
-  public void emptyValueOk() {
-    final String key = "noValue";
-    HashMap<String, String> emptyValue = new HashMap<String, String>() {
-      {
-        put(key, "");
-      }
-    };
-    ValueMetaValidation.validate(emptyValue);
-    assertEquals(ValueMetaValidation.normalize(emptyValue), new HashMap<String, String>() {
-      {
-        put(key, "");
-      }
-    });
-  }
-
-  private String makeString(int num, int len) {
-    final StringBuilder builder = new StringBuilder(len);
-    while (builder.length() < len) {
-      builder.append(num);
-      builder.append('-');
-    }
-    builder.setLength(len);
-    return builder.toString();
-  }
-
-  @Test(expectedExceptions = WebApplicationException.class)
-  @SuppressWarnings("serial")
-  public void shouldThrowOnEmptyValueMetaName() {
-    ValueMetaValidation.validate(new HashMap<String, String>() {
-      {
-        put("abc", " 1 2 3 ");
-        put("ezaz", "do re mi ");
-        put(" ", "Bad");
-      }
-    });
-  }
-
-  @Test(expectedExceptions = WebApplicationException.class)
-  @SuppressWarnings("serial")
-  public void shouldThrowOnNullValueMetaName() {
-    ValueMetaValidation.validate(new HashMap<String, String>() {
-      {
-        put("abc", " 1 2 3 ");
-        put("ezaz", "do re mi ");
-        put(null, "Bad");
-      }
-    });
-  }
-
-  @Test(expectedExceptions = WebApplicationException.class)
-  public void shouldThrowOnTooManyValueMeta() {
-    final Map<String, String> valueMeta = new HashMap<String, String>();
-    for (int i = 0; i < 17; i++) {
-      valueMeta.put(makeString(i, 255), makeString(i, 2048));
-    }
-    ValueMetaValidation.validate(valueMeta);
-  }
-
-  @Test(expectedExceptions = WebApplicationException.class)
-  public void shouldThrowOnValueMetaNameTooLarge() {
-    final Map<String, String> valueMeta = new HashMap<String, String>();
-    for (int i = 0; i < 16; i++) {
-      valueMeta.put(makeString(i, 256), makeString(i, 2048));
-    }
-    ValueMetaValidation.validate(valueMeta);
-  }
-
-  @Test(expectedExceptions = WebApplicationException.class)
-  public void shouldThrowOnValueMetaValueTooLarge() {
-    final Map<String, String> valueMeta = new HashMap<String, String>();
-    for (int i = 0; i < 16; i++) {
-      valueMeta.put(makeString(i, 255), makeString(i, 2049));
-    }
-    ValueMetaValidation.validate(valueMeta);
-  }
-}
diff --git a/java/src/test/java/monasca/api/domain/model/AbstractModelTest.java b/java/src/test/java/monasca/api/domain/model/AbstractModelTest.java
deleted file mode 100644
index cfe52bc18..000000000
--- a/java/src/test/java/monasca/api/domain/model/AbstractModelTest.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-
-package monasca.api.domain.model;
-
-import java.io.IOException;
-
-import com.fasterxml.jackson.databind.DeserializationFeature;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.PropertyNamingStrategy;
-import com.fasterxml.jackson.databind.SerializationFeature;
-import com.fasterxml.jackson.databind.module.SimpleModule;
-import com.fasterxml.jackson.datatype.joda.JodaModule;
-import monasca.api.resource.serialization.SubAlarmExpressionSerializer;
-
-/**
- * Base model test.
- */
-public abstract class AbstractModelTest {
-  public static final ObjectMapper MAPPER;
-
-  static {
-    MAPPER = new ObjectMapper();
-    MAPPER
-        .setPropertyNamingStrategy(PropertyNamingStrategy.CAMEL_CASE_TO_LOWER_CASE_WITH_UNDERSCORES);
-    MAPPER.enable(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY);
-    MAPPER.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);
-    SimpleModule module = new SimpleModule("SerializationModule");
-    module.addSerializer(new SubAlarmExpressionSerializer());
-    MAPPER.registerModule(module);
-    MAPPER.registerModule(new JodaModule());
-  }
-
-  protected String toJson(Object object) throws IOException {
-    return MAPPER.writeValueAsString(object);
-  }
-
-  protected <T> T fromJson(String json, Class<T> type) throws Exception {
-    return MAPPER.readValue(json, type);
-  }
-}
diff --git a/java/src/test/java/monasca/api/domain/model/AlarmDefinitionTest.java b/java/src/test/java/monasca/api/domain/model/AlarmDefinitionTest.java
deleted file mode 100644
index 13464c363..000000000
--- a/java/src/test/java/monasca/api/domain/model/AlarmDefinitionTest.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-
-package monasca.api.domain.model;
-
-import static monasca.common.dropwizard.JsonHelpers.jsonFixture;
-import static org.testng.Assert.assertEquals;
-
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.testng.annotations.Test;
-
-import monasca.api.domain.model.alarmdefinition.AlarmDefinition;
-import monasca.api.domain.model.common.Link;
-
-@Test
-public class AlarmDefinitionTest extends AbstractModelTest {
-  private final AlarmDefinition alarm;
-  private final Map<String, String> dimensions;
-
-  public AlarmDefinitionTest() {
-    dimensions = new HashMap<String, String>();
-    dimensions.put("instance_id", "666");
-    dimensions.put("image_id", "345");
-    alarm =
-        new AlarmDefinition("123", "90% CPU", null, "LOW",
-            "avg(hpcs.compute{instance_id=666, image_id=345}) >= 90",
-            Collections.<String>emptyList(), false, Arrays.asList("123345345", "23423"), null, null);
-    alarm.setLinks(Arrays
-        .asList(new Link("self", "https://cloudsvc.example.com/v1.0")));
-  }
-
-  public void shouldSerializeToJson() throws Exception {
-    String json = toJson(alarm);
-    assertEquals(json, jsonFixture("fixtures/alarm.json"));
-  }
-}
diff --git a/java/src/test/java/monasca/api/domain/model/NotificationMethodTest.java b/java/src/test/java/monasca/api/domain/model/NotificationMethodTest.java
deleted file mode 100644
index ebc8abe1d..000000000
--- a/java/src/test/java/monasca/api/domain/model/NotificationMethodTest.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * (C) Copyright 2014-2016 Hewlett Packard Enterprise Development LP
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-
-package monasca.api.domain.model;
-
-import static monasca.common.dropwizard.JsonHelpers.jsonFixture;
-import static org.testng.Assert.assertEquals;
-
-import java.util.Arrays;
-
-import org.testng.annotations.Test;
-
-import monasca.api.domain.model.common.Link;
-import monasca.api.domain.model.notificationmethod.NotificationMethod;
-
-
-@Test
-public class NotificationMethodTest extends AbstractModelTest {
-  private final NotificationMethod notificationMethod;
-
-  public NotificationMethodTest() {
-    notificationMethod =
-        new NotificationMethod("123", "MyEmail", "EMAIL", "a@b", 0);
-    notificationMethod.setLinks(Arrays.asList(new Link("self",
-        "https://cloudsvc.example.com/v1.0")));
-  }
-
-  public void shouldSerializeToJson() throws Exception {
-    String json = toJson(notificationMethod);
-    assertEquals(json, jsonFixture("fixtures/notificationMethod.json"));
-  }
-
-  public void shouldDeserializeFromJson() throws Exception {
-    String json = jsonFixture("fixtures/notificationMethod.json");
-    NotificationMethod detail = fromJson(json, NotificationMethod.class);
-    assertEquals(notificationMethod, detail);
-  }
-}
diff --git a/java/src/test/java/monasca/api/domain/model/VersionTest.java b/java/src/test/java/monasca/api/domain/model/VersionTest.java
deleted file mode 100644
index d0ed59ce2..000000000
--- a/java/src/test/java/monasca/api/domain/model/VersionTest.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-
-package monasca.api.domain.model;
-
-import static monasca.common.dropwizard.JsonHelpers.jsonFixture;
-import static org.testng.Assert.assertEquals;
-
-import java.util.Arrays;
-
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
-import org.testng.annotations.Test;
-
-import monasca.api.domain.model.common.Link;
-import monasca.api.domain.model.version.Version;
-import monasca.api.domain.model.version.Version.VersionStatus;
-
-@Test
-public class VersionTest extends AbstractModelTest {
-  private final Version version;
-
-  public VersionTest() {
-    version =
-        new Version("1.0", VersionStatus.CURRENT, new DateTime(1355253328000L, DateTimeZone.UTC));
-    version.setLinks(Arrays.asList(new Link("self",
-        "https://cloudsvc.example.com/v1.0")));
-  }
-
-  public void shouldSerializeToJson() throws Exception {
-    String json = toJson(version);
-    assertEquals(json, jsonFixture("fixtures/version.json"));
-  }
-
-  public void shouldDeserializeFromJson() throws Exception {
-    String json = jsonFixture("fixtures/version.json");
-    Version detail = fromJson(json, Version.class);
-    assertEquals(version, detail);
-  }
-}
diff --git a/java/src/test/java/monasca/api/infrastructure/persistence/PersistUtilsTest.java b/java/src/test/java/monasca/api/infrastructure/persistence/PersistUtilsTest.java
deleted file mode 100644
index 3be7fc9b4..000000000
--- a/java/src/test/java/monasca/api/infrastructure/persistence/PersistUtilsTest.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * (C) Copyright 2016 Hewlett Packard Enterprise Development LP
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-
-package monasca.api.infrastructure.persistence;
-
-import static org.testng.Assert.assertEquals;
-
-import org.testng.annotations.Test;
-
-import java.text.ParseException;
-
-@Test
-public class PersistUtilsTest {
-  private final PersistUtils persistUtils = new PersistUtils();
-
-  public void test3DigitWithSpace() throws ParseException {
-    checkParseTimestamp("2016-01-01 01:01:01.123Z", "2016-01-01 01:01:01.123Z");
-  }
-
-  public void test2DigitWithSpace() throws ParseException {
-    checkParseTimestamp("2016-01-01 01:01:01.15Z", "2016-01-01 01:01:01.150Z");
-  }
-
-  public void test1DigitWithSpace() throws ParseException {
-    checkParseTimestamp("2016-01-01 01:01:01.1Z", "2016-01-01 01:01:01.100Z");
-  }
-
-  public void test3DigitWithT() throws ParseException {
-    checkParseTimestamp("2016-01-01T01:01:01.123Z", "2016-01-01T01:01:01.123Z");
-  }
-
-  public void test2DigitWithT() throws ParseException {
-    checkParseTimestamp("2016-01-01T01:01:01.15Z", "2016-01-01T01:01:01.150Z");
-  }
-
-  public void test1DigitWithT() throws ParseException {
-    checkParseTimestamp("2016-01-01T01:01:01.1Z", "2016-01-01T01:01:01.100Z");
-  }
-
-  private void checkParseTimestamp(final String start, final String expected) throws ParseException {
-    assertEquals(persistUtils.parseTimestamp(start).getTime(), persistUtils.parseTimestamp(expected).getTime());
-  }
-}
diff --git a/java/src/test/java/monasca/api/infrastructure/persistence/SubAlarmDefinitionQueriesTest.java b/java/src/test/java/monasca/api/infrastructure/persistence/SubAlarmDefinitionQueriesTest.java
deleted file mode 100644
index c872d3d25..000000000
--- a/java/src/test/java/monasca/api/infrastructure/persistence/SubAlarmDefinitionQueriesTest.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-
-package monasca.api.infrastructure.persistence;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import org.testng.annotations.Test;
-import static org.testng.Assert.assertEquals;
-
-@Test
-public class SubAlarmDefinitionQueriesTest {
-  public void metricQueriesSubAlarmQueriesTest1() {
-    String expectedResult =
-        " inner join sub_alarm_definition_dimension d0 on d0.dimension_name = :dname0 "
-            + "and d0.value = :dvalue0 and dim.sub_alarm_definition_id = d0.sub_alarm_definition_id inner join "
-            + "sub_alarm_definition_dimension d1 on d1.dimension_name = :dname1 and d1.value = :dvalue1 and dim"
-            + ".sub_alarm_definition_id = d1.sub_alarm_definition_id";
-    Map<String, String> dimsMap = new HashMap<>();
-    dimsMap.put("foo", "bar");
-    dimsMap.put("biz", "baz");
-
-    assertEquals(SubAlarmDefinitionQueries.buildJoinClauseFor(dimsMap), expectedResult);
-  }
-
-  public void metricQueriesSubAlarmQueriesTest2() {
-    String expectedResult = "";
-    Map<String, String> dimsMap = new HashMap<>();
-
-    assertEquals(SubAlarmDefinitionQueries.buildJoinClauseFor(dimsMap), expectedResult);
-  }
-
-  public void metricQueriesSubAlarmQueriesTest3() {
-    String expectedResult = "";
-    Map<String, String> dimsMap = null;
-
-    assertEquals(SubAlarmDefinitionQueries.buildJoinClauseFor(dimsMap), expectedResult);
-  }
-}
diff --git a/java/src/test/java/monasca/api/infrastructure/persistence/hibernate/AlarmDefinitionSqlRepositoryImplTest.java b/java/src/test/java/monasca/api/infrastructure/persistence/hibernate/AlarmDefinitionSqlRepositoryImplTest.java
deleted file mode 100644
index 8458aeb69..000000000
--- a/java/src/test/java/monasca/api/infrastructure/persistence/hibernate/AlarmDefinitionSqlRepositoryImplTest.java
+++ /dev/null
@@ -1,498 +0,0 @@
-/*
- * Copyright 2015 FUJITSU LIMITED
- * Copyright 2016 Hewlett Packard Enterprise Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-
-package monasca.api.infrastructure.persistence.hibernate;
-
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertNull;
-import static org.testng.Assert.assertTrue;
-import static org.testng.Assert.fail;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import com.google.common.base.Joiner;
-import com.google.common.base.Optional;
-import com.google.common.base.Predicate;
-import com.google.common.collect.FluentIterable;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Lists;
-import monasca.api.domain.exception.EntityNotFoundException;
-import monasca.api.domain.model.alarmdefinition.AlarmDefinition;
-import monasca.api.domain.model.alarmdefinition.AlarmDefinitionRepo;
-import monasca.common.hibernate.db.AlarmActionDb;
-import monasca.common.hibernate.db.AlarmDefinitionDb;
-import monasca.common.hibernate.db.NotificationMethodDb;
-import monasca.common.hibernate.db.SubAlarmDefinitionDb;
-import monasca.common.hibernate.db.SubAlarmDefinitionDimensionDb;
-import monasca.common.model.alarm.AggregateFunction;
-import monasca.common.model.alarm.AlarmNotificationMethodType;
-import monasca.common.model.alarm.AlarmOperator;
-import monasca.common.model.alarm.AlarmSeverity;
-import monasca.common.model.alarm.AlarmState;
-import monasca.common.model.alarm.AlarmSubExpression;
-import monasca.common.model.metric.MetricDefinition;
-import org.hibernate.Session;
-import org.hibernate.SessionFactory;
-import org.hibernate.Transaction;
-import org.hibernate.criterion.Projections;
-import org.hibernate.criterion.Restrictions;
-import org.testng.annotations.AfterMethod;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-import javax.annotation.Nullable;
-
-@Test(groups = "orm")
-public class AlarmDefinitionSqlRepositoryImplTest {
-
-  private SessionFactory sessionFactory;
-  private AlarmDefinitionRepo repo;
-  private AlarmDefinition alarmDef_123;
-  private AlarmDefinition alarmDef_234;
-  private List<String> alarmActions;
-  private Transaction tx;
-
-  @BeforeMethod
-  protected void beforeMethod() throws Exception {
-    this.sessionFactory = HibernateUtil.getSessionFactory();
-    this.repo = new AlarmDefinitionSqlRepoImpl(this.sessionFactory);
-
-    alarmActions = new ArrayList<>();
-    alarmActions.add("29387234");
-    alarmActions.add("77778687");
-
-    this.prepareData(this.sessionFactory);
-
-    this.tx = this.sessionFactory.openSession().beginTransaction();
-  }
-
-  @AfterMethod
-  protected void afterMethod() throws Exception {
-    this.tx.rollback();
-
-    this.sessionFactory.close();
-    this.sessionFactory = null;
-  }
-
-  protected void prepareData(final SessionFactory sessionFactory) {
-
-    Session session = sessionFactory.openSession();
-    session.beginTransaction();
-
-    final AlarmDefinitionDb alarmDefinition123 = new AlarmDefinitionDb()
-        .setTenantId("bob")
-        .setName("90% CPU")
-        .setSeverity(AlarmSeverity.HIGH)
-        .setExpression("avg(hpcs.compute{flavor_id=777, image_id=888, metric_name=cpu, device=1}) > 10")
-        .setMatchBy("flavor_id,image_id")
-        .setActionsEnabled(true);
-    session.save(alarmDefinition123.setId("123"));
-
-    final SubAlarmDefinitionDb subAlarmDefinition111 = new SubAlarmDefinitionDb()
-        .setAlarmDefinition(alarmDefinition123)
-        .setFunction("avg")
-        .setMetricName("hpcs.compute")
-        .setOperator(AlarmOperator.GT)
-        .setThreshold(10d)
-        .setPeriod(60)
-        .setPeriods(1);
-    session.save(subAlarmDefinition111.setId("111"));
-
-    final SubAlarmDefinitionDimensionDb subAlarmDefinitionDimensionFlavor777 = new SubAlarmDefinitionDimensionDb()
-        .setDimensionName("flavor_id")
-        .setValue("777");
-    final SubAlarmDefinitionDimensionDb subAlarmDefinitionDimensionImageId888 = new SubAlarmDefinitionDimensionDb()
-        .setDimensionName("image_id")
-        .setValue("888");
-    final SubAlarmDefinitionDimensionDb subAlarmDefinitionDimensionFlavorMetricNameCpu = new SubAlarmDefinitionDimensionDb()
-        .setDimensionName("metric_name")
-        .setValue("cpu");
-    final SubAlarmDefinitionDimensionDb subAlarmDefinitionDimensionDevice1 = new SubAlarmDefinitionDimensionDb()
-        .setDimensionName("device")
-        .setValue("1");
-
-    session.save(subAlarmDefinitionDimensionFlavor777.setSubExpression(subAlarmDefinition111));
-    session.save(subAlarmDefinitionDimensionImageId888.setSubExpression(subAlarmDefinition111));
-    session.save(subAlarmDefinitionDimensionFlavorMetricNameCpu.setSubExpression(subAlarmDefinition111));
-    session.save(subAlarmDefinitionDimensionDevice1.setSubExpression(subAlarmDefinition111));
-
-    final NotificationMethodDb notificationMethodDb29387234 = new NotificationMethodDb()
-        .setAddress("root@localhost")
-        .setName("root2938723")
-        .setTenantId("bob")
-        .setType(AlarmNotificationMethodType.EMAIL)
-        .setPeriod(60);
-    final NotificationMethodDb notificationMethodDb77778687 = new NotificationMethodDb()
-        .setAddress("root@localhost")
-        .setName("root77778687")
-        .setTenantId("bob")
-        .setType(AlarmNotificationMethodType.EMAIL)
-        .setPeriod(60);
-
-    session.save(notificationMethodDb29387234.setId("29387234"));
-    session.save(notificationMethodDb77778687.setId("77778687"));
-
-    final AlarmActionDb alarmAction29387234 = new AlarmActionDb()
-        .setActionId("29387234")
-        .setAlarmDefinition(alarmDefinition123)
-        .setAlarmState(AlarmState.ALARM);
-    final AlarmActionDb alarmAction77778687 = new AlarmActionDb()
-        .setActionId("77778687")
-        .setAlarmDefinition(alarmDefinition123)
-        .setAlarmState(AlarmState.ALARM);
-
-    session.save(alarmAction29387234);
-    session.save(alarmAction77778687);
-
-    final AlarmDefinitionDb alarmDefinition234 = new AlarmDefinitionDb()
-        .setTenantId("bob")
-        .setName("50% CPU")
-        .setSeverity(AlarmSeverity.LOW)
-        .setExpression("avg(hpcs.compute{flavor_id=777, image_id=888, metric_name=mem}) > 20 and avg(hpcs.compute) < 100")
-        .setMatchBy("flavor_id,image_id")
-        .setActionsEnabled(true);
-    session.save(alarmDefinition234.setId("234"));
-
-    final SubAlarmDefinitionDb subAlarmDefinition222 = new SubAlarmDefinitionDb()
-        .setAlarmDefinition(alarmDefinition234)
-        .setFunction("avg")
-        .setMetricName("hpcs.compute")
-        .setOperator(AlarmOperator.GT)
-        .setThreshold(20d)
-        .setPeriod(60)
-        .setPeriods(1);
-    final SubAlarmDefinitionDb subAlarmDefinition223 = new SubAlarmDefinitionDb()
-        .setAlarmDefinition(alarmDefinition234)
-        .setFunction("avg")
-        .setMetricName("hpcs.compute")
-        .setOperator(AlarmOperator.LT)
-        .setThreshold(100d)
-        .setPeriod(60)
-        .setPeriods(1);
-
-    session.save(subAlarmDefinition222.setId("222"));
-    session.save(subAlarmDefinition223.setId("223"));
-
-    session.save(
-        new SubAlarmDefinitionDimensionDb().setDimensionName("flavor_id").setValue("777").setSubExpression(subAlarmDefinition222)
-    );
-    session.save(
-        new SubAlarmDefinitionDimensionDb().setDimensionName("image_id").setValue("888").setSubExpression(subAlarmDefinition222)
-    );
-    session.save(
-        new SubAlarmDefinitionDimensionDb().setDimensionName("metric_name").setValue("mem").setSubExpression(subAlarmDefinition222)
-    );
-
-    session.save(
-        new AlarmActionDb().setAlarmDefinition(alarmDefinition234).setAlarmState(AlarmState.ALARM).setActionId("29387234")
-    );
-    session.save(
-        new AlarmActionDb().setAlarmDefinition(alarmDefinition234).setAlarmState(AlarmState.ALARM).setActionId("77778687")
-    );
-
-    session.getTransaction().commit();
-    session.close();
-
-    alarmDef_123 =
-        new AlarmDefinition("123", "90% CPU", null, "HIGH", "avg(hpcs.compute{flavor_id=777, image_id=888, metric_name=cpu, device=1}) > 10",
-            Arrays.asList("flavor_id", "image_id"), true, Arrays.asList("29387234", "77778687"), Collections.<String>emptyList(),
-            Collections.<String>emptyList());
-    alarmDef_234 =
-        new AlarmDefinition("234", "50% CPU", null, "LOW",
-            "avg(hpcs.compute{flavor_id=777, image_id=888, metric_name=mem}) > 20 and avg(hpcs.compute) < 100",
-            Arrays.asList("flavor_id", "image_id"), true, Arrays.asList("29387234", "77778687"), Collections.<String>emptyList(),
-            Collections.<String>emptyList());
-
-  }
-
-  @Test(groups = "orm")
-  public void shouldCreate() {
-    Session session = null;
-
-    long subAlarmDimensionSize;
-    long subAlarmSize;
-
-    Map<String, AlarmSubExpression> subExpressions =
-        ImmutableMap.<String, AlarmSubExpression>builder()
-            .put("4433", AlarmSubExpression.of("avg(hpcs.compute{flavor_id=777, image_id=888, metric_name=cpu}) > 10")).build();
-
-    AlarmDefinition alarmA =
-        repo.create("555", "2345", "90% CPU", null, "LOW", "avg(hpcs.compute{flavor_id=777, image_id=888, metric_name=cpu}) > 10", subExpressions,
-            Arrays.asList("flavor_id", "image_id"), alarmActions, null, null);
-    AlarmDefinition alarmB = repo.findById("555", alarmA.getId());
-
-    assertEquals(alarmA.getId(), alarmB.getId());
-    assertEquals(alarmA.getName(), alarmB.getName());
-    assertEquals(alarmA.getAlarmActions().size(), alarmB.getAlarmActions().size());
-
-    for (String alarmAction : alarmA.getAlarmActions()) {
-      assertTrue(alarmB.getAlarmActions().contains(alarmAction));
-    }
-
-    // Assert that sub-alarm and sub-alarm-dimensions made it to the db
-    try {
-      session = sessionFactory.openSession();
-
-      subAlarmSize = (Long) session
-          .createCriteria(SubAlarmDefinitionDb.class)
-          .add(Restrictions.eq("id", "4433"))
-          .setProjection(Projections.rowCount())
-          .uniqueResult();
-
-      subAlarmDimensionSize = (Long) session.createCriteria(SubAlarmDefinitionDimensionDb.class)
-          .add(Restrictions.eq("subAlarmDefinitionDimensionId.subExpression.id", "4433"))
-          .setProjection(Projections.rowCount())
-          .uniqueResult();
-
-    } finally {
-      if (session != null) {
-        session.close();
-      }
-    }
-    assertEquals(subAlarmSize, (long) 1);
-    assertEquals(subAlarmDimensionSize, (long) 3);
-  }
-
-  @Test(groups = "orm")
-  public void shouldUpdate() {
-
-    List<String> oldSubAlarmIds = Arrays.asList("222");
-    AlarmSubExpression changedSubExpression = AlarmSubExpression.of("avg(hpcs.compute) <= 200");
-    Map<String, AlarmSubExpression> changedSubExpressions =
-        ImmutableMap.<String, AlarmSubExpression>builder().put("223", changedSubExpression).build();
-    AlarmSubExpression newSubExpression = AlarmSubExpression.of("avg(foo{flavor_id=777}) > 333");
-    Map<String, AlarmSubExpression> newSubExpressions = ImmutableMap.<String, AlarmSubExpression>builder().put("555", newSubExpression).build();
-
-    repo.update("bob", "234", false, "90% CPU", null, "avg(foo{flavor_id=777}) > 333 and avg(hpcs.compute) <= 200",
-        Arrays.asList("flavor_id", "image_id"), "LOW", false, oldSubAlarmIds, changedSubExpressions, newSubExpressions, alarmActions, null, null);
-
-    AlarmDefinition alarm = repo.findById("bob", "234");
-    AlarmDefinition expected =
-        new AlarmDefinition("234", "90% CPU",
null, "LOW", "avg(foo{flavor_id=777}) > 333 and avg(hpcs.compute) <= 200", Arrays.asList("flavor_id", - "image_id"), false, alarmActions, Collections.emptyList(), Collections.emptyList()); - - assertEquals(expected.getId(), alarm.getId()); - assertEquals(expected.getName(), alarm.getName()); - assertEquals(expected.getExpressionData(), alarm.getExpressionData()); - assertEquals(expected.getAlarmActions().size(), alarm.getAlarmActions().size()); - for (String alarmAction : expected.getAlarmActions()) { - assertTrue(alarm.getAlarmActions().contains(alarmAction)); - } - - Map subExpressions = repo.findSubExpressions("234"); - assertEquals(subExpressions.get("223"), changedSubExpression); - assertEquals(subExpressions.get("555"), newSubExpression); - } - - @Test(groups = "orm") - public void shouldFindById() { - Session session = null; - AlarmDefinition alarmDef_123_repo = repo.findById("bob", "123"); - assertEquals(alarmDef_123.getDescription(), alarmDef_123_repo.getDescription()); - assertEquals(alarmDef_123.getExpression(), alarmDef_123_repo.getExpression()); - assertEquals(alarmDef_123.getExpressionData(), alarmDef_123_repo.getExpressionData()); - assertEquals(alarmDef_123.getName(), alarmDef_123_repo.getName()); - // Make sure it still finds AlarmDefinitions with no notifications - try { - session = sessionFactory.openSession(); - - session.createQuery("delete from AlarmActionDb").executeUpdate(); - - } finally { - if (session != null) { - session.close(); - } - } - alarmDef_123.setAlarmActions(new ArrayList(0)); - assertEquals(alarmDef_123, repo.findById("bob", "123")); - } - - @Test(groups = "orm") - public void shouldFindSubAlarmMetricDefinitions() { - - assertEquals(repo.findSubAlarmMetricDefinitions("123").get("111"), new MetricDefinition("hpcs.compute", ImmutableMap.builder() - .put("flavor_id", "777").put("image_id", "888").put("metric_name", "cpu").put("device", "1").build())); - - assertEquals(repo.findSubAlarmMetricDefinitions("234").get("222"), new MetricDefinition("hpcs.compute", ImmutableMap.builder() - .put("flavor_id", "777").put("image_id", "888").put("metric_name", "mem").build())); - - assertTrue(repo.findSubAlarmMetricDefinitions("asdfasdf").isEmpty()); - } - - @Test(groups = "orm") - public void shouldFindSubExpressions() { - - assertEquals(repo.findSubExpressions("123").get("111"), new AlarmSubExpression(AggregateFunction.AVG, new MetricDefinition("hpcs.compute", - ImmutableMap.builder().put("flavor_id", "777").put("image_id", "888").put("metric_name", "cpu").put("device", "1").build()), - AlarmOperator.GT, 10, 60, 1)); - - assertEquals(repo.findSubExpressions("234").get("223"), new AlarmSubExpression(AggregateFunction.AVG, new MetricDefinition("hpcs.compute", - new HashMap()), AlarmOperator.LT, 100, 60, 1)); - - assertTrue(repo.findSubAlarmMetricDefinitions("asdfasdf").isEmpty()); - } - - @Test(groups = "orm") - public void testExists() { - assertEquals(repo.exists("bob", "90% CPU"), "123"); - - // Negative - assertNull(repo.exists("bob", "999% CPU")); - } - - @Test(groups = "orm") - public void shouldDeleteById() { - repo.deleteById("bob", "123"); - - try { - assertNull(repo.findById("bob", "123")); - fail(); - } catch (EntityNotFoundException expected) { - } - assertEquals(Arrays.asList(alarmDef_234), repo.find("bob", null, null, null, null, null, 1)); - } - - public void shouldFindByDimension() { - final Map dimensions = new HashMap<>(); - dimensions.put("image_id", "888"); - - List result = repo.find("bob", null, dimensions, null, null, null, 1); - - 
assertEquals(Arrays.asList(alarmDef_123, alarmDef_234), result); - - dimensions.clear(); - dimensions.put("device", "1"); - assertEquals(Arrays.asList(alarmDef_123), repo.find("bob", null, dimensions, null, null, null, 1)); - - dimensions.clear(); - dimensions.put("Not real", "AA"); - assertEquals(0, repo.find("bob", null, dimensions, null, null, null, 1).size()); - } - - public void shouldFindByName() { - final Map dimensions = new HashMap<>(); - dimensions.put("image_id", "888"); - - List result = repo.find("bob", "90% CPU", dimensions, null, null, null, 1); - - assertEquals(Arrays.asList(alarmDef_123), result); - - } - - public void shouldSortBy() { - // null sorts by will sort by ID - this.checkList(repo.find("bob", null, null, null, null, null, 0), - this.alarmDef_123, this.alarmDef_234); - this.checkList(repo.find("bob", null, null, null, Arrays.asList("severity"), null, 0), - this.alarmDef_123, this.alarmDef_234); - this.checkList(repo.find("bob", null, null, null, Arrays.asList("state", "severity"), null, 0), - this.alarmDef_234, this.alarmDef_123); - this.checkList(repo.find("bob", null, null, null, Arrays.asList("name", "state", "severity"), null, 0), - this.alarmDef_234, this.alarmDef_123); - } - - public void shouldFilterBySeverity() { - checkList(repo.find("bob", null, null, Lists.newArrayList(AlarmSeverity.HIGH), null, null, 1), - this.alarmDef_123); - checkList(repo.find("bob", null, null, Lists.newArrayList(AlarmSeverity.LOW), null, null, 1), - this.alarmDef_234); - checkList(repo.find("bob", null, null, Lists.newArrayList(AlarmSeverity.HIGH, AlarmSeverity.LOW), null, null, 1), - this.alarmDef_123, this.alarmDef_234); - } - - public void shouldFindWithOffset() { - // create more alarm definition for this test - final AlarmDefinition localAd1 = new AlarmDefinition("999", "60% CPU", null, "LOW", - "avg(hpcs.compute{flavor_id=888, image_id=888, metric_name=mem}) > 20 and avg(hpcs.compute) < 100", - Arrays.asList("flavor_id", "image_id"), true, Arrays.asList("29387234", "77778687"), Collections.emptyList(), - Collections.emptyList()); - final AlarmDefinition localAd2 = new AlarmDefinition("9999", "70% CPU", null, "LOW", - "avg(hpcs.compute{flavor_id=999, image_id=888, metric_name=mem}) > 20 and avg(hpcs.compute) < 99", - Arrays.asList("flavor_id", "image_id"), true, Arrays.asList("29387234", "77778687"), Collections.emptyList(), - Collections.emptyList()); - final AlarmDefinition localAd3 = new AlarmDefinition("99999", "80% CPU", null, "LOW", - "avg(hpcs.compute{flavor_id=1111, image_id=888, metric_name=mem}) > 20 and avg(hpcs.compute) < 88", - Arrays.asList("flavor_id", "image_id"), true, Arrays.asList("29387234", "77778687"), Collections.emptyList(), - Collections.emptyList()); - - final Session session = sessionFactory.openSession(); - session.beginTransaction(); - - for (final AlarmDefinition ad : Lists.newArrayList(localAd1, localAd2, localAd3)) { - final AlarmDefinitionDb adDb = new AlarmDefinitionDb() - .setTenantId("bob") - .setName(ad.getName()) - .setSeverity(AlarmSeverity.valueOf(ad.getSeverity())) - .setExpression(ad.getExpression()) - .setMatchBy(Joiner.on(",").join(ad.getMatchBy())) - .setActionsEnabled(ad.isActionsEnabled()); - - session.save(adDb.setId(ad.getId())); - - for (final String alarmActionId : ad.getAlarmActions()) { - session.save(new AlarmActionDb() - .setActionId(alarmActionId) - .setAlarmDefinition(adDb) - .setAlarmState(AlarmState.ALARM) - ); - } - } - session.getTransaction().commit(); - session.close(); - - // run tests - 
checkList(repo.find("bob", null, null, null, null, null, 1), this.alarmDef_123, this.alarmDef_234); - checkList(repo.find("bob", null, null, null, null, "1", 1), this.alarmDef_234, localAd1); - checkList(repo.find("bob", null, null, null, null, "2", 1), localAd1, localAd2); - checkList(repo.find("bob", null, null, null, null, "3", 1), localAd2, localAd3); - checkList(repo.find("bob", null, null, null, null, "4", 1), localAd3); - checkList(repo.find("bob", null, null, null, null, "5", 1)); - - checkList(repo.find("bob", null, null, null, null, null, 0), - this.alarmDef_123, this.alarmDef_234, localAd1, localAd2, localAd3); - checkList(repo.find("bob", null, null, null, null, null, 6), - this.alarmDef_123, this.alarmDef_234, localAd1, localAd2, localAd3); - checkList(repo.find("bob", null, null, null, null, "2", 3), - localAd1, localAd2, localAd3); - } - - private void checkList(List found, AlarmDefinition... expected) { - assertEquals(found.size(), expected.length); - AlarmDefinition actual; - - for (final AlarmDefinition alarmDefinition : expected) { - final Optional alarmOptional = FluentIterable - .from(found) - .firstMatch(new Predicate() { - @Override - public boolean apply(@Nullable final AlarmDefinition input) { - assert input != null; - return input.getId().equals(alarmDefinition.getId()); - } - }); - assertTrue(alarmOptional.isPresent()); - - actual = alarmOptional.get(); - assertEquals(actual, alarmDefinition, String.format("%s not equal to %s", actual, alarmDefinition)); - } - - } - -} diff --git a/java/src/test/java/monasca/api/infrastructure/persistence/hibernate/AlarmHibernateUtilsTest.java b/java/src/test/java/monasca/api/infrastructure/persistence/hibernate/AlarmHibernateUtilsTest.java deleted file mode 100644 index d86331d10..000000000 --- a/java/src/test/java/monasca/api/infrastructure/persistence/hibernate/AlarmHibernateUtilsTest.java +++ /dev/null @@ -1,169 +0,0 @@ -/* - * Copyright 2015 FUJITSU LIMITED - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
- */ -package monasca.api.infrastructure.persistence.hibernate; - -import static monasca.api.infrastructure.persistence.hibernate.TestHelper.randomByteArray; -import static org.testng.Assert.assertEquals; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import com.beust.jcommander.internal.Maps; -import monasca.common.hibernate.db.AlarmDb; -import monasca.common.hibernate.db.AlarmDefinitionDb; -import monasca.common.hibernate.db.AlarmMetricDb; -import monasca.common.hibernate.db.MetricDefinitionDb; -import monasca.common.hibernate.db.MetricDefinitionDimensionsDb; -import monasca.common.hibernate.db.MetricDimensionDb; -import monasca.common.model.alarm.AlarmSeverity; -import monasca.common.model.alarm.AlarmState; -import org.hibernate.Session; -import org.hibernate.SessionFactory; -import org.hibernate.Transaction; -import org.joda.time.DateTime; -import org.joda.time.format.DateTimeFormatter; -import org.joda.time.format.ISODateTimeFormat; -import org.testng.annotations.AfterMethod; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Test; - -@Test(groups = "orm") -public class AlarmHibernateUtilsTest { - - private static final DateTimeFormatter ISO_8601_FORMATTER = ISODateTimeFormat.dateOptionalTimeParser().withZoneUTC(); - private static final String LUK_TENANT_ID = "luk"; - private static final String BOB_TENANT_ID = "bob"; - private static final String ALARM_DEF_NAME = "90%"; - private static final String ALARM_DEF_EXPRESSION = "avg(cpu.idle_perc{flavor_id=777, image_id=888, device=1}) > 10"; - private static final String ALARM_MATCH_BY = "flavor_id,image_id"; - private static final int BINARY_KEY_LENGTH = 20; - private AlarmHibernateUtils repo; - private SessionFactory sessionFactory; - private Transaction tx; - - @BeforeMethod - protected void beforeMethod() { - this.sessionFactory = HibernateUtil.getSessionFactory(); - this.prepareData(this.sessionFactory); - this.repo = new AlarmHibernateUtils(sessionFactory); - - this.tx = this.sessionFactory.openSession().beginTransaction(); - } - - @AfterMethod - protected void afterMethod() throws Exception { - this.tx.rollback(); - - this.sessionFactory.close(); - this.sessionFactory = null; - } - - private void prepareData(final SessionFactory sessionFactory) { - Session session = sessionFactory.openSession(); - - session.beginTransaction(); - - DateTime timestamp1 = ISO_8601_FORMATTER.parseDateTime("2015-03-14T09:26:53"); - - final AlarmDefinitionDb alarmDefinitionBob = this.newAlarmDefinition(session, "1", BOB_TENANT_ID); - final AlarmDefinitionDb alarmDefinitionLuk = this.newAlarmDefinition(session, "2", LUK_TENANT_ID); - session.save(alarmDefinitionBob); - session.save(alarmDefinitionLuk); - - final AlarmDb alarmDb1 = new AlarmDb("1", alarmDefinitionBob, AlarmState.OK, "OPEN", "http://somesite.com/this-alarm-info", timestamp1, timestamp1, timestamp1); - final AlarmDb alarmDb2 = new AlarmDb("2", alarmDefinitionLuk, AlarmState.OK, "OPEN", "http://somesite.com/this-alarm-info", timestamp1, timestamp1, timestamp1); - session.save(alarmDb1); - session.save(alarmDb2); - - final MetricDefinitionDb md1 = new MetricDefinitionDb(new byte[]{1}, "metric", BOB_TENANT_ID, "eu"); - session.save(md1); - - final MetricDimensionDb mDim1Instance = new MetricDimensionDb(randomByteArray(BINARY_KEY_LENGTH), "instance_id", "123"); - final MetricDimensionDb mDim1Service = new MetricDimensionDb(randomByteArray(BINARY_KEY_LENGTH), "service", "monitoring"); - final MetricDimensionDb mDim2Flavor = new 
MetricDimensionDb(randomByteArray(BINARY_KEY_LENGTH), "flavor_id", "222");
-    session.save(mDim1Instance);
-    session.save(mDim1Service);
-    session.save(mDim2Flavor);
-
-    final MetricDefinitionDimensionsDb mdd11 = new MetricDefinitionDimensionsDb(randomByteArray(BINARY_KEY_LENGTH), md1, mDim1Instance.getId().getDimensionSetId());
-    final MetricDefinitionDimensionsDb mdd22 = new MetricDefinitionDimensionsDb(randomByteArray(BINARY_KEY_LENGTH), md1, mDim2Flavor.getId().getDimensionSetId());
-    session.save(mdd11);
-    session.save(mdd22);
-
-    session.save(new AlarmMetricDb(alarmDb1, mdd11));
-    session.save(new AlarmMetricDb(alarmDb1, mdd22));
-    session.save(new AlarmMetricDb(alarmDb2, mdd11));
-
-    session.getTransaction().commit();
-    session.close();
-  }
-
-  private AlarmDefinitionDb newAlarmDefinition(final Session session,
-                                               final String id,
-                                               final String tenantId) {
-    final String str = "AlarmDefinition" + 1;
-    final DateTime now = DateTime.now();
-    final AlarmDefinitionDb definition = new AlarmDefinitionDb(id, tenantId, ALARM_DEF_NAME, str, ALARM_DEF_EXPRESSION, AlarmSeverity.LOW, ALARM_MATCH_BY, true, now, now, null);
-    session.save(definition);
-    return definition;
-  }
-
-  public void testNullArguments() {
-
-    List<String> result = repo.findAlarmIds(null, null);
-
-    assertEquals(result.size(), 0, "No alarms");
-  }
-
-  public void testWithTenantIdNoExist() {
-
-    List<String> result = repo.findAlarmIds("fake_id", null);
-
-    assertEquals(result.size(), 0, "No alarms");
-  }
-
-  public void testWithTenantId() {
-
-    List<String> result = repo.findAlarmIds(BOB_TENANT_ID, new HashMap<String, String>());
-
-    assertEquals(result.size(), 1, "Alarm found");
-    assertEquals(result.get(0), "1", "Alarm with id 1 found");
-
-    result = repo.findAlarmIds(LUK_TENANT_ID, new HashMap<String, String>());
-    assertEquals(result.size(), 1, "Alarm found");
-    assertEquals(result.get(0), "2", "Alarm with id 2 found");
-  }
-
-  public void testWithDimensions() {
-
-    Map<String, String> dimensions = Maps.newHashMap();
-    dimensions.put("flavor_id", "222");
-
-    List<String> result = repo.findAlarmIds(BOB_TENANT_ID, dimensions);
-
-    assertEquals(result.size(), 1, "Alarm found");
-    assertEquals(result.get(0), "1", "Alarm with id 1 found");
-  }
-
-  public void testWithNotExixtingDimensions() {
-
-    Map<String, String> dimensions = Maps.newHashMap();
-    dimensions.put("a", "b");
-
-    List<String> result = repo.findAlarmIds(BOB_TENANT_ID, dimensions);
-
-    assertEquals(result.size(), 0, "Alarm not found");
-  }
-}
diff --git a/java/src/test/java/monasca/api/infrastructure/persistence/hibernate/AlarmSqlRepositoryImplTest.java b/java/src/test/java/monasca/api/infrastructure/persistence/hibernate/AlarmSqlRepositoryImplTest.java
deleted file mode 100644
index 4e8c289a5..000000000
--- a/java/src/test/java/monasca/api/infrastructure/persistence/hibernate/AlarmSqlRepositoryImplTest.java
+++ /dev/null
@@ -1,561 +0,0 @@
-/*
- * Copyright 2015 FUJITSU LIMITED
- * Copyright 2016 Hewlett Packard Enterprise Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */ - -package monasca.api.infrastructure.persistence.hibernate; - -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotEquals; -import static org.testng.Assert.assertTrue; - -import java.sql.Timestamp; -import java.util.Arrays; -import java.util.List; -import java.util.Map; - -import javax.annotation.Nullable; -import javax.ws.rs.WebApplicationException; - -import com.google.common.base.Optional; -import com.google.common.base.Predicate; -import com.google.common.collect.FluentIterable; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableMap.Builder; -import com.google.common.collect.Lists; -import org.apache.commons.collections4.CollectionUtils; -import org.hibernate.Session; -import org.hibernate.SessionFactory; -import org.hibernate.Transaction; -import org.hibernate.criterion.Restrictions; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.format.DateTimeFormatter; -import org.joda.time.format.ISODateTimeFormat; -import org.testng.annotations.AfterMethod; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Test; - -import monasca.api.domain.exception.EntityNotFoundException; -import monasca.api.domain.model.alarm.Alarm; -import monasca.api.domain.model.alarm.AlarmRepo; -import monasca.common.hibernate.db.AlarmDb; -import monasca.common.hibernate.db.AlarmDefinitionDb; -import monasca.common.hibernate.db.AlarmMetricDb; -import monasca.common.hibernate.db.MetricDefinitionDb; -import monasca.common.hibernate.db.MetricDefinitionDimensionsDb; -import monasca.common.hibernate.db.MetricDimensionDb; -import monasca.common.hibernate.db.SubAlarmDb; -import monasca.common.hibernate.db.SubAlarmDefinitionDb; -import monasca.common.model.alarm.AlarmOperator; -import monasca.common.model.alarm.AlarmSeverity; -import monasca.common.model.alarm.AlarmState; -import monasca.common.model.alarm.AlarmSubExpression; -import monasca.common.model.metric.MetricDefinition; - -@Test(groups = "orm") -public class AlarmSqlRepositoryImplTest { - private static final String TENANT_ID = "bob"; - private static final String ALARM_ID = "234111"; - private static final DateTimeFormatter ISO_8601_FORMATTER = ISODateTimeFormat.dateOptionalTimeParser().withZoneUTC(); - private static final DateTimeZone UTC_TIMEZONE = DateTimeZone.forID("UTC"); - private SessionFactory sessionFactory; - private AlarmRepo repo; - private Alarm compoundAlarm; - private Alarm alarm1; - private Alarm alarm2; - private Alarm alarm3; - private Transaction tx; - - @BeforeMethod - protected void setupClass() throws Exception { - this.sessionFactory = HibernateUtil.getSessionFactory(); - this.repo = new AlarmSqlRepoImpl(this.sessionFactory); - this.prepareData(this.sessionFactory); - - this.tx = this.sessionFactory.openSession().beginTransaction(); - } - - @AfterMethod - protected void afterMethod() throws Exception { - this.tx.rollback(); - - this.sessionFactory.close(); - this.sessionFactory = null; - } - - private void prepareData(final SessionFactory sessionFactory) { - final DateTime now = new DateTime(); - Session session = null; - try { - session = sessionFactory.openSession(); - session.beginTransaction(); - - DateTime timestamp1 = ISO_8601_FORMATTER.parseDateTime("2015-03-14T09:26:53").withZoneRetainFields(UTC_TIMEZONE); - DateTime timestamp2 = ISO_8601_FORMATTER.parseDateTime("2015-03-14T09:26:54").withZoneRetainFields(UTC_TIMEZONE); - DateTime timestamp3 = 
ISO_8601_FORMATTER.parseDateTime("2015-03-14T09:26:55").withZoneRetainFields(UTC_TIMEZONE); - DateTime timestamp4 = ISO_8601_FORMATTER.parseDateTime("2015-03-15T09:26:53").withZoneRetainFields(UTC_TIMEZONE); - - final AlarmDefinitionDb alarmDefinition_90Percent = this.newAlarmDefinition(session, - "1", - TENANT_ID, - "90% CPU", - "avg(cpu.idle_perc{flavor_id=777, image_id=888, device=1}) > 10", - AlarmSeverity.LOW, - "flavor_id,image_id", - true - ); - final AlarmDefinitionDb alarmDefinition_50Percent = this.newAlarmDefinition(session, - "234", - TENANT_ID, - "50% CPU", - "avg(cpu.sys_mem{service=monitoring}) > 20 and avg(cpu.idle_perc{service=monitoring}) < 10", - AlarmSeverity.HIGH, - "hostname,region", - true - ); - - final AlarmDb alarmDb_234111 = new AlarmDb(ALARM_ID, alarmDefinition_50Percent, AlarmState.UNDETERMINED, null, null, timestamp4, timestamp4, timestamp4); - final AlarmDb alarmDb_1 = new AlarmDb("1", alarmDefinition_90Percent, AlarmState.OK, "OPEN", "http://somesite.com/this-alarm-info", timestamp1, timestamp1, timestamp1); - final AlarmDb alarmDb_2 = new AlarmDb("2", alarmDefinition_90Percent, AlarmState.UNDETERMINED, "OPEN", null, timestamp2, timestamp2, timestamp2); - final AlarmDb alarmDb_3 = new AlarmDb("3", alarmDefinition_90Percent, AlarmState.ALARM, null, "http://somesite.com/this-alarm-info", timestamp3, timestamp3, timestamp3); - - session.save(alarmDb_1); - session.save(alarmDb_2); - session.save(alarmDb_3); - session.save(alarmDb_234111); - - final List alarmDbs = Lists.newArrayList(alarmDb_1, alarmDb_2, alarmDb_3); - - long subAlarmId = 42; - for (int alarmIndex = 0; alarmIndex < 3; alarmIndex++) { - final SubAlarmDefinitionDb subExpression = this.newSubAlarmDefinition(session, String.format("%d", alarmIndex + subAlarmId), alarmDefinition_50Percent); - session.save( - new SubAlarmDb( - String.valueOf(subAlarmId++), - alarmDbs.get(alarmIndex), - subExpression, - "avg(cpu.idle_perc{flavor_id=777, image_id=888, device=1}) > 10", - now, - now - ) - ); - } - - final MetricDefinitionDb metricDefinition1 = new MetricDefinitionDb(new byte[]{1}, "cpu.idle_perc", "bob", "west"); - session.save(metricDefinition1); - - final MetricDimensionDb metricDimension1InstanceId = new MetricDimensionDb(new byte[]{1}, "instance_id", "123"); - final MetricDimensionDb metricDimensionService = new MetricDimensionDb(new byte[]{1}, "service", "monitoring"); - final MetricDimensionDb metricDimension2FlavorId = new MetricDimensionDb(new byte[]{2}, "flavor_id", "222"); - session.save(metricDimension1InstanceId); - session.save(metricDimensionService); - session.save(metricDimension2FlavorId); - - final MetricDefinitionDimensionsDb metricDefinitionDimensions11 = new MetricDefinitionDimensionsDb( - new byte[]{1, 1}, - metricDefinition1, - metricDimension1InstanceId.getId().getDimensionSetId() - ); - final MetricDefinitionDimensionsDb metricDefinitionDimensions22 = new MetricDefinitionDimensionsDb( - new byte[]{2, 2}, - metricDefinition1, - metricDimension2FlavorId.getId().getDimensionSetId() - ); - session.save(metricDefinitionDimensions11); - session.save(metricDefinitionDimensions22); - - session.save(new AlarmMetricDb(alarmDbs.get(0), metricDefinitionDimensions11)); - session.save(new AlarmMetricDb(alarmDbs.get(0), metricDefinitionDimensions22)); - session.save(new AlarmMetricDb(alarmDbs.get(1), metricDefinitionDimensions11)); - session.save(new AlarmMetricDb(alarmDbs.get(2), metricDefinitionDimensions22)); - - alarm1 = - new Alarm("1", "1", "90% CPU", "LOW", - buildAlarmMetrics( - 
buildMetricDefinition("cpu.idle_perc", "instance_id", "123", "service", "monitoring") - , buildMetricDefinition("cpu.idle_perc", "flavor_id", "222") - ) - , AlarmState.OK, "OPEN", "http://somesite.com/this-alarm-info", timestamp1, - timestamp1, timestamp1); - - alarm2 = - new Alarm("2", "1", "90% CPU", "LOW", buildAlarmMetrics(buildMetricDefinition("cpu.idle_perc", "instance_id", "123", "service", - "monitoring")), AlarmState.UNDETERMINED, "OPEN", null, timestamp2, timestamp2, timestamp2); - - alarm3 = - new Alarm("3", "1", "90% CPU", "LOW", buildAlarmMetrics(buildMetricDefinition("cpu.idle_perc", "flavor_id", "222")), AlarmState.ALARM, - null, "http://somesite.com/this-alarm-info", timestamp3, timestamp3, timestamp3); - - - final SubAlarmDb subAlarmDb1 = new SubAlarmDb("4343", alarmDb_234111, "avg(cpu.sys_mem{service=monitoring}) > 20", now, now); - final SubAlarmDb subAlarmDb2 = new SubAlarmDb("4242", alarmDb_234111, "avg(cpu.idle_perc{service=monitoring}) < 10", now, now); - session.save(subAlarmDb1); - session.save(subAlarmDb2); - - final MetricDefinitionDb metricDefinition111 = new MetricDefinitionDb(new byte[]{1, 1, 1}, "cpu.sys_mem", "bob", "west"); - final MetricDefinitionDb metricDefinition112 = new MetricDefinitionDb(new byte[]{1, 1, 2}, "cpu.idle_perc", "bob", "west"); - session.save(metricDefinition111); - session.save(metricDefinition112); - - final MetricDefinitionDimensionsDb metricDefinitionDimension31 = new MetricDefinitionDimensionsDb( - new byte[]{3, 1}, - metricDefinition111, - new byte[]{2, 1} - ); - final MetricDefinitionDimensionsDb metricDefinitionDimension32 = new MetricDefinitionDimensionsDb( - new byte[]{3, 2}, - metricDefinition112, - new byte[]{2, 2} - ); - session.save(metricDefinitionDimension31); - session.save(metricDefinitionDimension32); - - session.save(new AlarmMetricDb(alarmDb_234111, metricDefinitionDimension31)); - session.save(new AlarmMetricDb(alarmDb_234111, metricDefinitionDimension32)); - - session.save(new MetricDimensionDb(new byte[]{2, 1}, "service", "monitoring")); - session.save(new MetricDimensionDb(new byte[]{2, 2}, "service", "monitoring")); - session.save(new MetricDimensionDb(new byte[]{2, 1}, "hostname", "roland")); - session.save(new MetricDimensionDb(new byte[]{2, 2}, "hostname", "roland")); - session.save(new MetricDimensionDb(new byte[]{2, 1}, "region", "colorado")); - session.save(new MetricDimensionDb(new byte[]{2, 2}, "region", "colorado")); - session.save(new MetricDimensionDb(new byte[]{2, 2}, "extra", "vivi")); - - session.flush(); - session.getTransaction().commit(); - - compoundAlarm = - new Alarm("234111", "234", "50% CPU", "HIGH", buildAlarmMetrics( - buildMetricDefinition("cpu.sys_mem", "hostname", "roland", "region", "colorado", "service", "monitoring"), - buildMetricDefinition("cpu.idle_perc", "extra", "vivi", "hostname", "roland", "region", "colorado", "service", "monitoring")), - AlarmState.UNDETERMINED, null, null, timestamp4, timestamp4, timestamp4); - - } finally { - if (session != null) { - session.close(); - } - } - - } - - private SubAlarmDefinitionDb newSubAlarmDefinition(final Session session, final String id, final AlarmDefinitionDb alarmDefinition) { - final DateTime now = DateTime.now(); - final SubAlarmDefinitionDb db = new SubAlarmDefinitionDb( - id, - alarmDefinition, - String.format("f_%s", id), - String.format("m_%s", id), - AlarmOperator.GT.toString(), - 0.0, - 1, - 2, - now, - now - ); - session.save(db); - return db; - } - - private AlarmDefinitionDb newAlarmDefinition(final Session session, - 
final String id,
-                                               final String tenantId,
-                                               final String name,
-                                               final String expression,
-                                               final AlarmSeverity severity,
-                                               final String matchBy,
-                                               final boolean actionEnabled) {
-    final DateTime now = DateTime.now();
-    final AlarmDefinitionDb db = new AlarmDefinitionDb(id, tenantId, name, null, expression, severity, matchBy, actionEnabled, now, now, null);
-    session.save(db);
-    return db;
-  }
-
-  private List<MetricDefinition> buildAlarmMetrics(final MetricDefinition... metricDefinitions) {
-    return Arrays.asList(metricDefinitions);
-  }
-
-  private MetricDefinition buildMetricDefinition(final String metricName, final String... dimensions) {
-    final Builder<String, String> builder = ImmutableMap.builder();
-    for (int i = 0; i < dimensions.length; ) {
-      builder.put(dimensions[i], dimensions[i + 1]);
-      i += 2;
-    }
-    return new MetricDefinition(metricName, builder.build());
-  }
-
-  @Test(groups = "orm")
-  @SuppressWarnings("unchecked")
-  public void shouldDelete() {
-    Session session = null;
-    repo.deleteById(TENANT_ID, ALARM_ID);
-    try {
-
-      session = sessionFactory.openSession();
-
-      List<AlarmDefinitionDb> rows = session
-          .createCriteria(AlarmDefinitionDb.class, "ad")
-          .add(Restrictions.eq("ad.id", "234"))
-          .setReadOnly(true)
-          .list();
-
-      assertEquals(rows.size(), 1, "Alarm Definition was deleted as well");
-
-    } finally {
-      if (session != null) {
-        session.close();
-      }
-    }
-  }
-
-  @Test(groups = "orm", expectedExceptions = EntityNotFoundException.class)
-  public void shouldThowExceptionOnDelete() {
-    repo.deleteById(TENANT_ID, "Not an alarm ID");
-  }
-
-  @Test(groups = "orm")
-  public void shouldFindAlarmSubExpressions() {
-    final Map<String, AlarmSubExpression> subExpressionMap = repo.findAlarmSubExpressions(ALARM_ID);
-    assertEquals(subExpressionMap.size(), 2);
-    assertEquals(subExpressionMap.get("4343"), AlarmSubExpression.of("avg(cpu.sys_mem{service=monitoring}) > 20"));
-    assertEquals(subExpressionMap.get("4242"), AlarmSubExpression.of("avg(cpu.idle_perc{service=monitoring}) < 10"));
-  }
-
-  @Test(groups = "orm")
-  public void shouldAlarmSubExpressionsForAlarmDefinition() {
-    final Map<String, Map<String, AlarmSubExpression>> alarmSubExpressionMap =
-        repo.findAlarmSubExpressionsForAlarmDefinition(alarm1.getAlarmDefinition().getId());
-    assertEquals(alarmSubExpressionMap.size(), 3);
-    long subAlarmId = 42;
-    for (int alarmId = 1; alarmId <= 3; alarmId++) {
-      final Map<String, AlarmSubExpression> subExpressionMap = alarmSubExpressionMap.get(String.valueOf(alarmId));
-      assertEquals(subExpressionMap.get(String.valueOf(subAlarmId)),
-          AlarmSubExpression.of("avg(cpu.idle_perc{flavor_id=777, image_id=888, device=1}) > 10"));
-      subAlarmId++;
-    }
-  }
-
-  @Test(groups = "orm")
-  public void shouldFind() {
-    checkUnsortedList(repo.find(TENANT_ID, null, null, null, null, null, null, null, null, null, null, 1, true), alarm1, alarm2);
-    checkUnsortedList(repo.find(TENANT_ID, null, null, null, null, null, null, null, null, null, null, 2, true), alarm1, alarm2, alarm3);
-    checkUnsortedList(repo.find(TENANT_ID, null, null, null, null, null, null, null, null, null, "1", 1, true), alarm2, alarm3);
-    checkUnsortedList(repo.find(TENANT_ID, null, null, null, null, null, null, null, null, null, "2", 1, true), alarm3, compoundAlarm);
-    checkUnsortedList(repo.find(TENANT_ID, null, null, null, null, null, null, null, null, null, "3", 1, true), compoundAlarm);
-
-    checkUnsortedList(repo.find("Not a tenant id", null, null, null, null, null, null, null, null, null, null, 1, false));
-
-    checkUnsortedList(repo.find(TENANT_ID, null, null, null, null, null, null, null, null, null, null, 1, false), alarm1, alarm2, alarm3, compoundAlarm);
-
-    
checkUnsortedList(repo.find(TENANT_ID, compoundAlarm.getAlarmDefinition().getId(), null, null, null, null, null, null, null, null, null, 1, false), compoundAlarm); - - checkUnsortedList(repo.find(TENANT_ID, null, "cpu.sys_mem", null, null, null, null, null, null, null, null, 1, false), compoundAlarm); - - checkUnsortedList(repo.find(TENANT_ID, null, "cpu.idle_perc", null, null, null, null, null, null, null, null, 1, false), alarm1, alarm2, alarm3, compoundAlarm); - - checkUnsortedList(repo.find(TENANT_ID, null, "cpu.idle_perc", ImmutableMap.builder().put("flavor_id", "222").build(), null, null, null, null, - null, null, null, 1, false), alarm1, alarm3); - - checkUnsortedList( - repo.find(TENANT_ID, null, "cpu.idle_perc", ImmutableMap.builder().put("service", "monitoring").put("hostname", "roland") - .build(), null, null, null, null, null, null, null, 1, false), compoundAlarm); - - checkUnsortedList(repo.find(TENANT_ID, null, null, null, AlarmState.UNDETERMINED, null, null, null, null, null, null, 1, false), alarm2, compoundAlarm); - - checkUnsortedList( - repo.find(TENANT_ID, alarm1.getAlarmDefinition().getId(), "cpu.idle_perc", ImmutableMap.builder() - .put("service", "monitoring").build(), null, null, null, null, null, null, null, 1, false), alarm1, alarm2); - - checkUnsortedList(repo.find(TENANT_ID, alarm1.getAlarmDefinition().getId(), "cpu.idle_perc", null, null, null, null, null, null, null, null, 1, false), alarm1, - alarm2, alarm3); - - checkUnsortedList( - repo.find(TENANT_ID, compoundAlarm.getAlarmDefinition().getId(), null, null, AlarmState.UNDETERMINED, null, null, null, null, null, null, 1, false), - compoundAlarm); - - checkUnsortedList(repo.find(TENANT_ID, null, "cpu.sys_mem", null, AlarmState.UNDETERMINED, null, null, null, null, null, null, 1, false), compoundAlarm); - - checkUnsortedList(repo.find(TENANT_ID, null, "cpu.idle_perc", ImmutableMap.builder().put("service", "monitoring").build(), - AlarmState.UNDETERMINED, null, null, null, null, null, null, 1, false), alarm2, compoundAlarm); - - checkUnsortedList( - repo.find(TENANT_ID, alarm1.getAlarmDefinition().getId(), "cpu.idle_perc", ImmutableMap.builder() - .put("service", "monitoring").build(), AlarmState.UNDETERMINED, null, null, null, null, null, null, 1, false), alarm2); - - checkUnsortedList(repo.find(TENANT_ID, null, null, null, null, null, null, null, DateTime.now(UTC_TIMEZONE), null, null, 0, false)); - -// checkUnsortedList(repo.find(TENANT_ID, null, null, null, null, null, null, ISO_8601_FORMATTER.parseDateTime("2015-03-15T00:00:00Z"), null, 0, false), -// compoundAlarm); - - checkUnsortedList(repo.find(TENANT_ID, null, null, null, null, null, null, null, ISO_8601_FORMATTER.parseDateTime("2015-03-14T00:00:00Z"), null, null, 1, false), - alarm1, alarm2, alarm3, compoundAlarm); - - } - - @Test(groups = "orm") - public void shouldFindById() { - - final Alarm alarm = repo.findById(TENANT_ID, compoundAlarm.getId()); - - assertEquals(alarm.getId(), compoundAlarm.getId()); - assertEquals(alarm.getAlarmDefinition(), compoundAlarm.getAlarmDefinition()); - assertEquals(alarm.getCreatedTimestamp(), compoundAlarm.getCreatedTimestamp()); - assertEquals(alarm.getStateUpdatedTimestamp(), compoundAlarm.getStateUpdatedTimestamp()); - assertEquals(alarm.getState(), compoundAlarm.getState()); - assertEquals(alarm.getMetrics().size(), compoundAlarm.getMetrics().size()); - assertTrue(CollectionUtils.isEqualCollection(alarm.getMetrics(), compoundAlarm.getMetrics()), "Metrics not equal"); - } - - @Test(groups = "orm", 
expectedExceptions = EntityNotFoundException.class) - public void shouldFindByIdThrowException() { - - repo.findById(TENANT_ID, "Not a valid alarm id"); - } - - @Test(groups = "orm") - public void shouldUpdate() throws InterruptedException { - final Alarm originalAlarm = repo.findById(TENANT_ID, ALARM_ID); - final DateTime originalStateUpdatedAt = getAlarmStateUpdatedDate(ALARM_ID); - final DateTime originalUpdatedAt = getAlarmUpdatedDate(ALARM_ID); - assertEquals(originalAlarm.getState(), AlarmState.UNDETERMINED); - - Thread.sleep(1000); - final Alarm newAlarm = repo.update(TENANT_ID, ALARM_ID, AlarmState.OK, null, null); - final DateTime newStateUpdatedAt = getAlarmStateUpdatedDate(ALARM_ID); - final DateTime newUpdatedAt = getAlarmUpdatedDate(ALARM_ID); - assertNotEquals(newStateUpdatedAt.getMillis(), originalStateUpdatedAt.getMillis(), - "state_updated_at did not change"); - assertNotEquals(newUpdatedAt.getMillis(), originalUpdatedAt.getMillis(), - "updated_at did not change"); - - assertEquals(newAlarm, originalAlarm); - - newAlarm.setState(AlarmState.OK); - newAlarm.setStateUpdatedTimestamp(newStateUpdatedAt); - newAlarm.setUpdatedTimestamp(newUpdatedAt); - - // Make sure it was updated in the DB - assertEquals(repo.findById(TENANT_ID, ALARM_ID), newAlarm); - - Thread.sleep(1000); - final Alarm unchangedAlarm = repo.update(TENANT_ID, ALARM_ID, AlarmState.OK, "OPEN", null); - assertTrue(getAlarmStateUpdatedDate(ALARM_ID).equals(newStateUpdatedAt), "state_updated_at did change"); - assertNotEquals(getAlarmUpdatedDate(ALARM_ID).getMillis(), newStateUpdatedAt, "updated_at did not change"); - assertEquals(unchangedAlarm, newAlarm); - } - - @Test(groups = "orm", expectedExceptions = EntityNotFoundException.class) - public void shouldUpdateThrowException() { - - repo.update(TENANT_ID, "Not a valid alarm id", AlarmState.UNDETERMINED, null, null); - } - - @Test(groups = "orm") - public void shouldFilterBySeverity() { - - checkUnsortedList(repo.find(TENANT_ID, null, null, null, null, Lists.newArrayList(AlarmSeverity.LOW), null, null, null, null, null, 1, false), - alarm1, alarm2, alarm3); - - checkUnsortedList(repo.find(TENANT_ID, null, null, null, null, Lists.newArrayList(AlarmSeverity.HIGH), null, null, null, null, null, 1, false), - compoundAlarm); - - checkUnsortedList(repo.find(TENANT_ID, null, null, null, null, Lists.newArrayList(AlarmSeverity.LOW, AlarmSeverity.HIGH), null, null, null, null, null, 1, false), - alarm1, alarm2, compoundAlarm, alarm3); - - // no alarms for those severities - checkUnsortedList(repo.find(TENANT_ID, null, null, null, null, Lists.newArrayList(AlarmSeverity.CRITICAL), null, null, null, null, null, 1, false)); - checkUnsortedList(repo.find(TENANT_ID, null, null, null, null, Lists.newArrayList(AlarmSeverity.MEDIUM), null, null, null, null, null, 1, false)); - checkUnsortedList(repo.find(TENANT_ID, null, null, null, null, Lists.newArrayList(AlarmSeverity.CRITICAL, AlarmSeverity.MEDIUM), null, null, null, null, null, 1, false)); - } - - @Test(groups = "orm") - public void shouldSortBy() { - checkSortedList(repo.find(TENANT_ID, null, null, null, null, null, null, null, null, Lists.newArrayList("state", "severity"), null, 1, false), - alarm1, alarm2, compoundAlarm, alarm3); - checkSortedList(repo.find(TENANT_ID, null, null, null, null, null, null, null, null, Lists.newArrayList("state desc", "severity"), null, 1, false), - alarm3, alarm2, compoundAlarm, alarm1); - checkSortedList(repo.find(TENANT_ID, null, null, null, null, null, null, null, null, 
Lists.newArrayList("state desc", "severity asc"), null, 1, false), - alarm3, alarm2, compoundAlarm, alarm1); - checkSortedList(repo.find(TENANT_ID, null, null, null, null, null, null, null, null, Lists.newArrayList("state desc", "severity desc"), null, 1, false), - alarm3, compoundAlarm, alarm2, alarm1); - checkSortedList(repo.find(TENANT_ID, null, null, null, null, null, null, null, null, Lists.newArrayList("severity"), null, 1, false), - alarm1, alarm2, alarm3, compoundAlarm); - checkSortedList(repo.find(TENANT_ID, null, null, null, null, null, null, null, null, Lists.newArrayList("severity desc", "alarm_id desc"), null, 1, false), - compoundAlarm, alarm3, alarm2, alarm1); - } - - - - private void checkUnsortedList(List found, Alarm... expected) { - this.checkUnsortedList(found, false, expected); - } - - private void checkSortedList(List found, Alarm... expected) { - this.checkUnsortedList(found, true, expected); - } - - private void checkUnsortedList(List found, boolean sorted, Alarm... expected) { - assertEquals(found.size(), expected.length); - Alarm actual; - int actualIndex; - - for (int expectedIndex = 0; expectedIndex < expected.length; expectedIndex++) { - final Alarm alarm = expected[expectedIndex]; - final Optional alarmOptional = FluentIterable - .from(found) - .firstMatch(new Predicate() { - @Override - public boolean apply(@Nullable final Alarm input) { - assert input != null; - return input.getId().equals(alarm.getId()); - } - }); - assertTrue(alarmOptional.isPresent()); - - actual = alarmOptional.get(); - if (sorted) { - actualIndex = found.indexOf(actual); - assertEquals(expectedIndex, actualIndex); - } - assertEquals(actual, alarm, String.format("%s not equal to %s", actual, alarm)); - } - - } - - private DateTime getAlarmUpdatedDate(final String alarmId) { - return this.getDateField(alarmId, "updatedAt"); - } - - private DateTime getAlarmStateUpdatedDate(final String alarmId) { - return this.getDateField(alarmId, "stateUpdatedAt"); - } - - private DateTime getDateField(final String alarmId, final String fieldName) { - Session session = null; - DateTime time = null; - - try { - session = sessionFactory.openSession(); - final String queryString = String.format("select %s from AlarmDb where id = :alarmId", fieldName); - final List rows = session.createQuery(queryString).setString("alarmId", alarmId).list(); - - time = new DateTime(((Timestamp) rows.get(0)).getTime(), UTC_TIMEZONE); - } finally { - if (session != null) { - session.close(); - } - } - - return time; - } -} diff --git a/java/src/test/java/monasca/api/infrastructure/persistence/hibernate/HibernateUtil.java b/java/src/test/java/monasca/api/infrastructure/persistence/hibernate/HibernateUtil.java deleted file mode 100644 index 1bda9d543..000000000 --- a/java/src/test/java/monasca/api/infrastructure/persistence/hibernate/HibernateUtil.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Copyright 2015 FUJITSU LIMITED - * (C) Copyright 2014,2016 Hewlett Packard Enterprise Development LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. 
See the License for the specific language governing permissions and limitations under - * the License. - */ -package monasca.api.infrastructure.persistence.hibernate; - -import java.util.Properties; - -import org.hibernate.HibernateException; -import org.hibernate.SessionFactory; -import org.hibernate.boot.registry.StandardServiceRegistryBuilder; -import org.hibernate.cfg.Configuration; -import org.hibernate.service.ServiceRegistry; - -import monasca.common.hibernate.db.AlarmActionDb; -import monasca.common.hibernate.db.AlarmDb; -import monasca.common.hibernate.db.AlarmDefinitionDb; -import monasca.common.hibernate.db.AlarmMetricDb; -import monasca.common.hibernate.db.MetricDefinitionDb; -import monasca.common.hibernate.db.MetricDefinitionDimensionsDb; -import monasca.common.hibernate.db.MetricDimensionDb; -import monasca.common.hibernate.db.NotificationMethodDb; -import monasca.common.hibernate.db.NotificationMethodTypesDb; -import monasca.common.hibernate.db.SubAlarmDb; -import monasca.common.hibernate.db.SubAlarmDefinitionDb; -import monasca.common.hibernate.db.SubAlarmDefinitionDimensionDb; - -class HibernateUtil { - - private static Configuration CONFIGURATION = null; - - static { - try { - Configuration configuration = new Configuration(); - - configuration.addAnnotatedClass(AlarmDb.class); - configuration.addAnnotatedClass(AlarmDefinitionDb.class); - configuration.addAnnotatedClass(AlarmMetricDb.class); - configuration.addAnnotatedClass(MetricDefinitionDb.class); - configuration.addAnnotatedClass(MetricDefinitionDimensionsDb.class); - configuration.addAnnotatedClass(MetricDimensionDb.class); - configuration.addAnnotatedClass(SubAlarmDefinitionDb.class); - configuration.addAnnotatedClass(SubAlarmDefinitionDimensionDb.class); - configuration.addAnnotatedClass(SubAlarmDb.class); - configuration.addAnnotatedClass(AlarmActionDb.class); - configuration.addAnnotatedClass(NotificationMethodDb.class); - configuration.addAnnotatedClass(NotificationMethodTypesDb.class); - - configuration.setProperties(getHikariH2Properties()); - - HibernateUtil.CONFIGURATION = configuration; - } catch (Throwable ex) { - // Make sure you log the exception, as it might be swallowed - System.err.println("Initial SessionFactory creation failed." 
+ ex); - throw new ExceptionInInitializerError(ex); - } - } - - private static Properties getHikariPostgresProperties() { - Properties properties = new Properties(); - properties.put("hibernate.connection.provider_class", "com.zaxxer.hikari.hibernate.HikariConnectionProvider"); - properties.put("hibernate.hbm2ddl.auto", "validate"); - properties.put("show_sql", true); - properties.put("hibernate.hikari.dataSourceClassName", "org.postgresql.ds.PGPoolingDataSource"); - properties.put("hibernate.hikari.dataSource.serverName", "localhost"); - properties.put("hibernate.hikari.dataSource.portNumber", "5432"); - properties.put("hibernate.hikari.dataSource.databaseName", "mon"); - properties.put("hibernate.hikari.dataSource.user", "mon"); - properties.put("hibernate.hikari.dataSource.password", "mon"); - properties.put("hibernate.hikari.dataSource.initialConnections", "25"); - properties.put("hibernate.hikari.dataSource.maxConnections", "100"); - properties.put("hibernate.hikari.connectionTestQuery", "SELECT 1"); - return properties; - } - - private static Properties getHikariMySqlProperties() { - Properties properties = new Properties(); - properties.put("hibernate.connection.provider_class", "com.zaxxer.hikari.hibernate.HikariConnectionProvider"); - properties.put("hibernate.hbm2ddl.auto", "validate"); - properties.put("show_sql", true); - properties.put("hibernate.hikari.dataSourceClassName", "com.mysql.jdbc.jdbc2.optional.MysqlDataSource"); - properties.put("hibernate.hikari.dataSource.url", - "jdbc:mysql://192.168.10.4:3306/mon?useLegacyDatetimeCode=false&serverTimezone=UTC"); - properties.put("hibernate.hikari.dataSource.user", "monapi"); - properties.put("hibernate.hikari.dataSource.password", "password"); - return properties; - } - - private static Properties getHikariH2Properties() { - Properties properties = new Properties(); - properties.put("hibernate.connection.provider_class", "com.zaxxer.hikari.hibernate.HikariConnectionProvider"); - properties.put("hibernate.hbm2ddl.auto", "create-drop"); - properties.put("show_sql", false); - properties.put("hibernate.hikari.dataSourceClassName", "org.h2.jdbcx.JdbcDataSource"); - properties.put("hibernate.hikari.dataSource.url", "jdbc:h2:mem:mon;MODE=PostgreSQL"); - properties.put("hibernate.hikari.dataSource.user", "sa"); - properties.put("hibernate.hikari.dataSource.password", ""); - return properties; - } - - public static SessionFactory getSessionFactory() throws HibernateException { - ServiceRegistry serviceRegistry = new StandardServiceRegistryBuilder().applySettings(CONFIGURATION.getProperties()).build(); - return CONFIGURATION.buildSessionFactory(serviceRegistry); - } -} diff --git a/java/src/test/java/monasca/api/infrastructure/persistence/hibernate/NotificationMethodSqlRepositoryImplTest.java b/java/src/test/java/monasca/api/infrastructure/persistence/hibernate/NotificationMethodSqlRepositoryImplTest.java deleted file mode 100644 index 179752473..000000000 --- a/java/src/test/java/monasca/api/infrastructure/persistence/hibernate/NotificationMethodSqlRepositoryImplTest.java +++ /dev/null @@ -1,184 +0,0 @@ -/* - * Copyright 2015 FUJITSU LIMITED - * (C) Copyright 2016 Hewlett Packard Enterprise Development LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ - -package monasca.api.infrastructure.persistence.hibernate; - -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertFalse; -import static org.testng.Assert.assertTrue; -import static org.testng.Assert.fail; - -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - -import org.hibernate.Session; -import org.hibernate.SessionFactory; -import org.hibernate.Transaction; -import org.joda.time.DateTime; -import org.testng.annotations.AfterMethod; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Test; - -import monasca.api.domain.exception.EntityExistsException; -import monasca.api.domain.exception.EntityNotFoundException; -import monasca.api.domain.model.notificationmethod.NotificationMethod; -import monasca.api.domain.model.notificationmethod.NotificationMethodRepo; -import monasca.common.hibernate.db.NotificationMethodDb; -import monasca.common.hibernate.db.NotificationMethodTypesDb; -import monasca.common.model.alarm.AlarmNotificationMethodType; - -@Test(groups = "orm") -public class NotificationMethodSqlRepositoryImplTest { - NotificationMethodRepo repo = null; - private SessionFactory sessionFactory; - private Transaction tx; - - private static final String NOTIFICATION_METHOD_EMAIL = "EMAIL"; - - @BeforeMethod - protected void beforeMethod() throws Exception { - this.sessionFactory = HibernateUtil.getSessionFactory(); - this.repo = new NotificationMethodSqlRepoImpl(sessionFactory); - - this.prepareData(this.sessionFactory); - - this.tx = this.sessionFactory.openSession().beginTransaction(); - } - - @AfterMethod - protected void afterMethod() throws Exception { - this.tx.rollback(); - - this.sessionFactory.close(); - this.sessionFactory = null; - } - - protected void prepareData(final SessionFactory sessionFactory) { - Session session = null; - try { - session = sessionFactory.openSession(); - - session.beginTransaction(); - - NotificationMethodDb notificationMethodDb1 = - new NotificationMethodDb("123", "444", "MyEmail", AlarmNotificationMethodType.EMAIL, "a@b", 0, new DateTime(), new DateTime()); - NotificationMethodDb notificationMethodDb2 = - new NotificationMethodDb("124", "444", "OtherEmail", AlarmNotificationMethodType.EMAIL, "a@b", 0, new DateTime(), new DateTime()); - - session.save(notificationMethodDb1); - session.save(notificationMethodDb2); - - NotificationMethodTypesDb notificationMethodTypeDb1 = - new NotificationMethodTypesDb("EMAIL"); - NotificationMethodTypesDb notificationMethodTypeDb2 = - new NotificationMethodTypesDb("WEBHOOK"); - NotificationMethodTypesDb notificationMethodTypeDb3 = - new NotificationMethodTypesDb("PAGERDUTY"); - - session.save(notificationMethodTypeDb1); - session.save(notificationMethodTypeDb2); - session.save(notificationMethodTypeDb3); - - - session.getTransaction().commit(); - - } finally { - if (session != null) { - session.close(); - } - } - } - - @Test(groups = "orm") - public void shouldCreate() { - NotificationMethod nmA = repo.create("555", "MyEmail", NOTIFICATION_METHOD_EMAIL, "a@b", 0); - NotificationMethod nmB = repo.findById("555", 
-
-    assertEquals(nmA, nmB);
-  }
-
-  @Test(groups = "orm")
-  public void shouldExistForTenantAndNotificationMethod() {
-    assertTrue(repo.exists("444", "123"));
-    assertFalse(repo.exists("444", "1234"));
-    assertFalse(repo.exists("333", "123"));
-  }
-
-  @Test(groups = "orm")
-  public void shouldFind() {
-    List<NotificationMethod> nms1 = repo.find("444", null, null, 1);
-
-    assertEquals(nms1, Arrays.asList(new NotificationMethod("123", "MyEmail", NOTIFICATION_METHOD_EMAIL, "a@b", 0), new NotificationMethod("124",
-        "OtherEmail", NOTIFICATION_METHOD_EMAIL, "a@b", 0)));
-
-    List<NotificationMethod> nms2 = repo.find("444", null, "1", 1);
-
-    assertEquals(nms2, Collections.singletonList(new NotificationMethod("124", "OtherEmail", NOTIFICATION_METHOD_EMAIL, "a@b", 0)));
-  }
-
-  @Test(groups = "orm")
-  public void shouldSortBy() {
-    // null sorts by will sort by ID
-    List<NotificationMethod> nms1 = repo.find("444", null, null, 1);
-    assertEquals(nms1, Arrays.asList(new NotificationMethod("123", "MyEmail", NOTIFICATION_METHOD_EMAIL, "a@b", 0),
-        new NotificationMethod("124", "OtherEmail", NOTIFICATION_METHOD_EMAIL, "a@b", 0)));
-
-    List<NotificationMethod> nms2 = repo.find("444", Arrays.asList("name desc", "address"), null, 1);
-    assertEquals(nms2, Arrays.asList(new NotificationMethod("124", "OtherEmail", NOTIFICATION_METHOD_EMAIL, "a@b", 0),
-        new NotificationMethod("123", "MyEmail", NOTIFICATION_METHOD_EMAIL, "a@b", 0)));
-  }
-
-  @Test(groups = "orm")
-  public void shouldUpdate() {
-    repo.update("444", "123", "Foo", NOTIFICATION_METHOD_EMAIL, "abc", 0);
-    NotificationMethod nm = repo.findById("444", "123");
-
-    assertEquals(nm, new NotificationMethod("123", "Foo", NOTIFICATION_METHOD_EMAIL, "abc", 0));
-  }
-
-  @Test(groups = "orm")
-  public void shouldUpdateReturnValue() {
-    NotificationMethod nm = repo.update("444", "123", "Foo", NOTIFICATION_METHOD_EMAIL, "abc", 0);
-
-    NotificationMethod foundNotificationMethod = repo.findById("444", "123");
-    assertEquals(nm, foundNotificationMethod);
-  }
-
-  @Test(groups = "orm")
-  public void shouldDeleteById() {
-    repo.deleteById("444", "123");
-
-    try {
-      repo.findById("444", "123");
-      fail();
-    } catch (EntityNotFoundException ignore) {
-    }
-  }
-
-  @Test(groups = "orm")
-  public void shouldUpdateDuplicateWithSameValues() {
-    repo.update("444", "123", "Foo", NOTIFICATION_METHOD_EMAIL, "abc", 0);
-    NotificationMethod nm = repo.findById("444", "123");
-
-    assertEquals(nm, new NotificationMethod("123", "Foo", NOTIFICATION_METHOD_EMAIL, "abc", 0));
-  }
-
-  @Test(groups = "orm", expectedExceptions = EntityExistsException.class)
-  public void shouldNotUpdateDuplicateWithSameName() {
-
-    repo.update("444", "124", "MyEmail", NOTIFICATION_METHOD_EMAIL, "abc", 0);
-  }
-}
diff --git a/java/src/test/java/monasca/api/infrastructure/persistence/hibernate/NotificationMethodTypesSqlRepositoryImplTest.java b/java/src/test/java/monasca/api/infrastructure/persistence/hibernate/NotificationMethodTypesSqlRepositoryImplTest.java
deleted file mode 100644
index 9d292c6d7..000000000
--- a/java/src/test/java/monasca/api/infrastructure/persistence/hibernate/NotificationMethodTypesSqlRepositoryImplTest.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright 2015 FUJITSU LIMITED
- * (C) Copyright 2016 Hewlett Packard Enterprise Development LP
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-
-package monasca.api.infrastructure.persistence.hibernate;
-
-import static org.testng.Assert.assertEquals;
-
-import java.util.Arrays;
-import java.util.List;
-
-import org.hibernate.SQLQuery;
-import org.hibernate.Session;
-import org.hibernate.SessionFactory;
-import org.hibernate.Transaction;
-import org.testng.annotations.AfterMethod;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-@Test(groups = "orm")
-public class NotificationMethodTypesSqlRepositoryImplTest {
-  NotificationMethodTypesSqlRepoImpl repo = null;
-  private SessionFactory sessionFactory;
-  private Transaction tx;
-
-  private final static List<String> DEFAULT_NOTIFICATION_METHODS = Arrays.asList("Email", "PagerDuty", "WebHook");
-
-  @BeforeMethod
-  protected void beforeMethod() throws Exception {
-    this.sessionFactory = HibernateUtil.getSessionFactory();
-    this.repo = new NotificationMethodTypesSqlRepoImpl(sessionFactory);
-
-    this.prepareData(this.sessionFactory);
-
-    this.tx = this.sessionFactory.openSession().beginTransaction();
-  }
-
-  @AfterMethod
-  protected void afterMethod() throws Exception {
-    this.tx.rollback();
-
-    this.sessionFactory.close();
-    this.sessionFactory = null;
-  }
-
-  protected void prepareData(final SessionFactory sessionFactory) {
-    Session session = null;
-    try {
-      session = sessionFactory.openSession();
-
-      session.beginTransaction();
-
-      for (String method: DEFAULT_NOTIFICATION_METHODS){
-        SQLQuery insertQuery = session.createSQLQuery("INSERT INTO NOTIFICATION_METHOD_TYPE (name) VALUES(?)");
-        insertQuery.setParameter(0, method);
-        insertQuery.executeUpdate();
-      }
-
-      session.getTransaction().commit();
-
-    } finally {
-      if (session != null) {
-        session.close();
-      }
-    }
-  }
-
-
-
-  @Test(groups = "orm")
-  public void shouldList() {
-    List<String> result = repo.listNotificationMethodTypes();
-
-    assertEquals(DEFAULT_NOTIFICATION_METHODS, result);
-  }
-
-}
diff --git a/java/src/test/java/monasca/api/infrastructure/persistence/hibernate/TestHelper.java b/java/src/test/java/monasca/api/infrastructure/persistence/hibernate/TestHelper.java
deleted file mode 100644
index fb0a8cd5d..000000000
--- a/java/src/test/java/monasca/api/infrastructure/persistence/hibernate/TestHelper.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright 2015 FUJITSU LIMITED
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- *
- */
-
-package monasca.api.infrastructure.persistence.hibernate;
-
-import java.util.Random;
-
-import org.joda.time.DateTime;
-
-class TestHelper {
-  private static final int SLEEP_TIME_RANDOM_BYTE_ARRAY = 30;
-
-  private TestHelper() {
-  }
-
-  static byte[] randomByteArray(final int length) {
-    return randomByteArray(length, true);
-  }
-
-  static byte[] randomByteArray(final int length, final boolean sleep) {
-    if (sleep) {
-      try {
-        Thread.sleep(SLEEP_TIME_RANDOM_BYTE_ARRAY);
-      } catch (InterruptedException e) {
-        System.err.println(e.getLocalizedMessage());
-        throw new RuntimeException(e);
-      }
-    }
-    byte[] b = new byte[length];
-    new Random(DateTime.now().getMillis()).nextBytes(b);
-    return b;
-  }
-
-}
diff --git a/java/src/test/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9UtilsTest.java b/java/src/test/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9UtilsTest.java
deleted file mode 100644
index 7df252d6a..000000000
--- a/java/src/test/java/monasca/api/infrastructure/persistence/influxdb/InfluxV9UtilsTest.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Copyright 2015 FUJITSU LIMITED
- *
- * Copyright (c) 2016 Hewlett Packard Enterprise Development Company LP
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- *
- */
-
-package monasca.api.infrastructure.persistence.influxdb;
-
-import static org.junit.Assert.assertEquals;
-
-import org.apache.commons.lang3.StringUtils;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-
-@Test
-public class InfluxV9UtilsTest {
-
-  private InfluxV9Utils instance;
-
-  @BeforeMethod
-  protected void setupClass() throws Exception {
-    this.instance = new InfluxV9Utils();
-  }
-
-  @Test(groups = {"functional", "timeOffsetPart"})
-  public void testTimeOffsetPart_Timestamp() throws Exception {
-    final String ts = "1443009555969";
-    final String tsDt = "2015-09-23T11:59:15.969Z";
-
-    assertEquals(String.format(" and time > '%1$s'", tsDt), this.instance.timeOffsetPart(ts));
-  }
-
-  @Test(groups = {"functional", "timeOffsetPart"})
-  public void testTimeOffsetPart_DateTime() throws Exception {
-    final String ts = "2015-09-23T11:59:15.969Z";
-    assertEquals(String.format(" and time > '%1$s'", ts), this.instance.timeOffsetPart(ts));
-  }
-
-  @Test(groups = {"functional", "timeOffsetPart"})
-  public void testTimeOffsetPart_EmptyString() throws Exception {
-    assertEquals(StringUtils.EMPTY, this.instance.timeOffsetPart(StringUtils.EMPTY));
-  }
-
-  @Test(groups = {"functional", "timeOffsetPart"})
-  public void testTimeOffsetPart_NullString() throws Exception {
-    assertEquals(StringUtils.EMPTY, this.instance.timeOffsetPart(null));
-  }
-
-  @Test(groups = {"functional", "timeOffsetPart"})
-  public void testTimeOffsetPart_0() throws Exception {
-    final String offset = "0";
-    assertEquals(String.format(" and time > '%1$s'", offset), this.instance.timeOffsetPart(offset));
-  }
-
-  @Test(groups = {"threeDigitMillisTimestamp"})
-  public void testThreeDigitMillisTimestamp_with_3digit() throws Exception {
-    final String origTimestamp = "2016-01-11T16:10:34.472Z";
origTimestamp = "2016-01-11T16:10:34.472Z"; - assertEquals(this.instance.threeDigitMillisTimestamp(origTimestamp), origTimestamp); - } - - @Test(groups = {"threeDigitMillisTimestamp"}) - public void testThreeDigitMillisTimestamp_with_2digit() throws Exception { - final String origTimestamp_1 = "2016-01-11T16:10:34.47Z"; - assertEquals(this.instance.threeDigitMillisTimestamp(origTimestamp_1), "2016-01-11T16:10:34.470Z"); - final String origTimestamp_2 = "2016-01-11T16:10:34.40Z"; - assertEquals(this.instance.threeDigitMillisTimestamp(origTimestamp_2), "2016-01-11T16:10:34.400Z"); - } - - @Test(groups = {"threeDigitMillisTimestamp"}) - public void testThreeDigitMillisTimestamp_with_1digit() throws Exception { - final String origTimestamp = "2016-01-11T16:10:34.4Z"; - assertEquals(this.instance.threeDigitMillisTimestamp(origTimestamp), "2016-01-11T16:10:34.400Z"); - } - - @Test(groups = {"threeDigitMillisTimestamp"}) - public void testThreeDigitMillisTimestamp_with_0digit() throws Exception { - final String origTimestamp = "2016-01-11T16:10:34Z"; - assertEquals(this.instance.threeDigitMillisTimestamp(origTimestamp), "2016-01-11T16:10:34.000Z"); - } -} \ No newline at end of file diff --git a/java/src/test/java/monasca/api/infrastructure/persistence/mysql/AlarmDefinitionMySqlRepositoryImplTest.java b/java/src/test/java/monasca/api/infrastructure/persistence/mysql/AlarmDefinitionMySqlRepositoryImplTest.java deleted file mode 100644 index a0fb19d4c..000000000 --- a/java/src/test/java/monasca/api/infrastructure/persistence/mysql/AlarmDefinitionMySqlRepositoryImplTest.java +++ /dev/null @@ -1,328 +0,0 @@ -/* - * Copyright (c) 2014,2016 Hewlett Packard Enterprise Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
- */
-
-package monasca.api.infrastructure.persistence.mysql;
-
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertNull;
-import static org.testng.Assert.assertTrue;
-import static org.testng.Assert.fail;
-
-import java.nio.charset.Charset;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.skife.jdbi.v2.DBI;
-import org.skife.jdbi.v2.Handle;
-import org.skife.jdbi.v2.util.StringMapper;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Lists;
-import com.google.common.io.Resources;
-
-import monasca.api.infrastructure.persistence.PersistUtils;
-import monasca.common.model.alarm.AggregateFunction;
-import monasca.common.model.alarm.AlarmOperator;
-import monasca.common.model.alarm.AlarmSeverity;
-import monasca.common.model.alarm.AlarmSubExpression;
-import monasca.common.model.metric.MetricDefinition;
-import monasca.api.domain.exception.EntityNotFoundException;
-import monasca.api.domain.model.alarmdefinition.AlarmDefinition;
-import monasca.api.domain.model.alarmdefinition.AlarmDefinitionRepo;
-
-@Test(groups = "database")
-public class AlarmDefinitionMySqlRepositoryImplTest {
-  private DBI db;
-  private Handle handle;
-  private AlarmDefinitionRepo repo;
-  private List<String> alarmActions;
-  private AlarmDefinition alarmDef_123;
-  private AlarmDefinition alarmDef_234;
-  private AlarmDefinition alarmDef_345;
-
-  @BeforeClass
-  protected void setupClass() throws Exception {
-    db = new DBI("jdbc:h2:mem:test;MODE=MySQL");
-    handle = db.open();
-    handle
-        .execute(Resources.toString(getClass().getResource("alarm.sql"), Charset.defaultCharset()));
-    repo = new AlarmDefinitionMySqlRepoImpl(db, new PersistUtils());
-
-    alarmActions = new ArrayList<String>();
-    alarmActions.add("29387234");
-    alarmActions.add("77778687");
-  }
-
-  @AfterClass
-  protected void afterClass() {
-    handle.close();
-  }
-
-  @BeforeMethod
-  protected void beforeMethod() {
-    handle.execute("SET foreign_key_checks = 0;");
-    handle.execute("truncate table sub_alarm");
-    handle.execute("truncate table sub_alarm_definition");
-    handle.execute("truncate table alarm_action");
-    handle.execute("truncate table sub_alarm_definition_dimension");
-    handle.execute("truncate table alarm_definition");
-
-    handle
-        .execute("insert into alarm_definition (id, tenant_id, name, severity, expression, match_by, actions_enabled, created_at, updated_at, deleted_at) "
-            + "values ('123', 'bob', '90% CPU', 'LOW', 'avg(hpcs.compute{flavor_id=777, image_id=888, metric_name=cpu, device=1}) > 10', 'flavor_id,image_id', 1, NOW(), NOW(), NULL)");
-    handle
-        .execute("insert into sub_alarm_definition (id, alarm_definition_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
-            + "values ('111', '123', 'avg', 'hpcs.compute', 'GT', 10, 60, 1, NOW(), NOW())");
-    handle.execute("insert into sub_alarm_definition_dimension values ('111', 'flavor_id', '777')");
-    handle.execute("insert into sub_alarm_definition_dimension values ('111', 'image_id', '888')");
-    handle.execute("insert into sub_alarm_definition_dimension values ('111', 'metric_name', 'cpu')");
-    handle.execute("insert into sub_alarm_definition_dimension values ('111', 'device', '1')");
-    handle.execute("insert into alarm_action values ('123', 'ALARM', '29387234')");
-    handle.execute("insert into alarm_action values ('123', 'ALARM', '77778687')");
-
-    handle
-        .execute("insert into alarm_definition (id, tenant_id, name, severity, expression, match_by, actions_enabled, created_at, updated_at, deleted_at) "
-            + "values ('234', 'bob', '50% CPU', 'HIGH', 'avg(hpcs.compute{flavor_id=777, image_id=888, metric_name=mem}) > 20 and avg(hpcs.compute) < 100', 'flavor_id,image_id', 1, NOW(), NOW(), NULL)");
-    handle
-        .execute("insert into sub_alarm_definition (id, alarm_definition_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
-            + "values ('222', '234', 'avg', 'hpcs.compute', 'GT', 20, 60, 1, NOW(), NOW())");
-    handle
-        .execute("insert into sub_alarm_definition (id, alarm_definition_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
-            + "values ('223', '234', 'avg', 'hpcs.compute', 'LT', 100, 60, 1, NOW(), NOW())");
-    handle.execute("insert into sub_alarm_definition_dimension values ('222', 'flavor_id', '777')");
-    handle.execute("insert into sub_alarm_definition_dimension values ('222', 'image_id', '888')");
-    handle.execute("insert into sub_alarm_definition_dimension values ('222', 'metric_name', 'mem')");
-    handle.execute("insert into alarm_action values ('234', 'ALARM', '29387234')");
-    handle.execute("insert into alarm_action values ('234', 'ALARM', '77778687')");
-
-    handle
-        .execute("insert into alarm_definition (id, tenant_id, name, severity, expression, match_by, actions_enabled, created_at, updated_at, deleted_at) "
-            + "values ('345', 'bob', 'Testing Critical', 'CRITICAL', 'avg(test_metric{flavor_id=777, image_id=888, metric_name=mem}) > 20 and avg(test_metric) < 100', 'flavor_id,image_id', 1, NOW(), NOW(), NULL)");
-    handle
-        .execute("insert into sub_alarm_definition (id, alarm_definition_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
-            + "values ('333', '345', 'avg', 'test_metric', 'GT', 20, 60, 1, NOW(), NOW())");
-    handle
-        .execute("insert into sub_alarm_definition (id, alarm_definition_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
-            + "values ('334', '345', 'avg', 'test_metric', 'LT', 100, 60, 1, NOW(), NOW())");
-    handle.execute("insert into sub_alarm_definition_dimension values ('333', 'flavor_id', '777')");
-    handle.execute("insert into sub_alarm_definition_dimension values ('333', 'image_id', '888')");
-    handle.execute("insert into sub_alarm_definition_dimension values ('333', 'metric_name', 'mem')");
-    handle.execute("insert into alarm_action values ('345', 'ALARM', '29387234')");
-    handle.execute("insert into alarm_action values ('345', 'ALARM', '77778687')");
-
-    alarmDef_123 = new AlarmDefinition("123", "90% CPU", null, "LOW",
-        "avg(hpcs.compute{flavor_id=777, image_id=888, metric_name=cpu, device=1}) > 10",
-        Arrays.asList("flavor_id", "image_id"), true, Arrays.asList("29387234", "77778687"),
-        Collections.<String>emptyList(), Collections.<String>emptyList());
-    alarmDef_234 = new AlarmDefinition("234","50% CPU", null, "HIGH",
-        "avg(hpcs.compute{flavor_id=777, image_id=888, metric_name=mem}) > 20 and avg(hpcs.compute) < 100",
-        Arrays.asList("flavor_id", "image_id"), true, Arrays.asList("29387234", "77778687"),
-        Collections.<String>emptyList(), Collections.<String>emptyList());
-    alarmDef_345 = new AlarmDefinition("345","Testing Critical", null, "CRITICAL",
-        "avg(test_metric{flavor_id=777, image_id=888, metric_name=mem}) > 20 and avg(test_metric) < 100",
-        Arrays.asList("flavor_id", "image_id"), true, Arrays.asList("29387234", "77778687"),
-        Collections.<String>emptyList(), Collections.<String>emptyList());
-  }
-
-  public void shouldCreate() {
-    Map<String, AlarmSubExpression> subExpressions =
-        ImmutableMap
-            .<String, AlarmSubExpression>builder()
-            .put(
-                "4433",
-                AlarmSubExpression
-                    .of("avg(hpcs.compute{flavor_id=777, image_id=888, metric_name=cpu}) > 10"))
-            .build();
-
-    AlarmDefinition alarmA =
-        repo.create("555", "2345", "90% CPU", null, "LOW",
-            "avg(hpcs.compute{flavor_id=777, image_id=888, metric_name=cpu}) > 10", subExpressions,
-            Arrays.asList("flavor_id", "image_id"), alarmActions, null, null);
-    AlarmDefinition alarmB = repo.findById("555", alarmA.getId());
-
-    assertEquals(alarmA, alarmB);
-
-    // Assert that sub-alarm and sub-alarm-dimensions made it to the db
-    assertEquals(
-        handle.createQuery("select count(*) from sub_alarm_definition where id = 4433")
            .map(StringMapper.FIRST).first(), "1");
-    assertEquals(
-        handle.createQuery("select count(*) from sub_alarm_definition_dimension where sub_alarm_definition_id = 4433")
-            .map(StringMapper.FIRST).first(), "3");
-  }
-
-  @Test(groups = "database")
-  public void shouldUpdate() {
-    // This test won't work without the real mysql database so use mini-mon.
-    // Warning, this will truncate your mini-mon database
-    db = new DBI("jdbc:mysql://192.168.10.4/mon", "monapi", "password");
-    handle = db.open();
-    repo = new AlarmDefinitionMySqlRepoImpl(db, new PersistUtils());
-    beforeMethod();
-
-    List<String> oldSubAlarmIds = Arrays.asList("222");
-    AlarmSubExpression changedSubExpression = AlarmSubExpression.of("avg(hpcs.compute) <= 200");
-    Map<String, AlarmSubExpression> changedSubExpressions =
-        ImmutableMap.<String, AlarmSubExpression>builder().put("223", changedSubExpression).build();
-    AlarmSubExpression newSubExpression = AlarmSubExpression.of("avg(foo{flavor_id=777}) > 333");
-    Map<String, AlarmSubExpression> newSubExpressions =
-        ImmutableMap.<String, AlarmSubExpression>builder().put("555", newSubExpression).build();
-
-    repo.update("bob", "234", false, "90% CPU", null,
-        "avg(foo{flavor_id=777}) > 333 and avg(hpcs.compute) <= 200",
-        Arrays.asList("flavor_id", "image_id"), "LOW", false, oldSubAlarmIds,
-        changedSubExpressions, newSubExpressions, alarmActions, null, null);
-
-    AlarmDefinition alarm = repo.findById("bob", "234");
-    AlarmDefinition expected =
-        new AlarmDefinition("234", "90% CPU", null, "LOW",
-            "avg(foo{flavor_id=777}) > 333 and avg(hpcs.compute) <= 200", Arrays.asList(
-                "flavor_id", "image_id"), false, alarmActions, Collections.<String>emptyList(),
-            Collections.<String>emptyList());
-    assertEquals(expected, alarm);
-
-    Map<String, AlarmSubExpression> subExpressions = repo.findSubExpressions("234");
-    assertEquals(subExpressions.get("223"), changedSubExpression);
-    assertEquals(subExpressions.get("555"), newSubExpression);
-  }
-
-  public void shouldFindById() {
-    assertEquals(alarmDef_123, repo.findById("bob", "123"));
-
-    // Make sure it still finds AlarmDefinitions with no notifications
-    handle.execute("delete from alarm_action");
-    alarmDef_123.setAlarmActions(new ArrayList<String>(0));
-    assertEquals(alarmDef_123, repo.findById("bob", "123"));
-  }
-
-  @Test(groups = "database")
-  public void shouldFindSubAlarmMetricDefinitions() {
-    // This test won't work without the real mysql database so use mini-mon.
-    // Warning, this will truncate your mini-mon database
-    db = new DBI("jdbc:mysql://192.168.10.4/mon", "monapi", "password");
-    handle = db.open();
-    repo = new AlarmDefinitionMySqlRepoImpl(db, new PersistUtils());
-    beforeMethod();
-
-    assertEquals(
-        repo.findSubAlarmMetricDefinitions("123").get("111"),
-        new MetricDefinition("hpcs.compute", ImmutableMap.<String, String>builder()
-            .put("flavor_id", "777").put("image_id", "888").put("metric_name", "cpu")
-            .put("device", "1").build()));
-
-    assertEquals(
-        repo.findSubAlarmMetricDefinitions("234").get("222"),
-        new MetricDefinition("hpcs.compute", ImmutableMap.<String, String>builder()
-            .put("flavor_id", "777").put("image_id", "888").put("metric_name", "mem").build()));
-
-    assertTrue(repo.findSubAlarmMetricDefinitions("asdfasdf").isEmpty());
-  }
-
-  @Test(groups = "database")
-  public void shouldFindSubExpressions() {
-    // This test won't work without the real mysql database so use mini-mon.
-    // Warning, this will truncate your mini-mon database
-    db = new DBI("jdbc:mysql://192.168.10.4/mon", "monapi", "password");
-    handle = db.open();
-    repo = new AlarmDefinitionMySqlRepoImpl(db, new PersistUtils());
-    beforeMethod();
-
-    assertEquals(
-        repo.findSubExpressions("123").get("111"),
-        new AlarmSubExpression(AggregateFunction.AVG, new MetricDefinition("hpcs.compute",
-            ImmutableMap.<String, String>builder().put("flavor_id", "777").put("image_id", "888")
-                .put("metric_name", "cpu").put("device", "1").build()), AlarmOperator.GT, 10, 60, 1));
-
-    assertEquals(repo.findSubExpressions("234").get("223"), new AlarmSubExpression(
-        AggregateFunction.AVG, new MetricDefinition("hpcs.compute", new HashMap<String, String>()), AlarmOperator.LT, 100,
-        60, 1));
-
-    assertTrue(repo.findSubAlarmMetricDefinitions("asdfasdf").isEmpty());
-  }
-
-  public void testExists() {
-    assertEquals(repo.exists("bob", "90% CPU"),"123");
-
-    // Negative
-    assertNull(repo.exists("bob", "999% CPU"));
-  }
-
-  public void shouldFind() {
-    assertEquals(Arrays.asList(alarmDef_123, alarmDef_234), repo.find("bob", null, null, null, null, null, 1));
-
-    // Make sure it still finds AlarmDefinitions with no notifications
-    handle.execute("delete from alarm_action");
-    alarmDef_123.setAlarmActions(new ArrayList<String>(0));
-    alarmDef_234.setAlarmActions(new ArrayList<String>(0));
-    assertEquals(Arrays.asList(alarmDef_123, alarmDef_234), repo.find("bob", null, null, null, null, null, 1));
-
-    assertEquals(0, repo.find("bill", null, null, null, null, null, 1).size());
-
-    assertEquals(Arrays.asList(alarmDef_234, alarmDef_123),
-        repo.find("bob", null, null, null, Arrays.asList("name"), null, 1));
-
-    assertEquals(Arrays.asList(alarmDef_234, alarmDef_123),
-        repo.find("bob", null, null, null, Arrays.asList("id desc"), null, 1));
-  }
-
-  public void shouldFindByDimension() {
-    final Map<String, String> dimensions = new HashMap<>();
-    dimensions.put("image_id", "888");
-    assertEquals(Arrays.asList(alarmDef_123, alarmDef_234),
-        repo.find("bob", null, dimensions, null, null, null, 1));
-
-    dimensions.clear();
-    dimensions.put("device", "1");
-    assertEquals(Arrays.asList(alarmDef_123), repo.find("bob", null, dimensions, null, null, null, 1));
-
-    dimensions.clear();
-    dimensions.put("Not real", "AA");
-    assertEquals(0, repo.find("bob", null, dimensions, null, null, null, 1).size());
-  }
-
-  public void shouldFindByName() {
-    assertEquals(Arrays.asList(alarmDef_123), repo.find("bob", "90% CPU", null, null, null, null, 1));
-
-    assertEquals(0, repo.find("bob", "Does not exist", null, null, null, null, 1).size());
-  }
-
-  public void shouldFindBySeverity() {
-    assertEquals(Arrays.asList(alarmDef_234), repo.find("bob", null, null, Lists.newArrayList(AlarmSeverity.HIGH), null, null, 1));
-
-    assertEquals(0, repo.find("bob", null, null, Lists.newArrayList(AlarmSeverity.CRITICAL), null, null, 1).size());
-
-    assertEquals(Arrays.asList(alarmDef_234, alarmDef_345),
-        repo.find("bob", null, null, Lists.newArrayList(AlarmSeverity.HIGH, AlarmSeverity.CRITICAL),
-            null, null, 2));
-  }
-
-  public void shouldDeleteById() {
-    repo.deleteById("bob", "123");
-
-    try {
-      assertNull(repo.findById("bob", "123"));
-      fail();
-    } catch (EntityNotFoundException expected) {
-    }
-    assertEquals(Arrays.asList(alarmDef_234), repo.find("bob", null, null, null, null, null, 1));
-  }
-}
diff --git a/java/src/test/java/monasca/api/infrastructure/persistence/mysql/AlarmMySqlRepositoryImplTest.java b/java/src/test/java/monasca/api/infrastructure/persistence/mysql/AlarmMySqlRepositoryImplTest.java
deleted file mode 100644
index 7e151f698..000000000
--- a/java/src/test/java/monasca/api/infrastructure/persistence/mysql/AlarmMySqlRepositoryImplTest.java
+++ /dev/null
@@ -1,405 +0,0 @@
-/*
- * Copyright (c) 2014,2016 Hewlett Packard Enterprise Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-
-package monasca.api.infrastructure.persistence.mysql;
-
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertNotEquals;
-import static org.testng.Assert.assertTrue;
-
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.ImmutableMap.Builder;
-
-import monasca.api.domain.exception.EntityNotFoundException;
-import monasca.api.domain.model.alarm.Alarm;
-import monasca.api.domain.model.alarm.AlarmRepo;
-import monasca.api.infrastructure.persistence.PersistUtils;
-import monasca.common.model.alarm.AlarmSeverity;
-import monasca.common.model.alarm.AlarmState;
-import monasca.common.model.alarm.AlarmSubExpression;
-import monasca.common.model.metric.MetricDefinition;
-
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
-import org.joda.time.format.DateTimeFormatter;
-import org.joda.time.format.ISODateTimeFormat;
-import org.skife.jdbi.v2.DBI;
-import org.skife.jdbi.v2.Handle;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-import java.sql.Timestamp;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-
-/**
- * These tests won't work without the real mysql database so use mini-mon.
- * Warning, this will truncate your mini-mon database
- * @author craigbr
- *
- */
-@Test
-public class AlarmMySqlRepositoryImplTest {
-  private static final String TENANT_ID = "bob";
-  private static final String ALARM_ID = "234111";
-  private static final DateTimeFormatter ISO_8601_FORMATTER = ISODateTimeFormat.dateOptionalTimeParser().withZoneUTC();
-  private DBI db;
-  private Handle handle;
-  private AlarmRepo repo;
-  private List<String> alarmActions;
-  private Alarm compoundAlarm;
-  private Alarm alarm1;
-  private Alarm alarm2;
-  private Alarm alarm3;
-
-  @BeforeClass
-  protected void setupClass() throws Exception {
-    // This test won't work without the real mysql database so use mini-mon.
-    // Warning, this will truncate your mini-mon database
-    db = new DBI("jdbc:mysql://192.168.10.4:3306/mon?connectTimeout=5000&autoReconnect=true&useLegacyDatetimeCode=false", "monapi", "password");
-
-    handle = db.open();
-    /*
-    handle
-        .execute(Resources.toString(getClass().getResource("alarm.sql"), Charset.defaultCharset()));
-    */
-    repo = new AlarmMySqlRepoImpl(db, new PersistUtils());
-
-    alarmActions = new ArrayList<String>();
-    alarmActions.add("29387234");
-    alarmActions.add("77778687");
-  }
-
-  @AfterClass
-  protected void afterClass() {
-    handle.close();
-  }
-
-  @BeforeMethod
-  protected void beforeMethod() {
-    handle.execute("SET foreign_key_checks = 0;");
-    handle.execute("truncate table alarm");
-    handle.execute("truncate table sub_alarm");
-    handle.execute("truncate table alarm_action");
-    handle.execute("truncate table alarm_definition");
-    handle.execute("truncate table alarm_metric");
-    handle.execute("truncate table metric_definition");
-    handle.execute("truncate table metric_definition_dimensions");
-    handle.execute("truncate table metric_dimension");
-
-    DateTime timestamp1 = ISO_8601_FORMATTER.parseDateTime("2015-03-14T09:26:53").withZoneRetainFields(DateTimeZone.forID("UTC"));
-    DateTime timestamp2 = ISO_8601_FORMATTER.parseDateTime("2015-03-14T09:26:54").withZoneRetainFields(DateTimeZone.forID("UTC"));
-    DateTime timestamp3 = ISO_8601_FORMATTER.parseDateTime("2015-03-14T09:26:55").withZoneRetainFields(DateTimeZone.forID("UTC"));
-
-    handle
-        .execute(
-            "insert into alarm_definition (id, tenant_id, name, severity, expression, match_by, actions_enabled, created_at, updated_at, deleted_at) "
-                + "values ('1', 'bob', '90% CPU', 'LOW', 'avg(cpu.idle_perc{flavor_id=777, image_id=888, device=1}) > 10', 'flavor_id,image_id', 1, NOW(), NOW(), NULL)");
-    handle
-        .execute("insert into alarm (id, alarm_definition_id, state, lifecycle_state, link, created_at, updated_at, state_updated_at) values ('1', '1', 'OK', 'OPEN', 'http://somesite.com/this-alarm-info', '"+timestamp1.toString().replace('Z', ' ')+"', '"+timestamp1.toString().replace('Z', ' ')+"', '"+timestamp1.toString().replace('Z', ' ')+"')");
-    handle
-        .execute("insert into alarm (id, alarm_definition_id, state, lifecycle_state, created_at, updated_at, state_updated_at) values ('2', '1', 'UNDETERMINED', 'OPEN', '"+timestamp2.toString().replace('Z', ' ')+"', '"+timestamp2.toString().replace('Z', ' ')+"', '"+timestamp2.toString().replace('Z', ' ')+"')");
-    handle
-        .execute("insert into alarm (id, alarm_definition_id, state, link, created_at, updated_at, state_updated_at) values ('3', '1', 'ALARM', 'http://somesite.com/this-alarm-info', '"+timestamp3.toString().replace('Z', ' ')+"', '"+timestamp3.toString().replace('Z', ' ')+"', '"+timestamp3.toString().replace('Z', ' ')+"')");
-    long subAlarmId = 42;
-    for (int alarmId = 1; alarmId <= 3; alarmId++) {
-      handle
-          .execute("insert into sub_alarm (id, alarm_id, expression, created_at, updated_at) values ('"
-              + String.valueOf(subAlarmId++)
-              + "', '" + alarmId
-              + "', 'avg(cpu.idle_perc{flavor_id=777, image_id=888, device=1}) > 10', NOW(), NOW())");
-    }
-
-    handle
-        .execute("insert into alarm_metric (alarm_id, metric_definition_dimensions_id) values ('1', 11)");
-    handle
-        .execute("insert into alarm_metric (alarm_id, metric_definition_dimensions_id) values ('1', 22)");
-    handle
-        .execute("insert into alarm_metric (alarm_id, metric_definition_dimensions_id) values ('2', 11)");
-    handle
-        .execute("insert into alarm_metric (alarm_id, metric_definition_dimensions_id) values ('3', 22)");
-    handle
-        .execute("insert into metric_definition (id, name, tenant_id, region) values (1, 'cpu.idle_perc', 'bob', 'west')");
-    handle
-        .execute("insert into metric_definition_dimensions (id, metric_definition_id, metric_dimension_set_id) values (11, 1, 1)");
-    handle
-        .execute("insert into metric_definition_dimensions (id, metric_definition_id, metric_dimension_set_id) values (22, 1, 2)");
-    handle
-        .execute("insert into metric_dimension (dimension_set_id, name, value) values (1, 'instance_id', '123')");
-    handle
-        .execute("insert into metric_dimension (dimension_set_id, name, value) values (1, 'service', 'monitoring')");
-    handle
-        .execute("insert into metric_dimension (dimension_set_id, name, value) values (2, 'flavor_id', '222')");
-
-    alarm1 =
-        new Alarm("1", "1", "90% CPU", "LOW", buildAlarmMetrics(
-            buildMetricDefinition("cpu.idle_perc", "instance_id", "123", "service", "monitoring"),
-            buildMetricDefinition("cpu.idle_perc", "flavor_id", "222")),
-            AlarmState.OK, "OPEN", "http://somesite.com/this-alarm-info", timestamp1, timestamp1, timestamp1);
-
-    alarm2 =
-        new Alarm("2", "1", "90% CPU", "LOW", buildAlarmMetrics(
-            buildMetricDefinition("cpu.idle_perc", "instance_id", "123", "service", "monitoring")),
-            AlarmState.UNDETERMINED, "OPEN", null, timestamp2, timestamp2, timestamp2);
-
-    alarm3 =
-        new Alarm("3", "1", "90% CPU", "LOW", buildAlarmMetrics(
-            buildMetricDefinition("cpu.idle_perc", "flavor_id", "222")),
-            AlarmState.ALARM, null, "http://somesite.com/this-alarm-info", timestamp3, timestamp3, timestamp3);
-
-    DateTime timestamp4 = ISO_8601_FORMATTER.parseDateTime("2015-03-15T09:26:53Z");
-
-    handle
-        .execute(
-            "insert into alarm_definition (id, tenant_id, name, severity, expression, match_by, actions_enabled, created_at, updated_at, deleted_at) "
-                + "values ('234', 'bob', '50% CPU', 'HIGH', 'avg(cpu.sys_mem{service=monitoring}) > 20 and avg(cpu.idle_perc{service=monitoring}) < 10', 'hostname,region', 1, NOW(), NOW(), NULL)");
-    handle
-        .execute("insert into alarm (id, alarm_definition_id, state, created_at, updated_at, state_updated_at) values ('234111', '234', 'UNDETERMINED', '"+timestamp4.toString().replace('Z', ' ')+"', '"+timestamp4.toString().replace('Z', ' ')+"', '"+timestamp4.toString().replace('Z', ' ')+"')");
-    handle
-        .execute("insert into sub_alarm (id, alarm_id, expression, created_at, updated_at) values ('4343', '234111', 'avg(cpu.sys_mem{service=monitoring}) > 20', NOW(), NOW())");
-    handle
-        .execute("insert into sub_alarm (id, alarm_id, expression, created_at, updated_at) values ('4242', '234111', 'avg(cpu.idle_perc{service=monitoring}) < 10', NOW(), NOW())");
-
-    handle
-        .execute("insert into alarm_metric (alarm_id, metric_definition_dimensions_id) values ('234111', 31)");
-    handle
-        .execute("insert into alarm_metric (alarm_id, metric_definition_dimensions_id) values ('234111', 32)");
-    handle
-        .execute("insert into metric_definition (id, name, tenant_id, region) values (111, 'cpu.sys_mem', 'bob', 'west')");
-    handle
-        .execute("insert into metric_definition (id, name, tenant_id, region) values (112, 'cpu.idle_perc', 'bob', 'west')");
-    handle
-        .execute("insert into metric_definition_dimensions (id, metric_definition_id, metric_dimension_set_id) values (31, 111, 21)");
-    handle
-        .execute("insert into metric_definition_dimensions (id, metric_definition_id, metric_dimension_set_id) values (32, 112, 22)");
-    handle
-        .execute("insert into metric_dimension (dimension_set_id, name, value) values (21, 'service', 'monitoring')");
-    handle
-        .execute("insert into metric_dimension (dimension_set_id, name, value) values (22, 'service', 'monitoring')");
-    handle
-        .execute("insert into metric_dimension (dimension_set_id, name, value) values (21, 'hostname', 'roland')");
-    handle
-        .execute("insert into metric_dimension (dimension_set_id, name, value) values (22, 'hostname', 'roland')");
-    handle
-        .execute("insert into metric_dimension (dimension_set_id, name, value) values (21, 'region', 'colorado')");
-    handle
-        .execute("insert into metric_dimension (dimension_set_id, name, value) values (22, 'region', 'colorado')");
-    handle
-        .execute("insert into metric_dimension (dimension_set_id, name, value) values (22, 'extra', 'vivi')");
-
-    compoundAlarm =
-        new Alarm("234111", "234", "50% CPU", "HIGH", buildAlarmMetrics(
-            buildMetricDefinition("cpu.sys_mem", "service", "monitoring", "hostname", "roland",
-                "region", "colorado"),
-            buildMetricDefinition("cpu.idle_perc", "service", "monitoring", "hostname", "roland",
-                "region", "colorado", "extra", "vivi")), AlarmState.UNDETERMINED, null, null,
-            timestamp4, timestamp4, timestamp4);
-  }
-
-  private List<MetricDefinition> buildAlarmMetrics(final MetricDefinition ... metricDefinitions) {
-    return Arrays.asList(metricDefinitions);
-  }
-
-  private MetricDefinition buildMetricDefinition(final String metricName,
-      final String ... dimensions) {
-    final Builder<String, String> builder = ImmutableMap.builder();
-    for (int i = 0; i < dimensions.length;) {
-      builder.put(dimensions[i], dimensions[i+1]);
-      i += 2;
-    }
-    return new MetricDefinition(metricName, builder.build());
-  }
-
-  @Test(groups = "database")
-  public void shouldDelete() {
-    repo.deleteById(TENANT_ID, ALARM_ID);
-
-    List<Map<String, Object>> rows = handle.createQuery("select * from alarm_definition where id='234'").list();
-    assertEquals(rows.size(), 1, "Alarm Definition was deleted as well");
-  }
-
-  @Test(groups = "database", expectedExceptions=EntityNotFoundException.class)
-  public void shouldThowExceptionOnDelete() {
-    repo.deleteById(TENANT_ID, "Not an alarm ID");
-  }
-
-  @Test(groups = "database")
-  public void shouldFindAlarmSubExpressions() {
-    final Map<String, AlarmSubExpression> subExpressionMap = repo.findAlarmSubExpressions(ALARM_ID);
-    assertEquals(subExpressionMap.size(), 2);
-    assertEquals(subExpressionMap.get("4343"),
-        AlarmSubExpression.of("avg(cpu.sys_mem{service=monitoring}) > 20"));
-    assertEquals(subExpressionMap.get("4242"),
-        AlarmSubExpression.of("avg(cpu.idle_perc{service=monitoring}) < 10"));
-  }
-
-  @Test(groups = "database")
-  public void shouldAlarmSubExpressionsForAlarmDefinition() {
-    final Map<String, Map<String, AlarmSubExpression>> alarmSubExpressionMap =
-        repo.findAlarmSubExpressionsForAlarmDefinition(alarm1.getAlarmDefinition().getId());
-    assertEquals(alarmSubExpressionMap.size(), 3);
-    long subAlarmId = 42;
-    for (int alarmId = 1; alarmId <= 3; alarmId++) {
-      final Map<String, AlarmSubExpression> subExpressionMap =
-          alarmSubExpressionMap.get(String.valueOf(alarmId));
-      assertEquals(subExpressionMap.get(String.valueOf(subAlarmId)),
-          AlarmSubExpression.of("avg(cpu.idle_perc{flavor_id=777, image_id=888, device=1}) > 10"));
-      subAlarmId++;
-    }
-  }
-
-  private void checkList(List<Alarm> found, Alarm ... expected) {
-    assertEquals(found.size(), expected.length);
-    for (Alarm alarm : expected) {
-      assertTrue(found.contains(alarm));
-    }
-  }
-
-  @Test(groups = "database")
-  public void shouldFind() {
-    checkList(repo.find("Not a tenant id", null, null, null, null, null, null, null, null, null, null, 1, false));
-
-    checkList(repo.find(TENANT_ID, null, null, null, null, null, null, null, null, null, null, 1, false), alarm1, alarm2, alarm3, compoundAlarm);
-
-    checkList(repo.find(TENANT_ID, compoundAlarm.getAlarmDefinition().getId(), null, null, null, null, null, null, null, null, null, 1, false), compoundAlarm);
-
-    checkList(repo.find(TENANT_ID, null, "cpu.sys_mem", null, null, null, null, null, null, null, null, 1, false), compoundAlarm);
-
-    checkList(repo.find(TENANT_ID, null, "cpu.idle_perc", null, null, null, null, null, null, null, null, 1, false), alarm1, alarm2, alarm3, compoundAlarm);
-
-    checkList(
-        repo.find(TENANT_ID, null, "cpu.idle_perc",
-            ImmutableMap.<String, String>builder().put("flavor_id", "222").build(), null, null, null, null, null, null, null, 1, false), alarm1,
-        alarm3);
-
-    checkList(
-        repo.find(TENANT_ID, null, "cpu.idle_perc",
-            ImmutableMap.<String, String>builder().put("service", "monitoring")
                .put("hostname", "roland").build(), null, null, null, null, null, null, null, 1, false), compoundAlarm);
-
-    checkList(repo.find(TENANT_ID, null, null, null, AlarmState.UNDETERMINED, null, null, null, null, null, null, 1, false),
-        alarm2,
-        compoundAlarm);
-
-    checkList(
-        repo.find(TENANT_ID, alarm1.getAlarmDefinition().getId(), "cpu.idle_perc", ImmutableMap
-            .<String, String>builder().put("service", "monitoring").build(), null, null, null, null, null, null, null, 1, false), alarm1, alarm2);
-
-    checkList(
-        repo.find(TENANT_ID, alarm1.getAlarmDefinition().getId(), "cpu.idle_perc", null, null, null, null, null, null, null, null, 1, false),
"cpu.idle_perc", null, null, null, null, null, null, null, null, 1, false), - alarm1, alarm2, alarm3); - - checkList(repo.find(TENANT_ID, compoundAlarm.getAlarmDefinition().getId(), null, null, - AlarmState.UNDETERMINED, null, null, null, null, null, null, 1, false), compoundAlarm); - - checkList(repo.find(TENANT_ID, null, "cpu.sys_mem", null, AlarmState.UNDETERMINED, null, null, null, null, null, null, 1, false), - compoundAlarm); - - checkList(repo.find(TENANT_ID, null, "cpu.idle_perc", ImmutableMap.builder() - .put("service", "monitoring").build(), AlarmState.UNDETERMINED, null, null, null, null, null, null, 1,false), alarm2, compoundAlarm); - - checkList(repo.find(TENANT_ID, alarm1.getAlarmDefinition().getId(), "cpu.idle_perc", - ImmutableMap.builder().put("service", "monitoring").build(), - AlarmState.UNDETERMINED, null, null, null, null, null, null, 1, false), alarm2); - - checkList(repo.find(TENANT_ID, null, null, null, null, null, null, null, DateTime.now(DateTimeZone.forID("UTC")), null, null, 0, false)); - - checkList(repo.find(TENANT_ID, null, null, null, null, null, null, null, ISO_8601_FORMATTER.parseDateTime("2015-03-15T00:00:00Z"), null, null, 0, false), compoundAlarm); - - checkList( - repo.find(TENANT_ID, null, null, null, null, null, null, null, ISO_8601_FORMATTER.parseDateTime("2015-03-14T00:00:00Z"), null, null, - 1, false), alarm1, alarm2, alarm3, compoundAlarm); - - checkList(repo.find(TENANT_ID, null, null, null, null, null, null, null, null, Arrays.asList("state","severity"), null, 1, false), - alarm1, alarm2, compoundAlarm, alarm3); - - checkList(repo.find(TENANT_ID, null, null, null, null, null, null, null, null, Arrays.asList("state desc","severity"), null, 1, false), - compoundAlarm, alarm3, alarm2, alarm1); - - checkList(repo.find(TENANT_ID, null, null, null, null, Arrays.asList(AlarmSeverity.HIGH), null, null, null, null, null, 1, false), - compoundAlarm); - } - - private DateTime getAlarmStateUpdatedDate(final String alarmId) { - final List> rows = - handle.createQuery("select state_updated_at from alarm where id = :alarmId") - .bind("alarmId", alarmId).list(); - final Object state_updated_at = rows.get(0).get("state_updated_at"); - return (new DateTime(((Timestamp)state_updated_at).getTime(), DateTimeZone.forID("UTC"))); - } - - private DateTime getAlarmUpdatedDate(final String alarmId) { - final List> rows = - handle.createQuery("select updated_at from alarm where id = :alarmId") - .bind("alarmId", alarmId).list(); - final Object state_updated_at = rows.get(0).get("updated_at"); - return (new DateTime(((Timestamp)state_updated_at).getTime(), DateTimeZone.forID("UTC"))); - } - - @Test(groups = "database") - public void shouldUpdate() throws InterruptedException { - final Alarm originalAlarm = repo.findById(TENANT_ID, ALARM_ID); - final DateTime originalStateUpdatedAt = getAlarmStateUpdatedDate(ALARM_ID); - final DateTime originalUpdatedAt = getAlarmUpdatedDate(ALARM_ID); - assertEquals(originalAlarm.getState(), AlarmState.UNDETERMINED); - - Thread.sleep(1000); - final Alarm newAlarm = repo.update(TENANT_ID, ALARM_ID, AlarmState.OK, null, null); - final DateTime newStateUpdatedAt = getAlarmStateUpdatedDate(ALARM_ID); - final DateTime newUpdatedAt = getAlarmUpdatedDate(ALARM_ID); - assertNotEquals(newStateUpdatedAt.getMillis(), originalStateUpdatedAt.getMillis(), - "state_updated_at did not change"); - assertNotEquals(newUpdatedAt.getMillis(), originalUpdatedAt.getMillis(), - "updated_at did not change"); - - assertEquals(newAlarm, originalAlarm); - - 
-    newAlarm.setState(AlarmState.OK);
-    newAlarm.setStateUpdatedTimestamp(newStateUpdatedAt);
-    newAlarm.setUpdatedTimestamp(newUpdatedAt);
-
-    // Make sure it was updated in the DB
-    assertEquals(repo.findById(TENANT_ID, ALARM_ID), newAlarm);
-
-    Thread.sleep(1000);
-    final Alarm unchangedAlarm = repo.update(TENANT_ID, ALARM_ID, AlarmState.OK, "OPEN", null);
-    assertTrue(getAlarmStateUpdatedDate(ALARM_ID).equals(newStateUpdatedAt), "state_updated_at did change");
-    assertNotEquals(getAlarmUpdatedDate(ALARM_ID).getMillis(), newStateUpdatedAt, "updated_at did not change");
-    assertEquals(unchangedAlarm, newAlarm);
-  }
-
-  @Test(groups = "database", expectedExceptions=EntityNotFoundException.class)
-  public void shouldUpdateThrowException() {
-
-    repo.update(TENANT_ID, "Not a valid alarm id", AlarmState.UNDETERMINED, null, null);
-  }
-
-  @Test(groups = "database")
-  public void shouldFindById() {
-
-    final Alarm alarm = repo.findById(TENANT_ID, compoundAlarm.getId());
-
-    assertEquals(alarm, compoundAlarm);
-  }
-
-  @Test(groups = "database", expectedExceptions=EntityNotFoundException.class)
-  public void shouldFindByIdThrowException() {
-
-    repo.findById(TENANT_ID, "Not a valid alarm id");
-  }
-}
diff --git a/java/src/test/java/monasca/api/infrastructure/persistence/mysql/NotificationMethodMySqlRepositoryImplTest.java b/java/src/test/java/monasca/api/infrastructure/persistence/mysql/NotificationMethodMySqlRepositoryImplTest.java
deleted file mode 100644
index 1a6b09cef..000000000
--- a/java/src/test/java/monasca/api/infrastructure/persistence/mysql/NotificationMethodMySqlRepositoryImplTest.java
+++ /dev/null
@@ -1,219 +0,0 @@
-/*
- * (C) Copyright 2014-2016 Hewlett Packard Enterprise Development LP
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-
-package monasca.api.infrastructure.persistence.mysql;
-
-import static org.mockito.Mockito.doAnswer;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.spy;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertFalse;
-import static org.testng.Assert.assertTrue;
-import static org.testng.Assert.fail;
-
-import java.nio.charset.Charset;
-import java.util.Arrays;
-import java.util.List;
-
-import monasca.api.domain.exception.EntityExistsException;
-import monasca.api.domain.exception.EntityNotFoundException;
-import monasca.api.domain.model.notificationmethod.NotificationMethod;
-
-import org.mockito.invocation.InvocationOnMock;
-import org.mockito.stubbing.Answer;
-import org.skife.jdbi.v2.DBI;
-import org.skife.jdbi.v2.Handle;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.AfterMethod;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-import com.google.common.io.Resources;
-import monasca.api.infrastructure.persistence.PersistUtils;
-
-@Test
-public class NotificationMethodMySqlRepositoryImplTest {
-  private DBI db;
-  private Handle handle;
-  private NotificationMethodMySqlRepoImpl repo;
-  Handle realHandle;
-  Handle spyHandle;
-  private boolean shouldRollback = false;
-
-  private static final String NOTIFICATION_METHOD_WEBHOOK = "WEBHOOK";
-  private static final String NOTIFICATION_METHOD_EMAIL = "EMAIL";
-  private static final String NOTIFICATION_METHOD_PAGERDUTY = "PAGERDUTY";
-
-  @BeforeClass
-  protected void beforeClass() throws Exception {
-    db = new DBI("jdbc:h2:mem:test;MODE=MySQL");
-    handle = db.open();
-    handle.execute(Resources.toString(getClass().getResource("notification_method.sql"),
-        Charset.defaultCharset()));
-    handle.execute(Resources.toString(getClass().getResource("notification_method_type.sql"),
-        Charset.defaultCharset()));
-    handle
-        .execute("insert into notification_method_type ( name) values ('EMAIL')");
-    handle
-        .execute("insert into notification_method_type ( name) values ('PAGERDUTY')");
-    handle
-        .execute("insert into notification_method_type ( name) values ('WEBHOOK')");
-    final DBI mockDb = mock(DBI.class);
-    when(mockDb.open()).thenAnswer(new Answer<Handle>() {
-      public Handle answer(InvocationOnMock invocation) {
-        realHandle = db.open();
-        spyHandle = spy(realHandle);
-        // Ensure there is no active transaction when the handle is closed.
-        // Have to do this in the close() method because calling isInTransaction()
-        // after the close throws an exception
-        doAnswer(new Answer<Void>() {
-          public Void answer(InvocationOnMock invocation) {
-            assertFalse(spyHandle.isInTransaction());
-            realHandle.close();
-            return null;
-          }
-        }).when(spyHandle).close();
-        return spyHandle;
-      }
-    });
-    repo = new NotificationMethodMySqlRepoImpl(mockDb, new PersistUtils());
-  }
-
-  @AfterClass
-  protected void afterClass() {
-    handle.close();
-  }
-
-  @BeforeMethod
-  protected void beforeMethod() {
-    handle.execute("truncate table notification_method");
-    handle
-        .execute("insert into notification_method (id, tenant_id, name, type, address, created_at, updated_at) values ('123', '444', 'MyEmail', 'EMAIL', 'a@b', NOW(), NOW())");
-    handle
-        .execute("insert into notification_method (id, tenant_id, name, type, address, created_at, updated_at) values ('124', '444', 'OtherEmail', 'EMAIL', 'a@b', NOW(), NOW())");
-    shouldRollback = false;
-  }
-
-  @AfterMethod
-  protected void afterMethod() {
-    if (shouldRollback) {
-      verify(spyHandle, times(1)).rollback();
-    }
-  }
-
-  public void shouldCreateEmail() {
-    NotificationMethod nmA = repo.create("555", "MyEmail", NOTIFICATION_METHOD_EMAIL, "a@b", 0);
-    verify(spyHandle, times(1)).commit();
-    NotificationMethod nmB = repo.findById("555", nmA.getId());
-
-    assertEquals(nmA, nmB);
-  }
-
-  @Test(expectedExceptions = EntityExistsException.class)
-  public void shouldNotCreateDuplicateEmail() {
-    shouldRollback = true;
-    repo.create("444", "MyEmail", NOTIFICATION_METHOD_EMAIL, "c@d", 0);
-  }
-
-  public void shouldCreateWebhookNonZeroPeriod() {
-    NotificationMethod nmA = repo.create("555", "MyWebhook", NOTIFICATION_METHOD_WEBHOOK, "http://localhost:33", 60);
-    verify(spyHandle, times(1)).commit();
-    NotificationMethod nmB = repo.findById("555", nmA.getId());
-
-    assertEquals(nmA, nmB);
-  }
-
-  public void shouldExistForTenantAndNotificationMethod() {
-    assertTrue(repo.exists("444", "123"));
-    assertFalse(repo.exists("444", "1234"));
-    assertFalse(repo.exists("333", "123"));
-  }
-
-  public void shouldFindById() {
-    NotificationMethod nm = repo.findById("444", "123");
-
-    assertEquals(nm.getId(), "123");
-    assertEquals(nm.getType(), NOTIFICATION_METHOD_EMAIL);
-    assertEquals(nm.getAddress(), "a@b");
-  }
-
-  public void shouldFind() {
-    List<NotificationMethod> nms1 = repo.find("444", null, null, 1);
-
-    assertEquals(nms1, Arrays.asList(new NotificationMethod("123", "MyEmail",
-        NOTIFICATION_METHOD_EMAIL, "a@b", 0),new NotificationMethod("124", "OtherEmail",
-        NOTIFICATION_METHOD_EMAIL, "a@b", 0)));
-
-    List<NotificationMethod> nms2 = repo.find("444", null, "1", 1);
-
-    assertEquals(nms2, Arrays.asList(new NotificationMethod("124", "OtherEmail",
-        NOTIFICATION_METHOD_EMAIL, "a@b", 0)));
-  }
-
-  public void shouldSortBy() {
-    // null sorts by will sort by ID
-    List<NotificationMethod> nms1 = repo.find("444", null, null, 1);
-    assertEquals(nms1, Arrays.asList(new NotificationMethod("123", "MyEmail", NOTIFICATION_METHOD_EMAIL, "a@b", 0),
-        new NotificationMethod("124", "OtherEmail", NOTIFICATION_METHOD_EMAIL, "a@b", 0)));
-
-    List<NotificationMethod> nms2 = repo.find("444", Arrays.asList("name desc", "address"), null, 1);
-    assertEquals(nms2, Arrays.asList(new NotificationMethod("124", "OtherEmail", NOTIFICATION_METHOD_EMAIL, "a@b", 0),
-        new NotificationMethod("123", "MyEmail", NOTIFICATION_METHOD_EMAIL, "a@b", 0)));
-  }
-
-  public void shouldUpdate() {
-    repo.update("444", "123", "Foo", NOTIFICATION_METHOD_EMAIL, "abc", 0);
-    verify(spyHandle, times(1)).commit();
-    NotificationMethod nm = repo.findById("444", "123");
repo.findById("444", "123"); - - assertEquals(nm, new NotificationMethod("123", "Foo", NOTIFICATION_METHOD_EMAIL, "abc", 0)); - } - - public void shouldUpdateWebhookWithNonZeroPeriod() { - NotificationMethod nmOriginal = repo.create("555", "MyWebhook", NOTIFICATION_METHOD_WEBHOOK, "http://localhost:33", 0); - repo.update("555", nmOriginal.getId(), "MyWebhook", NOTIFICATION_METHOD_WEBHOOK, "http://localhost:33", 60); - verify(spyHandle, times(1)).commit(); - NotificationMethod nmUpdated = repo.findById("555", nmOriginal.getId()); - - assertEquals(nmUpdated.getPeriod(), 60); - } - - public void shouldDeleteById() { - repo.deleteById("444", "123"); - - try { - repo.findById("444", "123"); - fail(); - } catch (EntityNotFoundException expected) { - } - } - - public void shouldUpdateDuplicateWithSameValues() { - repo.update("444", "123", "Foo", NOTIFICATION_METHOD_EMAIL, "abc", 0); - verify(spyHandle, times(1)).commit(); - NotificationMethod nm = repo.findById("444", "123"); - - assertEquals(nm, new NotificationMethod("123", "Foo", NOTIFICATION_METHOD_EMAIL, "abc", 0)); - } - - @Test(expectedExceptions = EntityExistsException.class) - public void shouldNotUpdateDuplicateWithSameName() { - shouldRollback = true; - repo.update("444", "124", "MyEmail", NOTIFICATION_METHOD_EMAIL, "abc", 0); - } -} diff --git a/java/src/test/java/monasca/api/infrastructure/persistence/mysql/NotificationMethodTypeMySqlRepositoryImplTest.java b/java/src/test/java/monasca/api/infrastructure/persistence/mysql/NotificationMethodTypeMySqlRepositoryImplTest.java deleted file mode 100644 index 544ea4b82..000000000 --- a/java/src/test/java/monasca/api/infrastructure/persistence/mysql/NotificationMethodTypeMySqlRepositoryImplTest.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * (C) Copyright 2014-2016 Hewlett Packard Enterprise Development LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
- */
-
-package monasca.api.infrastructure.persistence.mysql;
-
-import static org.testng.Assert.assertEquals;
-
-import java.nio.charset.Charset;
-import java.util.Arrays;
-import java.util.List;
-
-import org.skife.jdbi.v2.DBI;
-import org.skife.jdbi.v2.Handle;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-import com.google.common.io.Resources;
-
-import monasca.api.infrastructure.persistence.PersistUtils;
-
-@Test
-public class NotificationMethodTypeMySqlRepositoryImplTest {
-  private DBI db;
-  private Handle handle;
-  private NotificationMethodTypesMySqlRepoImpl repo;
-
-  private final static List<String> DEFAULT_NOTIFICATION_METHODS = Arrays.asList("Email", "PagerDuty", "WebHook");
-
-  @BeforeClass
-  protected void beforeClass() throws Exception {
-    db = new DBI("jdbc:h2:mem:test;MODE=MySQL");
-    handle = db.open();
-    handle.execute(Resources.toString(getClass().getResource("notification_method_type.sql"),
-        Charset.defaultCharset()));
-    repo = new NotificationMethodTypesMySqlRepoImpl(db, new PersistUtils());
-  }
-
-  @AfterClass
-  protected void afterClass() {
-    handle.close();
-  }
-
-  @BeforeMethod
-  protected void beforeMethod() {
-    handle.execute("truncate table notification_method_type");
-    createNotificationMethodTypes();
-  }
-
-
-  private void createNotificationMethodTypes() {
-    try (Handle h = db.open()) {
-      h.begin();
-      for (String methodType : DEFAULT_NOTIFICATION_METHODS){
-        h.insert("insert into notification_method_type (name) values (?)", methodType);
-      }
-      h.commit();
-    }
-  }
-
-  public void shouldListNotificationMethodTypes() {
-    List<String> notification_method_types = repo.listNotificationMethodTypes();
-    assertEquals(notification_method_types, DEFAULT_NOTIFICATION_METHODS);
-  }
-
-}
diff --git a/java/src/test/java/monasca/api/infrastructure/persistence/vertica/AlarmStateHistoryVerticaRepositoryImplTest.java b/java/src/test/java/monasca/api/infrastructure/persistence/vertica/AlarmStateHistoryVerticaRepositoryImplTest.java
deleted file mode 100644
index 512ef9387..000000000
--- a/java/src/test/java/monasca/api/infrastructure/persistence/vertica/AlarmStateHistoryVerticaRepositoryImplTest.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-
-package monasca.api.infrastructure.persistence.vertica;
-
-import monasca.common.model.alarm.AlarmState;
-import monasca.api.domain.model.alarmstatehistory.AlarmStateHistoryRepo;
-
-import org.joda.time.DateTime;
-import org.skife.jdbi.v2.DBI;
-import org.skife.jdbi.v2.Handle;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-import java.sql.Timestamp;
-
-import static org.testng.Assert.assertEquals;
-
-@Test(groups = "database")
-public class AlarmStateHistoryVerticaRepositoryImplTest {
-  private DBI db;
-  private Handle handle;
-  private AlarmStateHistoryRepo repo;
-
-  @BeforeClass
-  protected void setupClass() throws Exception {
-    Class.forName("com.vertica.jdbc.Driver");
-    db = new DBI("jdbc:vertica://192.168.10.4/mon", "dbadmin", "password");
-    handle = db.open();
-    repo = new AlarmStateHistoryVerticaRepoImpl(db, null, null);
-  }
-
-  @AfterClass
-  protected void afterClass() {
-    handle.close();
-  }
-
-  @BeforeMethod
-  protected void beforeMethod() {
-    handle.execute("truncate table MonAlarms.StateHistory");
-  }
-
-  private void create(String tenantId, String alarmId, AlarmState oldState, AlarmState newState,
-      String reason, String reasonData, DateTime timestamp) {
-    try (Handle h = db.open()) {
-      h.insert("insert into MonAlarms.StateHistory (tenant_id, alarm_id, old_state, new_state, "
-          + "reason, reason_data, time_stamp) values (?, ?, ?, ?, ?, ?, ?)", tenantId, alarmId,
-          oldState.name(), newState.name(), reason, reasonData,
-          new Timestamp(timestamp.getMillis()));
-    }
-  }
-
-  @Test
-  public void shouldCreateAndFind() throws Exception {
-    create("bob", "123", AlarmState.UNDETERMINED, AlarmState.ALARM, "foo", "bar", new DateTime());
-    assertEquals(repo.findById("bob", "123", null, 1).size(), 1);
-  }
-}
diff --git a/java/src/test/java/monasca/api/infrastructure/persistence/vertica/MeasurementVerticaRepositoryImplTest.java b/java/src/test/java/monasca/api/infrastructure/persistence/vertica/MeasurementVerticaRepositoryImplTest.java
deleted file mode 100644
index 831b71e0b..000000000
--- a/java/src/test/java/monasca/api/infrastructure/persistence/vertica/MeasurementVerticaRepositoryImplTest.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * Copyright (c) 2014, 2016 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-
-package monasca.api.infrastructure.persistence.vertica;
-
-import monasca.api.ApiConfig;
-import monasca.api.domain.model.measurement.MeasurementRepo;
-import monasca.api.domain.model.measurement.Measurements;
-
-import org.joda.time.DateTime;
-import org.skife.jdbi.v2.DBI;
-import org.skife.jdbi.v2.Handle;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Map;
-
-import static org.testng.Assert.assertEquals;
-
-@Test(groups = "database")
-public class MeasurementVerticaRepositoryImplTest {
-  private ApiConfig config;
-  private DBI db;
-  private Handle handle;
-  private MeasurementRepo repo;
-
-  @BeforeClass
-  protected void setupClass() throws Exception {
-    Class.forName("com.vertica.jdbc.Driver");
-    db = new DBI("jdbc:vertica://192.168.10.4/mon", "dbadmin", "password");
-    handle = db.open();
-    config = new ApiConfig();
-    repo = new MeasurementVerticaRepoImpl(db, config);
-  }
-
-  @AfterClass
-  protected void afterClass() {
-    handle.close();
-  }
-
-  @BeforeMethod
-  protected void beforeMethod() {
-    handle.execute("truncate table MonMetrics.Definitions");
-    handle.execute("truncate table MonMetrics.Dimensions");
-    handle.execute("truncate table MonMetrics.Measurements");
-    handle.execute("truncate table MonMetrics.DefinitionDimensions");
-
-    handle
-        .execute("insert into MonMetrics.Definitions values ('/1', 'cpu_utilization', 'bob', '1')");
-
-    handle.execute("insert into MonMetrics.Dimensions values ('/5', 'service', 'compute')");
-    handle.execute("insert into MonMetrics.Dimensions values ('/5', 'instance_id', '123')");
-    handle.execute("insert into MonMetrics.Dimensions values ('/5', 'flavor_id', '1')");
-    handle.execute("insert into MonMetrics.DefinitionDimensions values ('/1', '/1', '/5')");
-    handle
-        .execute("insert into MonMetrics.Measurements (definition_dimensions_id, time_stamp, value) values ('/1', '2014-01-01 00:00:00', 10)");
-    handle
-        .execute("insert into MonMetrics.Measurements (definition_dimensions_id, time_stamp, value) values ('/1', '2014-01-01 00:01:00', 15)");
-
-    handle.execute("insert into MonMetrics.Dimensions values ('/8', 'service', 'compute')");
-    handle.execute("insert into MonMetrics.Dimensions values ('/8', 'instance_id', '123')");
-    handle.execute("insert into MonMetrics.Dimensions values ('/8', 'flavor_id', '2')");
-    handle.execute("insert into MonMetrics.DefinitionDimensions values ('/2', '/1', '/8')");
-    handle
-        .execute("insert into MonMetrics.Measurements (definition_dimensions_id, time_stamp, value) values ('/2', '2014-01-01 00:00:00', 12)");
-    handle
        .execute("insert into MonMetrics.Measurements (definition_dimensions_id, time_stamp, value) values ('/2', '2014-01-01 00:01:00', 13)");
-
-    handle.execute("insert into MonMetrics.DefinitionDimensions values ('/3', '/1', '')");
-    handle
-        .execute("insert into MonMetrics.Measurements (definition_dimensions_id, time_stamp, value) values ('/3', '2014-01-01 00:00:00', 4)");
-    handle
-        .execute("insert into MonMetrics.Measurements (definition_dimensions_id, time_stamp, value) values ('/3', '2014-01-01 00:01:00', 8)");
-  }
-
-  public void shouldFindWithoutDimensions() throws Exception {
-    Collection<Measurements> measurements =
-        repo.find("bob", "cpu_utilization", null, new DateTime(2014, 1, 1, 0, 0, 0), null, null, 1,
-            false, null);
-    assertEquals(measurements.size(), 3);
-  }
-
-  public void shouldFindWithDimensions() throws Exception {
-    Map<String, String> dims = new HashMap<>();
-    dims.put("service", "compute");
-    dims.put("instance_id", "123");
-
-    Collection<Measurements> measurements =
-        repo.find("bob", "cpu_utilization", dims, new DateTime(2014, 1, 1, 0, 0), null, null, 1,
-            false, null);
-    assertEquals(measurements.size(), 2);
-
-    dims.put("flavor_id", "2");
-    measurements = repo.find("bob", "cpu_utilization", dims, new DateTime(2014, 1, 1, 0, 0), null, null, 1,
-        false, null);
-    assertEquals(measurements.size(), 1);
-  }
-}
diff --git a/java/src/test/java/monasca/api/infrastructure/persistence/vertica/MetricDefinitionVerticaRepositoryImplTest.java b/java/src/test/java/monasca/api/infrastructure/persistence/vertica/MetricDefinitionVerticaRepositoryImplTest.java
deleted file mode 100644
index ae4c88d44..000000000
--- a/java/src/test/java/monasca/api/infrastructure/persistence/vertica/MetricDefinitionVerticaRepositoryImplTest.java
+++ /dev/null
@@ -1,152 +0,0 @@
-/*
- * Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-
-package monasca.api.infrastructure.persistence.vertica;
-
-import monasca.common.model.metric.MetricDefinition;
-import monasca.api.ApiConfig;
-import monasca.api.domain.model.metric.MetricDefinitionRepo;
-
-import org.joda.time.DateTime;
-import org.skife.jdbi.v2.DBI;
-import org.skife.jdbi.v2.Handle;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static org.testng.Assert.assertEquals;
-
-@Test(groups = "database")
-public class MetricDefinitionVerticaRepositoryImplTest {
-  private DBI db;
-  private Handle handle;
-  private MetricDefinitionRepo repo;
-  private ApiConfig config;
-
-  @BeforeClass
-  protected void setupClass() throws Exception {
-    Class.forName("com.vertica.jdbc.Driver");
-    db = new DBI("jdbc:vertica://192.168.10.4/mon", "dbadmin", "password");
-    handle = db.open();
-    config = new ApiConfig();
-    repo = new MetricDefinitionVerticaRepoImpl(db, config);
-  }
-
-  @AfterClass
-  protected void afterClass() {
-    handle.close();
-  }
-
-  @BeforeMethod
-  protected void beforeMethod() {
-    handle.execute("truncate table MonMetrics.Definitions");
-    handle.execute("truncate table MonMetrics.Dimensions");
-    handle.execute("truncate table MonMetrics.Measurements");
-    handle.execute("truncate table MonMetrics.DefinitionDimensions");
-
-    handle
-        .execute("insert into MonMetrics.Definitions values ('/1', 'cpu_utilization', 'bob', '1')");
-
-    handle.execute("insert into MonMetrics.Dimensions values ('/5', 'service', 'compute')");
-    handle.execute("insert into MonMetrics.Dimensions values ('/5', 'instance_id', '123')");
-    handle.execute("insert into MonMetrics.Dimensions values ('/5', 'flavor_id', '1')");
-    handle.execute("insert into MonMetrics.DefinitionDimensions values ('/1', '/1', '/5')");
-    handle
-        .execute("insert into MonMetrics.Measurements (definition_dimensions_id, time_stamp, value) values ('/1', '2014-01-01 00:00:00', 10)");
-    handle
-        .execute("insert into MonMetrics.Measurements (definition_dimensions_id, time_stamp, value) values ('/1', '2014-01-01 00:01:00', 15)");
-
-    handle.execute("insert into MonMetrics.Dimensions values ('/8', 'service', 'compute')");
-    handle.execute("insert into MonMetrics.Dimensions values ('/8', 'instance_id', '123')");
-    handle.execute("insert into MonMetrics.Dimensions values ('/8', 'flavor_id', '2')");
-    handle.execute("insert into MonMetrics.DefinitionDimensions values ('/2', '/1', '/8')");
-    handle
-        .execute("insert into MonMetrics.Measurements (definition_dimensions_id, time_stamp, value) values ('/2', '2014-01-01 00:00:00', 12)");
-    handle
-        .execute("insert into MonMetrics.Measurements (definition_dimensions_id, time_stamp, value) values ('/2', '2014-01-01 00:01:00', 13)");
-
-    handle.execute("insert into MonMetrics.DefinitionDimensions values ('/3', '/1', '')");
-    handle
-        .execute("insert into MonMetrics.Measurements (definition_dimensions_id, time_stamp, value) values ('/3', '2014-01-01 00:00:00', 4)");
-    handle
-        .execute("insert into MonMetrics.Measurements (definition_dimensions_id, time_stamp, value) values ('/3', '2014-01-01 00:01:00', 8)");
-  }
-
-  public void shouldFindWithoutDimensions() throws Exception {
-    List<MetricDefinition> defs = repo.find("bob", "cpu_utilization", null, null, null, null, 1);
-    assertEquals(defs.size(), 3);
-  }
-
-  public void shouldFindWithStartTime() throws Exception {
-    List<MetricDefinition> defs = repo.find("bob",
-        "cpu_utilization",
-        null,
-        new DateTime(2014, 1, 1, 0, 0, 0),
-        null,
-        null,
-        1);
-    assertEquals(defs.size(), 3);
-  }
-
-  public void shouldExcludeWithStartTime() throws Exception {
-    List<MetricDefinition> defs = repo.find("bob",
-        "cpu_utilization",
-        null,
-        new DateTime(2014, 1, 1, 0, 1, 1),
-        null,
-        null,
-        1);
-    assertEquals(defs.size(), 0);
-  }
-
-  public void shouldFindWithEndTime() throws Exception {
-    List<MetricDefinition> defs = repo.find("bob",
-        "cpu_utilization",
-        null,
-        new DateTime(2014, 1, 1, 0, 0, 0),
-        new DateTime(2014, 1, 1, 0, 1, 1),
-        null,
-        1);
-    assertEquals(defs.size(), 3);
-  }
-
-  public void shouldExcludeWithEndTime() throws Exception {
-    List<MetricDefinition> defs = repo.find("bob",
-        "cpu_utilization",
-        null,
-        new DateTime(2013, 1, 1, 0, 0, 0),
-        new DateTime(2013, 12, 31, 0, 0, 0),
-        null,
-        1);
-    assertEquals(defs.size(), 0);
-  }
-
-  public void shouldFindWithDimensions() throws Exception {
-    Map<String, String> dims = new HashMap<>();
-    dims.put("service", "compute");
-    dims.put("instance_id", "123");
-
-    List<MetricDefinition> defs = repo.find("bob", "cpu_utilization", dims, null, null, null, 1);
-    assertEquals(defs.size(), 2);
-
-    dims.put("flavor_id", "2");
-    defs = repo.find("bob", "cpu_utilization", dims, null, null, null, 1);
-    assertEquals(defs.size(), 1);
-  }
-}
diff --git a/java/src/test/java/monasca/api/infrastructure/persistence/vertica/MetricQueriesTest.java b/java/src/test/java/monasca/api/infrastructure/persistence/vertica/MetricQueriesTest.java
deleted file mode 100644
index 633827c48..000000000
--- a/java/src/test/java/monasca/api/infrastructure/persistence/vertica/MetricQueriesTest.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * (C) Copyright 2014,2016 Hewlett Packard Enterprise Development LP
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-
-package monasca.api.infrastructure.persistence.vertica;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import org.testng.annotations.Test;
-
-import static org.testng.AssertJUnit.assertEquals;
-
-@Test
-public class MetricQueriesTest {
-
-  private final static String TABLE_TO_JOIN_DIMENSIONS_ON = "defdims";
-
-  public void metricQueriesBuildDimensionAndClauseTest1() {
-    String expectedResult =
-        " and defdims.id in ( SELECT defDimsSub2.id FROM MonMetrics.Dimensions AS dimSub "
-            + "JOIN MonMetrics.DefinitionDimensions AS defDimsSub2 "
-            + "ON defDimsSub2.dimension_set_id = dimSub.dimension_set_id WHERE"
-            + " ((name = :dname0 and value = :dvalue0) or (name = :dname1 and value = :dvalue1))"
-            + " GROUP BY defDimsSub2.id,dimSub.dimension_set_id HAVING count(*) = 2) ";
-
-    Map<String, String> dimsMap = new HashMap<>();
-    dimsMap.put("foo", "bar");
-    dimsMap.put("biz", "baz");
-
-    String s = MetricQueries.buildDimensionAndClause(dimsMap, TABLE_TO_JOIN_DIMENSIONS_ON);
-    assertEquals(expectedResult, s);
-  }
-
-  public void metricQueriesBuildDimensionAndClauseTest2() {
-    String expectedResult = "";
-    Map<String, String> dimsMap = new HashMap<>();
-    assertEquals(expectedResult, MetricQueries.buildDimensionAndClause(dimsMap, TABLE_TO_JOIN_DIMENSIONS_ON));
-  }
-
-  public void metricQueriesBuildDimensionAndClauseForTest3() {
-    String expectedResult = "";
-    Map<String, String> dimsMap = null;
-    assertEquals(expectedResult, MetricQueries.buildDimensionAndClause(dimsMap, TABLE_TO_JOIN_DIMENSIONS_ON));
-  }
-
-  public void metricQueriesBuildDimensionAndClauseTest4() {
-    String expectedResult =
-        " and defdims.id in ( SELECT defDimsSub2.id FROM MonMetrics.Dimensions AS dimSub "
-            + "JOIN MonMetrics.DefinitionDimensions AS defDimsSub2 "
-            + "ON defDimsSub2.dimension_set_id = dimSub.dimension_set_id WHERE"
-            + " ((name = :dname0 and ( value = :dvalue0_0 or value = :dvalue0_1)))"
-            + " GROUP BY defDimsSub2.id,dimSub.dimension_set_id HAVING count(*) = 1) ";
-
-    Map<String, String> dimsMap = new HashMap<>();
-    dimsMap.put("foo", "bar|baz");
-
-    String s = MetricQueries.buildDimensionAndClause(dimsMap, TABLE_TO_JOIN_DIMENSIONS_ON);
-    assertEquals(expectedResult, s);
-  }
-
-  public void metricQueriesBuildDimensionAndClauseTest5() {
-    String expectedResult =
-        " and defdims.id in ( SELECT defDimsSub2.id FROM MonMetrics.Dimensions AS dimSub "
-            + "JOIN MonMetrics.DefinitionDimensions AS defDimsSub2 "
-            + "ON defDimsSub2.dimension_set_id = dimSub.dimension_set_id WHERE"
-            + " ((name = :dname0 and ( value = :dvalue0_0 or value = :dvalue0_1))"
-            + " or (name = :dname1 and ( value = :dvalue1_0 or value = :dvalue1_1)))"
-            + " GROUP BY defDimsSub2.id,dimSub.dimension_set_id HAVING count(*) = 2) ";
-
-    Map<String, String> dimsMap = new HashMap<>();
-    dimsMap.put("foo", "bar|baz");
-    dimsMap.put("biz", "baz|baf");
-
-    String s = MetricQueries.buildDimensionAndClause(dimsMap, TABLE_TO_JOIN_DIMENSIONS_ON);
-    assertEquals(expectedResult, s);
-  }
-}
diff --git a/java/src/test/java/monasca/api/integration/AlarmIntegrationTest.java b/java/src/test/java/monasca/api/integration/AlarmIntegrationTest.java
deleted file mode 100644
index ec2447dfb..000000000
--- a/java/src/test/java/monasca/api/integration/AlarmIntegrationTest.java
+++ /dev/null
@@ -1,188 +0,0 @@
-/*
- * Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-
-package monasca.api.integration;
-
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertNotNull;
-import static org.testng.Assert.assertNull;
-import static org.testng.Assert.fail;
-
-import java.nio.charset.Charset;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import javax.ws.rs.core.MediaType;
-
-import kafka.javaapi.producer.Producer;
-
-import org.skife.jdbi.v2.DBI;
-import org.skife.jdbi.v2.Handle;
-import org.testng.annotations.AfterTest;
-import org.testng.annotations.BeforeTest;
-import org.testng.annotations.Test;
-
-import com.google.common.io.Resources;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-import com.google.inject.Key;
-import com.google.inject.TypeLiteral;
-import com.google.inject.name.Names;
-import monasca.api.ApiConfig;
-import monasca.api.MonApiModule;
-import monasca.api.app.AlarmDefinitionService;
-import monasca.api.app.command.CreateAlarmDefinitionCommand;
-import monasca.api.domain.exception.EntityNotFoundException;
-import monasca.api.domain.model.alarmdefinition.AlarmDefinition;
-import monasca.api.domain.model.alarmdefinition.AlarmDefinitionRepo;
-import monasca.api.domain.model.alarmstatehistory.AlarmStateHistoryRepo;
-import monasca.api.infrastructure.persistence.PersistUtils;
-import monasca.api.infrastructure.persistence.mysql.AlarmDefinitionMySqlRepoImpl;
-import monasca.api.infrastructure.persistence.mysql.AlarmMySqlRepoImpl;
-import monasca.api.infrastructure.persistence.mysql.NotificationMethodMySqlRepoImpl;
-import monasca.api.resource.AbstractMonApiResourceTest;
-import monasca.api.resource.AlarmDefinitionResource;
-import com.sun.jersey.api.client.ClientResponse;
-
-@Test(groups = "integration", enabled = false)
-public class AlarmIntegrationTest extends AbstractMonApiResourceTest {
-  private static final String TENANT_ID = "alarm-test";
-  private DBI mysqlDb;
-  private AlarmDefinition alarm;
-  private AlarmDefinitionService service;
-  private ApiConfig config;
-  private Producer<String, String> producer;
-  private AlarmDefinitionRepo repo;
-  AlarmStateHistoryRepo stateHistoryRepo;
-  private Map<String, String> dimensions;
-  private List<String> alarmActions;
-
-  @Override
-  protected void setupResources() throws Exception {
-    super.setupResources();
-
-    Handle handle = mysqlDb.open();
-    handle.execute("truncate table alarm");
-    handle.execute("truncate table notification_method");
-    handle
-        .execute("insert into notification_method (id, tenant_id, name, type, address, created_at, updated_at) values ('29387234', 'alarm-test', 'MyEmail', 'EMAIL', 'a@b', NOW(), NOW())");
-    handle
-        .execute("insert into notification_method (id, tenant_id, name, type, address, created_at, updated_at) values ('77778687', 'alarm-test', 'MyEmail', 'EMAIL', 'a@b', NOW(), NOW())");
-    mysqlDb.close(handle);
-
-    repo = new AlarmDefinitionMySqlRepoImpl(mysqlDb, new PersistUtils());
-    service =
-        new AlarmDefinitionService(config, producer, repo, new AlarmMySqlRepoImpl(mysqlDb, new PersistUtils()),
-            new NotificationMethodMySqlRepoImpl(mysqlDb, new PersistUtils()));
-    addResources(new AlarmDefinitionResource(service, repo, new PersistUtils()));
-  }
-
-  @BeforeTest
-  protected void beforeTest() throws Exception {
-    config = getConfiguration("config-test.yml", ApiConfig.class);
-    Injector injector = Guice.createInjector(new MonApiModule(environment, config));
-    producer = injector.getInstance(Key.get(new TypeLiteral<Producer<String, String>>() {}));
-    mysqlDb = injector.getInstance(Key.get(DBI.class, Names.named("mysql")));
-    Handle handle = mysqlDb.open();
-    handle.execute(Resources.toString(
-        NotificationMethodMySqlRepoImpl.class.getResource("alarm.sql"),
-        Charset.defaultCharset()));
-    handle.execute(Resources.toString(
-        NotificationMethodMySqlRepoImpl.class.getResource("notification_method.sql"),
-        Charset.defaultCharset()));
-    handle.close();
-
-    // Fixtures
-    dimensions = new HashMap<String, String>();
-    dimensions.put("instance_id", "937");
-    alarmActions = new ArrayList<String>();
-    alarmActions.add("29387234");
-    alarmActions.add("77778687");
-    alarm =
-        new AlarmDefinition("123", "90% CPU", null, null,
-            "avg(hpcs.compute:cpu:{instance_id=123} > 10", Arrays.asList("instance_id"), true,
-            alarmActions, null, null);
-  }
-
-  @AfterTest
-  protected void afterTest() throws Exception {
-    producer.close();
-  }
-
-  public void shouldCreate() throws Exception {
-    ClientResponse response =
-        client()
-            .resource("/v2.0/alarms")
-            .header("X-Tenant-Id", TENANT_ID)
-            .header("Content-Type", MediaType.APPLICATION_JSON)
-            .post(
-                ClientResponse.class,
-                new CreateAlarmDefinitionCommand("90% CPU", null,
-                    "avg(hpcs.compute:cpu:{instance_id=123} > 10", Arrays.asList("instance_id"),
-                    null, alarmActions, null, null));
-
-    AlarmDefinition newAlarm = response.getEntity(AlarmDefinition.class);
-    String location = response.getHeaders().get("Location").get(0);
-    assertEquals(response.getStatus(), 201);
-    assertEquals(location, "/v2.0/alarms/" + newAlarm.getId());
-    assertEquals(alarm.getExpression(), newAlarm.getExpression());
-    assertEquals(alarm.getAlarmActions(), newAlarm.getAlarmActions());
-  }
-
-  public void shouldCreateCaseInsensitiveAndKeywords() throws Exception {
-    AlarmDefinition alarm_local;
-    alarm_local =
-        new AlarmDefinition("123", "90% CPU", null, null, "AvG(avg:cpu:{instance_id=123} gT 10",
-            Arrays.asList("instance_id"), true, alarmActions, null, null);
-    ClientResponse response =
-        client()
-            .resource("/v2.0/alarms")
-            .header("X-Tenant-Id", TENANT_ID)
-            .header("Content-Type", MediaType.APPLICATION_JSON)
-            .post(
-                ClientResponse.class,
-                new CreateAlarmDefinitionCommand("90% CPU", null,
-                    "AvG(avg:cpu:{instance_id=123} gT 10", Arrays.asList("instance_id"), null,
-                    alarmActions, null, null));
-
-    AlarmDefinition newAlarm = response.getEntity(AlarmDefinition.class);
-    String location = response.getHeaders().get("Location").get(0);
-    assertEquals(response.getStatus(), 201);
-    assertEquals(location, "/v2.0/alarms/" + newAlarm.getId());
-    assertEquals(alarm_local.getExpression(), newAlarm.getExpression());
-    assertEquals(alarm_local.getAlarmActions(), newAlarm.getAlarmActions());
-  }
-
-  public void shouldDelete() {
-    AlarmDefinition newAlarm =
-        repo.create(TENANT_ID, "123", alarm.getName(), alarm.getName(), alarm.getSeverity(),
-            alarm.getExpression(), null, alarm.getMatchBy(), alarm.getAlarmActions(),
-            alarm.getOkActions(), alarm.getUndeterminedActions());
-    assertNotNull(repo.findById(TENANT_ID, newAlarm.getId()));
-
-    ClientResponse response =
-        client().resource("/v2.0/alarms/" + newAlarm.getId()).header("X-Tenant-Id", TENANT_ID)
-            .delete(ClientResponse.class);
-    assertEquals(response.getStatus(), 204);
-
-    try {
-      assertNull(repo.findById(TENANT_ID, newAlarm.getId()));
-      fail();
-    } catch (EntityNotFoundException expected) {
-    }
-  }
-}
diff --git a/java/src/test/java/monasca/api/integration/MetricIntegrationTest.java b/java/src/test/java/monasca/api/integration/MetricIntegrationTest.java
deleted file mode 100644
index 59052fabf..000000000
--- a/java/src/test/java/monasca/api/integration/MetricIntegrationTest.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
- * in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-
-package monasca.api.integration;
-
-import static org.mockito.Mockito.mock;
-import static org.testng.Assert.assertEquals;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import javax.ws.rs.core.MediaType;
-
-import kafka.javaapi.producer.Producer;
-
-import org.skife.jdbi.v2.DBI;
-import org.skife.jdbi.v2.Handle;
-import org.testng.annotations.AfterTest;
-import org.testng.annotations.BeforeTest;
-import org.testng.annotations.Test;
-
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-import com.google.inject.Key;
-import com.google.inject.TypeLiteral;
-import monasca.api.ApiConfig;
-import monasca.api.MonApiModule;
-import monasca.api.app.MetricService;
-import monasca.api.app.command.CreateMetricCommand;
-import monasca.api.domain.model.metric.MetricDefinitionRepo;
-import monasca.api.infrastructure.persistence.PersistUtils;
-import monasca.api.resource.AbstractMonApiResourceTest;
-import monasca.api.resource.MetricResource;
-import com.sun.jersey.api.client.ClientResponse;
-
-@Test(groups = "integration", enabled = false)
-public class MetricIntegrationTest extends AbstractMonApiResourceTest {
-  private static final String TENANT_ID = "metric-test";
-  private DBI db;
-  private MetricService service;
-  private Producer<String, String> producer;
-  private ApiConfig config;
-  private MetricDefinitionRepo metricRepo;
-  private Map<String, String> dimensions;
-  private Map<String, String> valueMeta;
-
-  @Override
-  protected void setupResources() throws Exception {
-    super.setupResources();
-    Handle handle = db.open();
-    handle.execute("truncate table access");
-    db.close(handle);
-    metricRepo = mock(MetricDefinitionRepo.class);
-    service = new MetricService(config, producer, metricRegistry);
-    addResources(new MetricResource(config, service, metricRepo, new PersistUtils()));
-  }
-
-  @BeforeTest
-  protected void beforeTest() throws Exception {
-    config = getConfiguration("config-test.yml", ApiConfig.class);
-    Injector injector = Guice.createInjector(new MonApiModule(environment, config));
-    producer = injector.getInstance(Key.get(new TypeLiteral<Producer<String, String>>() {}));
-  }
-
-  @AfterTest
-  protected void
afterTest() throws Exception { - producer.close(); - } - - public void shouldCreate() throws Exception { - dimensions = new HashMap(); - dimensions.put("instance_id", "937"); - dimensions.put("az", "2"); - dimensions.put("instance_uuid", "abc123"); - valueMeta = new HashMap(); - valueMeta.put("rc", "404"); - valueMeta.put("errMsg", "Not Found"); - long timestamp = System.currentTimeMillis(); - ClientResponse response = - client() - .resource("/v2.0/metrics") - .header("X-Tenant-Id", TENANT_ID) - .header("Content-Type", MediaType.APPLICATION_JSON) - .post(ClientResponse.class, - new CreateMetricCommand("test_namespace", dimensions, timestamp, 22.0, valueMeta)); - - assertEquals(response.getStatus(), 204); - } -} diff --git a/java/src/test/java/monasca/api/integration/NotificationMethodIntegrationTest.java b/java/src/test/java/monasca/api/integration/NotificationMethodIntegrationTest.java deleted file mode 100644 index 004bc0bda..000000000 --- a/java/src/test/java/monasca/api/integration/NotificationMethodIntegrationTest.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * (C) Copyright 2014-2016 Hewlett Packard Enterprise Development LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ - -package monasca.api.integration; - -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.assertNull; -import static org.testng.Assert.fail; - -import java.nio.charset.Charset; - -import javax.ws.rs.core.MediaType; - -import org.skife.jdbi.v2.DBI; -import org.skife.jdbi.v2.Handle; -import org.testng.annotations.BeforeTest; -import org.testng.annotations.Test; - -import com.google.common.io.Resources; -import com.google.inject.Guice; -import com.google.inject.Injector; -import monasca.api.ApiConfig; -import monasca.api.MonApiModule; -import monasca.api.app.command.CreateNotificationMethodCommand; -import monasca.api.domain.exception.EntityNotFoundException; -import monasca.api.domain.model.notificationmethod.NotificationMethod; -import monasca.api.domain.model.notificationmethod.NotificationMethodRepo; -import monasca.api.infrastructure.persistence.PersistUtils; -import monasca.api.infrastructure.persistence.mysql.NotificationMethodMySqlRepoImpl; -import monasca.api.resource.AbstractMonApiResourceTest; -import monasca.api.resource.NotificationMethodResource; -import com.sun.jersey.api.client.ClientResponse; - -@Test(groups = "integration") -public class NotificationMethodIntegrationTest extends AbstractMonApiResourceTest { - private static final String TENANT_ID = "notification-method-test"; - private DBI db; - private NotificationMethod notificationMethod; - private NotificationMethodRepo repo; - private ApiConfig config; - - @Override - protected void setupResources() throws Exception { - super.setupResources(); - Handle handle = db.open(); - handle.execute("truncate table notification_method"); - handle - .execute("insert into notification_method (id, tenant_id, name, type, address, created_at, updated_at) values ('29387234', 
'notification-method-test', 'MyEmaila', 'EMAIL', 'a@b', NOW(), NOW())"); - db.close(handle); - repo = new NotificationMethodMySqlRepoImpl(db, new PersistUtils()); - addResources(new NotificationMethodResource(config, repo, new PersistUtils())); - } - - @BeforeTest - protected void beforeTest() throws Exception { - ApiConfig config = getConfiguration("config-test.yml", ApiConfig.class); - Injector injector = Guice.createInjector(new MonApiModule(environment, config)); - db = injector.getInstance(DBI.class); - Handle handle = db.open(); - handle.execute(Resources.toString( - NotificationMethodMySqlRepoImpl.class.getResource("notification_method.sql"), - Charset.defaultCharset())); - handle.close(); - - // Fixtures - notificationMethod = - new NotificationMethod("123", "Joe's Email", "EMAIL", "a@b", 0); - } - - public void shouldCreate() throws Exception { - ClientResponse response = - client() - .resource("/v2.0/notification-methods") - .header("X-Tenant-Id", TENANT_ID) - .type(MediaType.APPLICATION_JSON) - .post( - ClientResponse.class, - new CreateNotificationMethodCommand(notificationMethod.getName(), - notificationMethod.getType(), notificationMethod.getAddress(), "0")); - NotificationMethod newNotificationMethod = response.getEntity(NotificationMethod.class); - String location = response.getHeaders().get("Location").get(0); - - assertEquals(response.getStatus(), 201); - assertEquals(location, "/v2.0/notification-methods/" + newNotificationMethod.getId()); - assertEquals(newNotificationMethod.getName(), notificationMethod.getName()); - assertEquals(newNotificationMethod.getAddress(), notificationMethod.getAddress()); - assertEquals(repo.findById(TENANT_ID, newNotificationMethod.getId()), newNotificationMethod); - } - - public void shouldConflict() throws Exception { - ClientResponse response = - client() - .resource("/v2.0/notification-methods") - .header("X-Tenant-Id", TENANT_ID) - .type(MediaType.APPLICATION_JSON) - .post(ClientResponse.class, - new CreateNotificationMethodCommand("MyEmail", "EMAIL", "a@b", "0")); - - assertEquals(response.getStatus(), 409); - } - - public void shouldDelete() { - NotificationMethod newMethod = - repo.create(TENANT_ID, notificationMethod.getName(), notificationMethod.getType(), - notificationMethod.getAddress(), 0); - assertNotNull(repo.findById(TENANT_ID, newMethod.getId())); - - ClientResponse response = - client().resource("/v2.0/notification-methods/" + newMethod.getId()) - .header("X-Tenant-Id", TENANT_ID).delete(ClientResponse.class); - assertEquals(response.getStatus(), 204); - - try { - assertNull(repo.findById(TENANT_ID, newMethod.getId())); - fail(); - } catch (EntityNotFoundException expected) { - } - } -} diff --git a/java/src/test/java/monasca/api/integration/docker/ITInfluxDBTest.java b/java/src/test/java/monasca/api/integration/docker/ITInfluxDBTest.java deleted file mode 100644 index a28fdc2e9..000000000 --- a/java/src/test/java/monasca/api/integration/docker/ITInfluxDBTest.java +++ /dev/null @@ -1,649 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. 
See the License for the specific language governing permissions and limitations under - * the License. - */ - -package monasca.api.integration.docker; - -import com.github.dockerjava.client.DockerClient; -import com.github.dockerjava.client.DockerException; -import com.github.dockerjava.client.NotFoundException; -import com.github.dockerjava.client.model.ContainerCreateResponse; -import com.github.dockerjava.client.model.ExposedPort; -import com.github.dockerjava.client.model.Ports; -import com.sun.jersey.api.client.ClientResponse; - -import org.apache.commons.io.filefilter.RegexFileFilter; -import org.testng.annotations.AfterClass; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - -import java.io.File; -import java.io.FileFilter; -import java.io.IOException; -import java.net.InetSocketAddress; -import java.net.Socket; -import java.net.SocketAddress; - -import static com.jayway.restassured.RestAssured.given; -import static com.jayway.restassured.path.json.JsonPath.from; - -@Test(groups = "integration", enabled = true) -public class ITInfluxDBTest { - - private final static String INFLUXDB_IMAGE_NAME = "monasca/api-integ-tests-influxdb"; - private static final String MYSQL_IMAGE_NAME = "monasca/api-integ-tests-mysql"; - private static final String MYSQL_CONTAINER_RUN_CMD = "/usr/bin/mysqld_safe"; - private static final String KAFKA_IMAGE_NAME = "monasca/api-integ-tests-kafka"; - private static final String KAFKA_CONTAINER_RUN_CMD = "/run.sh"; - private static final String DOCKER_IP = "192.168.59.103"; - private static final String DOCKER_PORT = "2375"; - private static final String DOCKER_URL = "http://" + DOCKER_IP + ":" + DOCKER_PORT; - private static final int MAX_CONNECT_PORT_TRIES = 10000; - - private final static DockerClient dockerClient = new DockerClient(DOCKER_URL); - private Process apiProcess = null; - private ContainerCreateResponse influxDBContainer = null; - private ContainerCreateResponse mysqlContainer = null; - private ContainerCreateResponse kafkaContainer = null; - - @BeforeClass - public void setup() throws DockerException, IOException { - - try { - - runKafka(); - - runInfluxDB(); - - runMYSQL(); - - runAPI(); - - } catch (Exception e) { - - System.err.println("Failed to setup environment"); - System.err.println(e); - tearDown(); - System.exit(-1); - } - } - - private void runAPI() throws Exception { - - if (!isPortFree(8070)) { - throw new Exception("port 8070 is not free. Unable to start instance" + " of monasca api"); - } - - String latestShadedJarFileName = getLatestShadedJarFileName(); - System.out.println("Running " + latestShadedJarFileName); - - ProcessBuilder pb = new ProcessBuilder("java", "-cp", "./target/" + latestShadedJarFileName, - "monasca.api.MonApiApplication", "server", - "src/test/resources/mon-api-config.yml"); - File log = new File("mon-api-integration-test.log"); - pb.redirectErrorStream(true); - pb.redirectOutput(ProcessBuilder.Redirect.appendTo(log)); - apiProcess = pb.start(); - - System.out.println("Started " + latestShadedJarFileName); - - waitForPortReady("localhost", 8070); - } - - private String getLatestShadedJarFileName() { - - File dir = new File("./target"); - FileFilter fileFilter = new RegexFileFilter("^mon-api-0\\.1\\.0-(\\d|\\w)+-(\\d|\\w)+\\.jar"); - File[] files = dir.listFiles(fileFilter); - if (files.length == 0) { - System.err.println("Failed to find shaded jar. You must build mon-api before running this " - + "test. 
Try 'mvn clean package'"); - tearDown(); - System.exit(-1); - } - System.out.println("Found " + files.length + " jar files"); - File latestFile = files[0]; - for (File file : files) { - if (file.lastModified() > latestFile.lastModified()) { - latestFile = file; - } - } - - System.out.println(latestFile.getName() + " is the latest jar file"); - return latestFile.getName(); - - } - - boolean isPortFree(int port) { - - try (Socket s = new Socket("localhost", port)) { - return false; - } catch (Exception e) { - return true; - } - - } - - private void waitForPortReady(String host, int port) { - - System.out.println("waiting to connect to host [" + host + "] on port [" + port + "]"); - - Socket s = null; - boolean isPortReady = false; - int tryCount = 0; - while (!isPortReady) { - - if (tryCount >= MAX_CONNECT_PORT_TRIES) { - System.err.println("Failed to connect to host [" + host + "] on port [" + port + "] in " + - "[" + tryCount + "] tries"); - tearDown(); - System.exit(-1); - } - - try { - s = new Socket(); - s.setReuseAddress(true); - SocketAddress sa = new InetSocketAddress(host, port); - s.connect(sa, 50000); - isPortReady = true; - System.out.println("Took " + tryCount + " tries to connect to host [" + host + "] on port" + - "[" + port + "]"); - } catch (Exception e) { - tryCount++; - } - } - - if (s != null) { - try { - s.close(); - } catch (Exception e) { - System.err.print(e); - } - } - } - - private void runKafka() { - - ClientResponse response = dockerClient.pullImageCmd(KAFKA_IMAGE_NAME).exec(); - - final ExposedPort tcp2181 = ExposedPort.tcp(2181); - final ExposedPort tcp9092 = ExposedPort.tcp(9092); - - waitForCreateContainer(new CreateContainer(KAFKA_IMAGE_NAME) { - @Override - void createContainer() { - kafkaContainer = dockerClient.createContainerCmd(KAFKA_IMAGE_NAME).withCmd(new - String[]{ - KAFKA_CONTAINER_RUN_CMD, DOCKER_IP}).withExposedPorts(tcp2181, tcp9092).exec(); - } - }); - - Ports portBindings2 = new Ports(); - portBindings2.bind(tcp2181, Ports.Binding(2181)); - portBindings2.bind(tcp9092, Ports.Binding(9092)); - - dockerClient.startContainerCmd(kafkaContainer.getId()).withPortBindings(portBindings2).exec(); - - waitForPortReady(DOCKER_IP, 2181); - waitForPortReady(DOCKER_IP, 9092); - } - - private void runMYSQL() { - - ClientResponse response = dockerClient.pullImageCmd(MYSQL_IMAGE_NAME).exec(); - - final ExposedPort tcp3306 = ExposedPort.tcp(3306); - - waitForCreateContainer(new CreateContainer(MYSQL_IMAGE_NAME) { - @Override - void createContainer() { - - mysqlContainer = dockerClient.createContainerCmd(MYSQL_IMAGE_NAME).withCmd(new - String[]{ - MYSQL_CONTAINER_RUN_CMD}).withExposedPorts(tcp3306).exec(); - } - }); - - Ports portBindings1 = new Ports(); - portBindings1.bind(tcp3306, Ports.Binding(3306)); - - dockerClient.startContainerCmd(mysqlContainer.getId()).withPortBindings(portBindings1).exec(); - - waitForPortReady(DOCKER_IP, 3306); - } - - private void runInfluxDB() { - - ClientResponse response = dockerClient.pullImageCmd(INFLUXDB_IMAGE_NAME).exec(); - - final ExposedPort tcp8083 = ExposedPort.tcp(8083); - final ExposedPort tcp8086 = ExposedPort.tcp(8086); - final ExposedPort tcp8090 = ExposedPort.tcp(8090); - final ExposedPort tcp8099 = ExposedPort.tcp(8099); - - waitForCreateContainer(new CreateContainer(INFLUXDB_IMAGE_NAME) { - @Override - void createContainer() { - influxDBContainer = dockerClient.createContainerCmd(INFLUXDB_IMAGE_NAME).withExposedPorts - (tcp8083, tcp8086, tcp8090, tcp8099).exec(); - } - }); - - Ports portBindings = new Ports(); - 
portBindings.bind(tcp8083, Ports.Binding(8083)); - portBindings.bind(tcp8086, Ports.Binding(8086)); - portBindings.bind(tcp8090, Ports.Binding(8090)); - portBindings.bind(tcp8099, Ports.Binding(8099)); - - dockerClient.startContainerCmd(influxDBContainer.getId()).withPortBindings(portBindings).exec(); - - waitForPortReady(DOCKER_IP, 8086); - } - - @Test - public void alarmCreateTest() { - - given().headers("Accept", "application/json", "Content-Type", "application/json", - "X-Auth-Token", "82510970543135").body("{\"alarm_actions\": " + - "[\"044fa9be-36ef-4e51-a1d9-67ec31734908\"], " - + - "" - + "\"ok_actions\": [\"044fa9be-36ef-4e51-a1d9-67ec31734908\"], " - + - "\"name\": \"test-alarm-1\", \"description\": \"test-alarm-description\", " - + - "\"undetermined_actions\": [\"044fa9be-36ef-4e51-a1d9-67ec31734908\"], " - + - "\"expression\": \"max(cpu_system_perc) > 0 and max(load_avg_1_min{hostname=mini-mon}) > " - + - "0\", \"severity\": \"low\"}") - .post("/v2.0/alarms").then().assertThat().statusCode(201); - - } - - @Test - public void alarmDeleteTest() { - - String json = given().headers("Accept", "application/json", "Content-Type", - "application/json", "X-Auth-Token", "82510970543135") - .body("{\"alarm_actions\": " + - "[\"044fa9be-36ef-4e51-a1d9-67ec31734908\"], " + - "" + "\"ok_actions\": [\"044fa9be-36ef-4e51-a1d9-67ec31734908\"], " + - "\"name\": \"test-alarm-2\", \"description\": \"test-alarm-description\", " + - "\"undetermined_actions\": [\"044fa9be-36ef-4e51-a1d9-67ec31734908\"], " + - "\"expression\": \"max(cpu_system_perc) > 0 and max(load_avg_1_min{hostname=mini-mon}) > " - + - "0\", \"severity\": \"low\"}").post("/v2.0/alarms").asString(); - - String alarmId = from(json).get("id"); - - given().headers("Accept", "application/json", "Content-Type", "application/json", - "X-Auth-Token", "82510970543135").delete("/v2.0/alarms/" + alarmId).then() - .assertThat() - .statusCode(204); - - } - - @Test - public void alarmHistoryTest() { - - String json = given().headers("Accept", "application/json", "Content-Type", - "application/json", "X-Auth-Token", "82510970543135") - .body("{\"alarm_actions\": " + - "[\"044fa9be-36ef-4e51-a1d9-67ec31734908\"], " + - "" + "\"ok_actions\": [\"044fa9be-36ef-4e51-a1d9-67ec31734908\"], " + - "\"name\": \"test-alarm-3\", \"description\": \"test-alarm-description\", " + - "\"undetermined_actions\": [\"044fa9be-36ef-4e51-a1d9-67ec31734908\"], " + - "\"expression\": \"max(cpu_system_perc) > 0 and max(load_avg_1_min{hostname=mini-mon}) > " - + - "0\", \"severity\": \"low\"}").post("/v2.0/alarms").asString(); - - String alarmId = from(json).get("id"); - - given().headers("Accept", "application/json", "Content-Type", "application/json", - "X-Auth-Token", "82510970543135") - .get("v2.0/alarms/" + alarmId + "/state-history").then() - .assertThat().statusCode(200); - - } - - @Test - public void alarmListTest() { - - given().headers("Accept", "application/json", "Content-Type", "application/json", - "X-Auth-Token", "82510970543135").get("/v2.0/alarms").then().assertThat() - .statusCode(200); - - } - - @Test - public void alarmPatchTest() { - - String json = given().headers("Accept", "application/json", "Content-Type", - "application/json", "X-Auth-Token", "82510970543135") - .body("{\"alarm_actions\": " + - "[\"044fa9be-36ef-4e51-a1d9-67ec31734908\"], " + - "" + "\"ok_actions\": [\"044fa9be-36ef-4e51-a1d9-67ec31734908\"], " + - "\"name\": \"test-alarm-4\", \"description\": \"test-alarm-description\", " + - "\"undetermined_actions\": 
[\"044fa9be-36ef-4e51-a1d9-67ec31734908\"], " + - "\"expression\": \"max(cpu_system_perc) > 0 and max(load_avg_1_min{hostname=mini-mon}) > " - + - "0\", \"severity\": \"low\"}").post("/v2.0/alarms").asString(); - - String alarmId = from(json).get("id"); - - given().headers("Accept", "application/json", "Content-Type", "application/json", - "X-Auth-Token", "82510970543135").body("{}").patch("v2.0/alarms/" + alarmId) - .then() - .assertThat().statusCode(200); - - } - - @Test - public void alarmShowTest() { - - String json = given().headers("Accept", "application/json", "Content-Type", - "application/json", "X-Auth-Token", "82510970543135") - .body("{\"alarm_actions\": " + - "[\"044fa9be-36ef-4e51-a1d9-67ec31734908\"], " + - "" + "\"ok_actions\": [\"044fa9be-36ef-4e51-a1d9-67ec31734908\"], " + - "\"name\": \"test-alarm-5\", \"description\": \"test-alarm-description\", " + - "\"undetermined_actions\": [\"044fa9be-36ef-4e51-a1d9-67ec31734908\"], " + - "\"expression\": \"max(cpu_system_perc) > 0 and max(load_avg_1_min{hostname=mini-mon}) > " - + - "0\", \"severity\": \"low\"}").post("/v2.0/alarms").asString(); - - String alarmId = from(json).get("id"); - - given().headers("Accept", "application/json", "Content-Type", "application/json", - "X-Auth-Token", "82510970543135").get("v2.0/alarms/" + alarmId).then() - .assertThat() - .statusCode(200); - - } - - @Test - public void alarmUpdateTest() { - - String json = given().headers("Accept", "application/json", "Content-Type", - "application/json", "X-Auth-Token", "82510970543135") - .body("{\"alarm_actions\": " + - "[\"044fa9be-36ef-4e51-a1d9-67ec31734908\"], " + - "" + "\"ok_actions\": [\"044fa9be-36ef-4e51-a1d9-67ec31734908\"], " + - "\"name\": \"test-alarm-6\", \"description\": \"test-alarm-description\", " + - "\"undetermined_actions\": [\"044fa9be-36ef-4e51-a1d9-67ec31734908\"], " + - "\"expression\": \"max(cpu_system_perc) > 0 and max(load_avg_1_min{hostname=mini-mon}) > " - + - "0\", \"severity\": \"low\"}").post("/v2.0/alarms").asString(); - - String alarmId = from(json).get("id"); - - given().headers("Accept", "application/json", "Content-Type", "application/json", - "X-Auth-Token", "82510970543135").body("{\"alarm_actions\": " + - "[\"044fa9be-36ef-4e51-a1d9-67ec31734908\"], " - + - "" - + "\"ok_actions\": [\"044fa9be-36ef-4e51-a1d9-67ec31734908\"], " - + - "\"name\": \"test-alarm-6\", \"description\": \"test-alarm-description\", " - + - "\"undetermined_actions\": [\"044fa9be-36ef-4e51-a1d9-67ec31734908\"], " - + - "\"expression\": \"max(cpu_system_perc) > 0 and max(load_avg_1_min{hostname=mini-mon}) > " - + - "0\", \"severity\": \"low\", \"actions_enabled\":\"true\", " - + - "\"state\": \"alarm\"}").put("/v2" + - ".0/alarms/" - + alarmId) - .then().assertThat().statusCode(200); - - } - - @Test - public void measurementListTest() { - - given().headers("Accept", "application/json", "Content-Type", "application/json", - "X-Auth-Token", "82510970543135").param("start_time", "1970-01-01T00:00:00Z") - .param - ("name", "cpu_system_perc").get("v2.0/metrics/measurements").then().assertThat() - .statusCode(200); - - } - - @Test - public void metricCreateTest() { - - given().headers("Accept", "application/json", "Content-Type", "application/json", - "X-Auth-Token", "82510970543135") - .body("{\"timestamp\": 0, \"name\": \"test-metric-1\", " + - "\"value\": 1234.5678, \"dimensions\": {\"foo\": \"bar\", " + - "\"biz\": \"baz\"}}").post("/v2.0/metrics ").then().assertThat().statusCode(204); - - given().headers("Accept", "application/json", 
"Content-Type", "application/json", - "X-Auth-Token", "82510970543135").param("start_time", "1970-01-01T00:00:00Z") - .param - ("name", "test-metric-1").get("v2.0/metrics/measurements").then().assertThat() - .statusCode - (200); - - - } - - @Test - public void metricCreateRawTest() { - - long unixTime = System.currentTimeMillis(); - - given().headers("Accept", "application/json", "Content-Type", "application/json", - "X-Auth-Token", "82510970543135") - .body("{\"timestamp\":\"" + unixTime + "\" , " + - "\"name\": \"test-metric-2\", " + - "\"value\": 1234.5678, \"dimensions\": {\"foo\": \"bar\", " + - "\"biz\": \"baz\"}}").post("/v2.0/metrics ").then().assertThat().statusCode(204); - - given().headers("Accept", "application/json", "Content-Type", "application/json", - "X-Auth-Token", "82510970543135").param("start_time", "1970-01-01T00:00:00Z") - .param - ("name", "test-metric-2").get("v2.0/metrics/measurements").then().assertThat() - .statusCode - (200); - - } - - @Test - public void metricList() { - - given().headers("Accept", "application/json", "Content-Type", "application/json", - "X-Auth-Token", "82510970543135").get("/v2.0/metrics").then().assertThat() - .statusCode(500); - - } - - @Test - public void metricStatisticsTest() { - - String[] stats = new String[]{"avg", "min", "max", "count", "sum"}; - - for (String stat : stats) { - given().headers("Accept", "application/json", "Content-Type", "application/json", - "X-Auth-Token", "82510970543135").param("start_time", "1970-01-01T00:00:00Z") - .param - ("statistics", stat).param("name", "cpu_system_perc").get("/v2.0/metrics/statistics") - .then().assertThat().statusCode(200); - } - - } - - @Test - public void notificationCreateTest() { - - given().headers("Accept", "application/json", "Content-Type", "application/json", - "X-Auth-Token", "82510970543135").body("{\"type\": \"email\", " + - "" - + "\"name\": \"test-notification-1\", \"address\": \"jdoe@gmail.com\"}") - .post("/v2" + - ".0/notification-methods").then().assertThat().statusCode(201); - } - - @Test - public void notificationDeleteTest() { - - String json = given().headers("Accept", "application/json", "Content-Type", - "application/json", "X-Auth-Token", "82510970543135") - .body("{\"type\": \"email\", " + - "" + "\"name\": \"test-notification-2\", \"address\": \"jdoe@gmail.com\"}") - .post("/v2" + - ".0/notification-methods").asString(); - - String notificationId = from(json).get("id"); - - given().headers("Accept", "application/json", "Content-Type", "application/json", - "X-Auth-Token", "82510970543135") - .delete("/v2.0/notification-methods/" + notificationId) - .then().assertThat().statusCode(204); - - - } - - @Test - public void notificationList() { - - given().headers("Accept", "application/json", "Content-Type", "application/json", - "X-Auth-Token", "82510970543135").get("/v2.0/notification-methods").then() - .assertThat() - .statusCode(200); - - } - - @Test - public void notificationShowTest() { - - String json = given().headers("Accept", "application/json", "Content-Type", - "application/json", "X-Auth-Token", "82510970543135") - .body("{\"type\": \"email\", " + - "" + "\"name\": \"test-notification-3\", \"address\": \"jdoe@gmail.com\"}") - .post("/v2" + - ".0/notification-methods").asString(); - - String notificationId = from(json).get("id"); - - given().headers("Accept", "application/json", "Content-Type", "application/json", - "X-Auth-Token", "82510970543135") - .get("/v2.0/notification-methods/" + notificationId) - .then().assertThat().statusCode(200); - - } - - 
@Test - public void notificationUpdateTest() { - - String json = given().headers("Accept", "application/json", "Content-Type", - "application/json", "X-Auth-Token", "82510970543135") - .body("{\"type\": \"email\", " + - "" + "\"name\": \"test-notification-4\", \"address\": \"jdoe@gmail.com\"}") - .post("/v2" + - ".0/notification-methods").asString(); - - String notificationId = from(json).get("id"); - - given().headers("Accept", "application/json", "Content-Type", "application/json", - "X-Auth-Token", "82510970543135").body("{\"type\": \"email\", " + - "" - + "\"name\": \"test-notification-4\", \"address\": \"jsmith@gmail.com\"}") - .put("/v2" + - ".0/notification-methods/" + notificationId).then().assertThat().statusCode(200); - - json = given().headers("Accept", "application/json", "Content-Type", "application/json", - "X-Auth-Token", "82510970543135") - .get("/v2.0/notification-methods/" + notificationId) - .asString(); - - String address = from(json).get("address"); - - assert (address.equals("jsmith@gmail.com")); - - - } - - @AfterClass - public void tearDown() { - - stopAPI(); - - stopMYSQL(); - - stopInfluxDB(); - - stopKafka(); - - - } - - private void stopAPI() { - if (apiProcess != null) { - apiProcess.destroy(); - } - } - - private void stopKafka() { - if (kafkaContainer != null) { - dockerClient.stopContainerCmd(kafkaContainer.getId()).withTimeout(2).exec(); - } - - } - - private void stopMYSQL() { - if (mysqlContainer != null) { - dockerClient.stopContainerCmd(mysqlContainer.getId()).withTimeout(2).exec(); - } - - - } - - private void stopInfluxDB() { - if (influxDBContainer != null) { - dockerClient.stopContainerCmd(influxDBContainer.getId()).withTimeout(2).exec(); - } - - } - - private static abstract class CreateContainer { - - private String imageName; - - private CreateContainer(String imageName) { - this.imageName = imageName; - } - - abstract void createContainer(); - - String getImageName() { - return imageName; - } - - } - - private void waitForCreateContainer(CreateContainer createContainer) { - - boolean isContainerCreated = false; - while (!isContainerCreated) { - try { - createContainer.createContainer(); - isContainerCreated = true; - } catch (NotFoundException e) { - System.out.println("Waiting for image " + createContainer.getImageName() + " to be pulled"); - } - } - } -} diff --git a/java/src/test/java/monasca/api/resource/AbstractMonApiResourceTest.java b/java/src/test/java/monasca/api/resource/AbstractMonApiResourceTest.java deleted file mode 100644 index b463de207..000000000 --- a/java/src/test/java/monasca/api/resource/AbstractMonApiResourceTest.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
- */ - -package monasca.api.resource; - -import com.fasterxml.jackson.databind.DeserializationFeature; -import com.fasterxml.jackson.databind.PropertyNamingStrategy; -import monasca.common.dropwizard.AbstractResourceTest; -import monasca.api.resource.exception.ConstraintViolationExceptionMapper; -import monasca.api.resource.exception.EntityExistsExceptionMapper; -import monasca.api.resource.exception.EntityNotFoundExceptionMapper; -import monasca.api.resource.exception.IllegalArgumentExceptionMapper; -import monasca.api.resource.exception.InvalidEntityExceptionMapper; -import monasca.api.resource.exception.JsonMappingExceptionManager; -import monasca.api.resource.exception.JsonProcessingExceptionMapper; -import monasca.api.resource.exception.ThrowableExceptionMapper; - -/** - * Support class for monitoring resource tests. - */ -public abstract class AbstractMonApiResourceTest extends AbstractResourceTest { - @Override - protected void setupResources() throws Exception { - addSingletons(new EntityExistsExceptionMapper(), new EntityNotFoundExceptionMapper(), - new IllegalArgumentExceptionMapper(), new InvalidEntityExceptionMapper(), - new JsonProcessingExceptionMapper(), new JsonMappingExceptionManager(), - new ConstraintViolationExceptionMapper(), new ThrowableExceptionMapper() {}); - - objectMapper - .setPropertyNamingStrategy(PropertyNamingStrategy.CAMEL_CASE_TO_LOWER_CASE_WITH_UNDERSCORES); - objectMapper.enable(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY); - } -} diff --git a/java/src/test/java/monasca/api/resource/AlarmDefinitionResourceTest.java b/java/src/test/java/monasca/api/resource/AlarmDefinitionResourceTest.java deleted file mode 100644 index 9174275f5..000000000 --- a/java/src/test/java/monasca/api/resource/AlarmDefinitionResourceTest.java +++ /dev/null @@ -1,436 +0,0 @@ -/* - * Copyright (c) 2014,2016 Hewlett Packard Enterprise Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
- */ - -package monasca.api.resource; - -import static org.mockito.Matchers.any; -import static org.mockito.Matchers.anyInt; -import static org.mockito.Matchers.anyList; -import static org.mockito.Matchers.anyListOf; -import static org.mockito.Matchers.anyMap; -import static org.mockito.Matchers.anyObject; -import static org.mockito.Matchers.anyString; -import static org.mockito.Matchers.eq; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertTrue; -import static org.testng.Assert.fail; - -import java.net.URLEncoder; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Map; - -import javax.ws.rs.core.MediaType; - -import org.testng.annotations.Test; - -import monasca.api.app.AlarmDefinitionService; -import monasca.api.app.command.CreateAlarmDefinitionCommand; -import monasca.api.app.command.UpdateAlarmDefinitionCommand; -import monasca.api.domain.model.common.Paged; -import monasca.api.infrastructure.persistence.PersistUtils; -import monasca.common.model.alarm.AlarmExpression; -import monasca.api.domain.exception.EntityNotFoundException; -import monasca.api.domain.model.alarmdefinition.AlarmDefinition; -import monasca.api.domain.model.alarmdefinition.AlarmDefinitionRepo; -import monasca.api.domain.model.common.Link; -import monasca.api.resource.exception.ErrorMessages; -import monasca.common.model.alarm.AlarmSeverity; - -import com.sun.jersey.api.client.ClientResponse; - -@Test -public class AlarmDefinitionResourceTest extends AbstractMonApiResourceTest { - private String expression; - private String detExpression; - private AlarmDefinition alarm; - private AlarmDefinition detAlarm; - private AlarmDefinition alarmItem; - private AlarmDefinitionService service; - private AlarmDefinitionRepo repo; - private List alarmActions; - - @Override - @SuppressWarnings("unchecked") - protected void setupResources() throws Exception { - super.setupResources(); - - expression = "avg(disk_read_ops{service=hpcs.compute, instance_id=937}) >= 90"; - detExpression = "count(log.error{service=test,instance_id=2},deterministic) >= 10 times 10"; - List matchBy = Arrays.asList("service", "instance_id"); - alarmItem = - new AlarmDefinition("123", "Disk Exceeds 1k Operations", null, "LOW", expression, - Arrays.asList("service", "instance_id"), true, null, null, null); - alarmActions = new ArrayList(); - alarmActions.add("29387234"); - alarmActions.add("77778687"); - - alarm = - new AlarmDefinition("123", "Disk Exceeds 1k Operations", null, "LOW", expression, matchBy, - true, alarmActions, null, null); - detAlarm = - new AlarmDefinition("456", "log.error", null, "LOW", detExpression, matchBy, - true, alarmActions, null, null); - - service = mock(AlarmDefinitionService.class); - - when( - service.create(eq("abc"), eq("Disk Exceeds 1k Operations"), any(String.class), eq("LOW"), - eq(expression), eq(AlarmExpression.of(expression)), eq(matchBy), any(List.class), - any(List.class), any(List.class))).thenReturn(alarm); - when( - service.create(eq("abc"), eq("log.error"), any(String.class), eq("LOW"), - eq(detExpression), eq(AlarmExpression.of(detExpression)), eq(matchBy), any(List.class), - any(List.class), any(List.class))).thenReturn(detAlarm); - - repo = mock(AlarmDefinitionRepo.class); - when(repo.findById(eq("abc"), eq("123"))).thenReturn(alarm); - 
when(repo.findById(eq("abc"), eq("456"))).thenReturn(detAlarm); - when(repo.find(anyString(), anyString(), (Map) anyMap(), anyListOf( - AlarmSeverity.class), (List) anyList(), anyString(), anyInt())).thenReturn( - Arrays.asList(alarmItem)); - - addResources(new AlarmDefinitionResource(service, repo, new PersistUtils())); - } - - @SuppressWarnings("unchecked") - public void shouldCreate() { - ClientResponse response = - createResponseFor(new CreateAlarmDefinitionCommand("Disk Exceeds 1k Operations", null, - expression, Arrays.asList("service", "instance_id"), "LOW", alarmActions, null, null)); - - assertEquals(response.getStatus(), 201); - AlarmDefinition newAlarm = response.getEntity(AlarmDefinition.class); - String location = response.getHeaders().get("Location").get(0); - assertEquals(location, "/v2.0/alarm-definitions/" + newAlarm.getId()); - assertEquals(newAlarm, alarm); - verify(service).create(eq("abc"), eq("Disk Exceeds 1k Operations"), any(String.class), - eq("LOW"), eq(expression), eq(AlarmExpression.of(expression)), - eq(Arrays.asList("service", "instance_id")), any(List.class), - any(List.class), any(List.class)); - } - - public void shouldCreateDeterministic() { - final CreateAlarmDefinitionCommand request = new CreateAlarmDefinitionCommand( - "log.error", - null, - detExpression, - Arrays.asList("service", "instance_id"), - "LOW", - alarmActions, - null, - null - ); - final ClientResponse response = this.createResponseFor(request); - - assertEquals(response.getStatus(), 201); - AlarmDefinition newAlarm = response.getEntity(AlarmDefinition.class); - String location = response.getHeaders().get("Location").get(0); - assertEquals(location, "/v2.0/alarm-definitions/" + newAlarm.getId()); - assertEquals(newAlarm, detAlarm); - - verify(service).create(eq("abc"), eq("log.error"), any(String.class), - eq("LOW"), eq(detExpression), eq(AlarmExpression.of(detExpression)), - eq(Arrays.asList("service", "instance_id")), any(List.class), - any(List.class), any(List.class)); - } - - public void shouldUpdate() { - when( - service.update(eq("abc"), eq("123"), any(AlarmExpression.class), - any(UpdateAlarmDefinitionCommand.class))).thenReturn(alarm); - ClientResponse response = - client() - .resource("/v2.0/alarm-definitions/123") - .header("X-Tenant-Id", "abc") - .header("Content-Type", MediaType.APPLICATION_JSON) - .put(ClientResponse.class, - new UpdateAlarmDefinitionCommand("Disk Exceeds 1k Operations", "", expression, - Arrays.asList("service", "instance_id"), "LOW", - true, alarmActions, new ArrayList(), - new ArrayList())); - - assertEquals(response.getStatus(), 200); - verify(service).update(eq("abc"), eq("123"), any(AlarmExpression.class), - any(UpdateAlarmDefinitionCommand.class)); - } - - public void shouldErrorOnCreateWithDuplicateDimensions() { - String expression = - "avg(hpcs.compute{instance_id=937, instance_id=123, az=2, instance_uuid=abc123, metric_name=disk_read_ops}) >= 90"; - ClientResponse response = - createResponseFor(new CreateAlarmDefinitionCommand("Disk Exceeds 1k Operations", null, - expression, Arrays.asList("service", "instance_id"), "LOW", alarmActions, null, null)); - - ErrorMessages.assertThat(response.getEntity(String.class)).matches("unprocessable_entity", 422, - "The alarm expression is invalid", - "More than one value was given for dimension instance_id"); - } - - @SuppressWarnings("unchecked") - public void shouldNotRequireDimensionsForCustomNamespace() { - String expression = "avg(foo{metric_name=bar}) >= 90"; - when( - service.create(eq("abc"), eq("Disk 
Exceeds 1k Operations"), any(String.class), eq("LOW"), - eq(expression), eq(AlarmExpression.of(expression)), any(List.class), any(List.class), - any(List.class), any(List.class))).thenReturn(alarm); - ClientResponse response = - createResponseFor(new CreateAlarmDefinitionCommand("Disk Exceeds 1k Operations", null, - expression, Arrays.asList("service", "instance_id"), "LOW", alarmActions, null, null)); - assertEquals(response.getStatus(), 201); - } - - public void shouldErrorOnCreateWithInvalidJson() { - ClientResponse response = - createResponseFor("{\"alarmasdf\"::{\"name\":\"Disk Exceeds 1k Operations\"}}"); - - ErrorMessages.assertThat(response.getEntity(String.class)).matches("bad_request", 400, - "Unable to process the provided JSON", - "Unexpected character (':'"); - } - - public void shouldErrorOnCreateWithInvalidOperator() { - String expression = - "avg(hpcs.compute{instance_id=937, az=2, instance_uuid=0ff588fc-d298-482f-bb11-4b52d56801a4, metric_name=disk_read_ops}) ^ 90"; - ClientResponse response = - createResponseFor(new CreateAlarmDefinitionCommand("Disk Exceeds 1k Operations", null, - expression, Arrays.asList("service", "instance_id"), "LOW", alarmActions, null, null)); - - ErrorMessages.assertThat(response.getEntity(String.class)).matches("unprocessable_entity", 422, - "The alarm expression is invalid", - "Syntax Error"); - } - - public void shouldErrorOnCreateWith0Period() { - String expression = - "avg(hpcs.compute{instance_id=937, az=2, instance_uuid=0ff588fc-d298-482f-bb11-4b52d56801a4, metric_name=disk_read_ops},0) >= 90"; - ClientResponse response = - createResponseFor(new CreateAlarmDefinitionCommand("Disk Exceeds 1k Operations", null, - expression, Arrays.asList("service", "instance_id"), "LOW", alarmActions, null, null)); - - ErrorMessages.assertThat(response.getEntity(String.class)).matches("unprocessable_entity", 422, - "Period must not be 0"); - } - - public void shouldErrorOnCreateWithNonMod60Period() { - String expression = - "avg(hpcs.compute{instance_id=937, az=2, instance_uuid=0ff588fc-d298-482f-bb11-4b52d56801a4, metric_name=disk_read_ops},61) >= 90"; - ClientResponse response = - createResponseFor(new CreateAlarmDefinitionCommand("Disk Exceeds 1k Operations", null, - expression, Arrays.asList("service", "instance_id"), "LOW", alarmActions, null, null)); - - ErrorMessages.assertThat(response.getEntity(String.class)).matches("unprocessable_entity", 422, - "Period 61 must be a multiple of 60"); - } - - public void shouldErrorOnCreateWithPeriodsLessThan1() { - String expression = - "avg(hpcs.compute{instance_id=937, az=2, instance_uuid=0ff588fc-d298-482f-bb11-4b52d56801a4, metric_name=disk_read_ops}) >= 90 times 0"; - ClientResponse response = - createResponseFor(new CreateAlarmDefinitionCommand("Disk Exceeds 1k Operations", null, - expression, Arrays.asList("service", "instance_id"), "LOW", alarmActions, null, null)); - - ErrorMessages.assertThat(response.getEntity(String.class)).matches("unprocessable_entity", 422, - "Periods 0 must be greater than or equal to 1"); - } - - public void shouldErrorOnCreateWithPeriodTimesPeriodsGT2Weeks() { - String expression = - "avg(hpcs.compute{instance_id=937, az=2, instance_uuid=0ff588fc-d298-482f-bb11-4b52d56801a4, metric_name=disk_read_ops},60) >= 90 times 20161"; - ClientResponse response = - createResponseFor(new CreateAlarmDefinitionCommand("Disk Exceeds 1k Operations", null, - expression, Arrays.asList("service", "instance_id"), "LOW", alarmActions, null, null)); - - 
ErrorMessages.assertThat(response.getEntity(String.class)).matches("unprocessable_entity", 422, - "Period 60 times 20161 must total less than 2 weeks in seconds (1209600)"); - } - - public void shouldErrorOnCreateWithTooLongName() { - String expression = - "avg(hpcs.compute{instance_id=937, az=2, instance_uuid=0ff588fc-d298-482f-bb11-4b52d56801a4, metric_name=disk_read_ops}) >= 90"; - ClientResponse response = - createResponseFor(new CreateAlarmDefinitionCommand( - "0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" - + "0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" - + "0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789", - null, expression, Arrays.asList("service", "instance_id"), "LOW", alarmActions, null, - null)); - - ErrorMessages - .assertThat(response.getEntity(String.class)) - .matches("unprocessable_entity", 422, - "Name 012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789 must be 255 characters or less"); - } - - public void shouldErrorOnCreateWithTooLongAlarmAction() { - alarmActions = new ArrayList(); - alarmActions.add("012345678901234567890123456789012345678901234567890"); - ClientResponse response = - createResponseFor(new CreateAlarmDefinitionCommand("Disk Exceeds 1k Operations", null, - expression, Arrays.asList("service", "instance_id"), "LOW", alarmActions, null, null)); - - ErrorMessages - .assertThat(response.getEntity(String.class)) - .matches("unprocessable_entity", 422, - "Alarm action 012345678901234567890123456789012345678901234567890 must be 50 characters or less"); - } - - @SuppressWarnings("unchecked") - public void shouldList() { - - - Map lhm = (Map) client().resource("/v2.0/alarm-definitions").header("X-Tenant-Id", "abc") - .get(Paged.class).elements.get(0); - - AlarmDefinition ad = new AlarmDefinition((String) lhm.get("id"), (String) lhm.get("name"), - (String) lhm.get("description"), - (String) lhm.get("severity"), - (String) lhm.get("expression"), - (List) lhm.get("match_by"), - (boolean) lhm.get("actions_enabled"), - (List) lhm.get("alarm_actions"), - (List) lhm.get("ok_actions"), - (List) lhm.get("undetermined_actions")); - - - List> links = (List>) lhm.get("links"); - List linksList = Arrays.asList(new Link(links.get(0).get("rel"), links.get(0).get("href"))); - - ad.setLinks(linksList); - - List alarms = Arrays.asList(ad); - - assertEquals(alarms, Arrays.asList(alarmItem)); - - verify(repo).find(eq("abc"), anyString(), (Map) anyMap(), anyListOf(AlarmSeverity.class), - (List) anyList(), - anyString(), anyInt()); - } - - @SuppressWarnings("unchecked") - public void shouldListByName() throws Exception { - Map - lhm = - (Map) client() - .resource("/v2.0/alarm-definitions?name=" + URLEncoder.encode("foo bar baz", "UTF-8")) - .header("X-Tenant-Id", "abc").get(Paged.class).elements.get(0); - - AlarmDefinition - ad = - new AlarmDefinition((String) lhm.get("id"), (String) lhm.get("name"), - (String) lhm.get("description"), (String) lhm.get("severity"), - (String) lhm.get("expression"), (List) lhm.get("match_by"), - (boolean) lhm.get("actions_enabled"), - (List) lhm.get("alarm_actions"), - (List) lhm.get("ok_actions"), - (List) lhm.get("undetermined_actions")); - 
- List> links = (List>) lhm.get("links"); - List - linksList = - Arrays.asList(new Link(links.get(0).get("rel"), links.get(0).get("href"))); - - ad.setLinks(linksList); - - List alarms = Arrays.asList(ad); - - assertEquals(alarms, Arrays.asList(alarmItem)); - verify(repo).find(eq("abc"), eq("foo bar baz"), (Map) anyMap(), anyListOf(AlarmSeverity.class), (List) anyList(), - anyString(), anyInt()); - } - - public void shouldGet() { - assertEquals( - client().resource("/v2.0/alarm-definitions/123").header("X-Tenant-Id", "abc") - .get(AlarmDefinition.class), alarm); - verify(repo).findById(eq("abc"), eq("123")); - } - - public void should404OnGetInvalid() { - doThrow(new EntityNotFoundException(null)).when(repo).findById(eq("abc"), eq("999")); - - try { - client().resource("/v2.0/alarm-definitions/999").header("X-Tenant-Id", "abc").get( - AlarmDefinition.class); - fail(); - } catch (Exception e) { - assertTrue(e.getMessage().contains("404")); - } - } - - public void shouldDelete() { - ClientResponse response = - client().resource("/v2.0/alarm-definitions/123").header("X-Tenant-Id", "abc") - .delete(ClientResponse.class); - assertEquals(response.getStatus(), 204); - verify(service).delete(eq("abc"), eq("123")); - } - - public void should404OnDeleteInvalid() { - doThrow(new EntityNotFoundException(null)).when(service).delete(eq("abc"), eq("999")); - - try { - client().resource("/v2.0/alarm-definitions/999").header("X-Tenant-Id", "abc").delete(); - fail(); - } catch (Exception e) { - assertTrue(e.getMessage().contains("404")); - } - } - - @SuppressWarnings("unchecked") - public void should500OnInternalException() { - doThrow(new RuntimeException("")).when(repo).find(anyString(), anyString(), - - (Map) anyObject(), anyListOf(AlarmSeverity.class), (List) anyList(), anyString(), anyInt()); - - try { - client().resource("/v2.0/alarm-definitions").header("X-Tenant-Id", "abc").get(List.class); - fail(); - } catch (Exception e) { - assertTrue(e.getMessage().contains("500"), e.getMessage()); - } - } - - public void shouldHydateLinksOnList() { - List expected = Arrays.asList(new Link("self", "/v2.0/alarm-definitions/123")); - - Map - lhm = - (Map) client().resource("/v2.0/alarm-definitions").header("X-Tenant-Id", "abc") - .get(Paged.class).elements.get(0); - - List> links = (List>) lhm.get("links"); - - List actual = Arrays.asList(new Link(links.get(0).get("rel"), links.get(0).get("href"))); - assertEquals(actual, expected); - } - - public void shouldHydateLinksOnGet() { - List links = - Arrays.asList(new Link("self", "/v2.0/alarm-definitions/123")); - assertEquals( - client().resource("/v2.0/alarm-definitions/123").header("X-Tenant-Id", "abc") - .get(AlarmDefinition.class).getLinks(), links); - } - - private ClientResponse createResponseFor(Object request) { - return client().resource("/v2.0/alarm-definitions").header("X-Tenant-Id", "abc") - .header("Content-Type", MediaType.APPLICATION_JSON).post(ClientResponse.class, request); - } -} diff --git a/java/src/test/java/monasca/api/resource/DimensionResourceTest.java b/java/src/test/java/monasca/api/resource/DimensionResourceTest.java deleted file mode 100644 index e4bfecfdd..000000000 --- a/java/src/test/java/monasca/api/resource/DimensionResourceTest.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * (C) Copyright 2016 Hewlett Packard Enterprise Development LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ - -package monasca.api.resource; - -import static org.mockito.Matchers.anyInt; -import static org.mockito.Matchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; - -import org.testng.annotations.Test; - -import monasca.api.ApiConfig; -import monasca.api.domain.model.dimension.DimensionRepo; -import monasca.api.infrastructure.persistence.PersistUtils; - -import com.sun.jersey.api.client.ClientResponse; - -@Test -public class DimensionResourceTest extends AbstractMonApiResourceTest { - private DimensionRepo dimensionRepo; - private ApiConfig apiConfig; - - @Override - protected void setupResources() throws Exception { - super.setupResources(); - - dimensionRepo = mock(DimensionRepo.class); - apiConfig = mock(ApiConfig.class); - addResources(new DimensionResource(apiConfig, dimensionRepo, new PersistUtils())); - } - - @SuppressWarnings("unchecked") - public void shouldQueryDimensionValuesWithDefaultParams() throws Exception { - - client() - .resource( - "/v2.0/metrics/dimensions/names/values?dimension_name=hpcs.compute") - .header("X-Tenant-Id", "abc").get(ClientResponse.class); - verify(dimensionRepo).findValues(anyString(), anyString(), anyString(), anyString(), - anyInt()); - } - - public void shouldQueryDimensionValuesWithOptionalMetricName() throws Exception { - - client() - .resource( - "/v2.0/metrics/dimensions/names/values?dimension_name=hpcs.compute&metric_name=cpu_utilization") - .header("X-Tenant-Id", "abc").get(ClientResponse.class); - verify(dimensionRepo).findValues(anyString(), anyString(), anyString(), anyString(), - anyInt()); - } - - public void shouldQueryDimensionNamesWithDefaultParams() throws Exception { - - client() - .resource( - "/v2.0/metrics/dimensions/names") - .header("X-Tenant-Id", "abc").get(ClientResponse.class); - verify(dimensionRepo).findNames(anyString(), anyString(), anyString(), anyInt()); - } -} diff --git a/java/src/test/java/monasca/api/resource/LinksTest.java b/java/src/test/java/monasca/api/resource/LinksTest.java deleted file mode 100644 index 9bb02b73a..000000000 --- a/java/src/test/java/monasca/api/resource/LinksTest.java +++ /dev/null @@ -1,137 +0,0 @@ -/* - * (C) Copyright 2015-2016 Hewlett Packard Enterprise Development LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
- */ - -package monasca.api.resource; - -import monasca.api.domain.model.alarm.Alarm; -import monasca.api.domain.model.common.Link; -import monasca.api.domain.model.common.Paged; -import monasca.common.model.alarm.AlarmState; -import static org.testng.Assert.assertEquals; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import org.joda.time.DateTime; -import org.testng.annotations.Test; - -import java.io.UnsupportedEncodingException; -import java.net.URI; -import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; - -import javax.ws.rs.core.MultivaluedMap; -import javax.ws.rs.core.UriInfo; - -@Test -public class LinksTest { - private static final String ALARM_DEF_ID = "af72b3d8-51f3-4eee-8086-535b5e7a9dc8"; - - public void shouldPrefixForHttps() throws UnsupportedEncodingException, URISyntaxException { - Links.accessedViaHttps = true; - assertEquals(Links.prefixForHttps("http://abc123blah/blah/blah"), - "https://abc123blah/blah/blah"); - assertEquals(Links.prefixForHttps("https://abc123blah/blah/blah"), - "https://abc123blah/blah/blah"); - - checkSelfNextLinks(true); - - // Negative - Links.accessedViaHttps = false; - assertEquals(Links.prefixForHttps("http://abc123blah/blah/blah"), "http://abc123blah/blah/blah"); - assertEquals(Links.prefixForHttps("https://abc123blah/blah/blah"), - "https://abc123blah/blah/blah"); - - checkSelfNextLinks(false); - } - - private void checkSelfNextLinks(final boolean returnHttps) throws URISyntaxException, - UnsupportedEncodingException { - final String base = "http://TheVip:8070/v2.0/alarms"; - final String limitParam = "limit=1"; - final String url = base + "?" 
+ limitParam; - final UriInfo uriInfo = mock(UriInfo.class); - when(uriInfo.getRequestUri()).thenReturn(new URI(url)); - when(uriInfo.getAbsolutePath()).thenReturn(new URI(base)); - - final Map params = new HashMap<>(); - params.put("limit", "1"); - @SuppressWarnings("unchecked") - final MultivaluedMap mockParams = mock(MultivaluedMap.class); - when(uriInfo.getQueryParameters()).thenReturn(mockParams); - when(mockParams.keySet()).thenReturn(params.keySet()); - when(mockParams.get("limit")).thenReturn(Arrays.asList("1")); - - // Since limit is 1, need to give two elements so code knows to add next link - final List elements = new ArrayList<>(); - elements.add(createAlarm()); - elements.add(createAlarm()); - - final int limit = 1; - final Paged expected = new Paged(); - final List links = new ArrayList<>(); - String expectedSelf = url; - String expectedNext = base + "?offset=" + elements.get(0).getId() + "&" + limitParam; - if (returnHttps) { - expectedSelf = expectedSelf.replace("http", "https"); - expectedNext = expectedNext.replace("http", "https"); - } - links.add(new Link("self", expectedSelf)); - links.add(new Link("next", expectedNext)); - expected.links = links; - - final ArrayList expectedElements = new ArrayList(); - // Since limit is one, only the first element is returned - expectedElements.add(elements.get(0)); - expected.elements = expectedElements; - final Paged actual = Links.paginate(limit, elements, uriInfo); - assertEquals(actual, expected); - } - - private Alarm createAlarm() { - final String alarmId = UUID.randomUUID().toString(); - final Alarm alarm = new Alarm(alarmId, ALARM_DEF_ID, "Test", "LOW", null, AlarmState.OK, - "OPEN", null, - DateTime.parse("2015-03-14T09:26:53"), - DateTime.parse("2015-03-14T09:26:53"), - DateTime.parse("2015-03-14T09:26:53")); - return alarm; - } - - public void shouldHydrate() throws URISyntaxException { - checkHydrate("http://localhost:8070/"); - checkHydrate("https://localhost/"); - checkHydrate("https://localhost//"); - checkHydrate(""); // Should not happen but handle relative paths - } - - private void checkHydrate(final String base) throws URISyntaxException { - final UriInfo uriInfo = mock(UriInfo.class); - final URI uri = new URI(base); - when(uriInfo.getBaseUri()).thenReturn(uri); - final Alarm alarm = createAlarm(); - alarm.setId("42"); - Links.hydrate(alarm.getAlarmDefinition(), uriInfo, AlarmDefinitionResource.ALARM_DEFINITIONS_PATH); - assertEquals(alarm.getAlarmDefinition().getLinks().size(), 1); - assertEquals(alarm.getAlarmDefinition().getLinks().get(0), new Link("self", base - // Have to cut the first / off of AlarmDefinitionResource.ALARM_DEFINITIONS_PATH - + AlarmDefinitionResource.ALARM_DEFINITIONS_PATH.substring(1) + "/" - + ALARM_DEF_ID)); - } - -} diff --git a/java/src/test/java/monasca/api/resource/MetricResourceTest.java b/java/src/test/java/monasca/api/resource/MetricResourceTest.java deleted file mode 100644 index 7b6f0f3eb..000000000 --- a/java/src/test/java/monasca/api/resource/MetricResourceTest.java +++ /dev/null @@ -1,333 +0,0 @@ -/* - * Copyright (c) 2014,2016 Hewlett Packard Enterprise Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ - -package monasca.api.resource; - -import static monasca.common.dropwizard.JsonHelpers.fromJson; -import static monasca.common.dropwizard.JsonHelpers.jsonFixture; -import static org.mockito.Matchers.any; -import static org.mockito.Matchers.anyString; -import static org.mockito.Matchers.eq; -import static org.mockito.Mockito.doNothing; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.testng.Assert.assertEquals; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import javax.ws.rs.core.MediaType; - -import org.testng.annotations.Test; - -import monasca.api.ApiConfig; -import monasca.api.app.MetricService; -import monasca.api.app.command.CreateMetricCommand; -import monasca.api.domain.model.metric.MetricDefinitionRepo; -import monasca.api.infrastructure.persistence.PersistUtils; -import monasca.api.resource.exception.ErrorMessages; -import com.sun.jersey.api.client.ClientResponse; - -@Test -public class MetricResourceTest extends AbstractMonApiResourceTest { - private Map dimensions; - private Map valueMeta; - private MetricService service; - private MetricDefinitionRepo metricRepo; - long timestamp; - - @Override - @SuppressWarnings("unchecked") - protected void setupResources() throws Exception { - super.setupResources(); - dimensions = new HashMap(); - dimensions.put("instance_id", "937"); - dimensions.put("service", "foo.compute"); - valueMeta = new HashMap(); - valueMeta.put("rc", "404"); - valueMeta.put("errorMsg", "Not Found"); - timestamp = System.currentTimeMillis(); - - service = mock(MetricService.class); - doNothing().when(service).create(any(List.class), anyString(), anyString()); - - metricRepo = mock(MetricDefinitionRepo.class); - addResources(new MetricResource(new ApiConfig(), service, metricRepo, new PersistUtils())); - } - - @SuppressWarnings("unchecked") - public void shouldCreate() { - ClientResponse response = - createResponseFor(new CreateMetricCommand("test_metrictype", dimensions, timestamp, 22.0, - valueMeta)); - - assertEquals(response.getStatus(), 204); - verify(service).create(any(List.class), eq("abc"), anyString()); - } - - @SuppressWarnings("unchecked") - public void shouldCreateSet() throws Exception { - String json = jsonFixture("fixtures/metricSet.json"); - CreateMetricCommand[] metrics = fromJson(json, CreateMetricCommand[].class); - metrics[0].timestamp = timestamp; - metrics[1].timestamp = timestamp; - ClientResponse response = createResponseFor(metrics); - - assertEquals(response.getStatus(), 204); - verify(service).create(any(List.class), eq("abc"), anyString()); - } - - @SuppressWarnings("unchecked") - public void shouldCreateWithNonNumericAZ() { - Map dims = new HashMap(); - dims.put("instance_id", "937"); - dims.put("service", "foo.compute"); - ClientResponse response = - createResponseFor(new CreateMetricCommand("test_metrictype", dims, timestamp, 22.0, - valueMeta)); - - assertEquals(response.getStatus(), 204); - verify(service).create(any(List.class), eq("abc"), anyString()); - } - - @SuppressWarnings("unchecked") - public void 
shouldErrorOnCreateWithoutTimestamp() throws Exception { - String json = jsonFixture("fixtures/metricWithoutTimestamp.json"); - CreateMetricCommand metric = fromJson(json, CreateMetricCommand.class); - ClientResponse response = createResponseFor(metric); - - ErrorMessages.assertThat(response.getEntity(String.class)).matches("unprocessable_entity", 422, - "[timestamp may not be null"); - } - - @SuppressWarnings("unchecked") - public void shouldCreateWithoutDimensions() throws Exception { - ClientResponse response = - createResponseFor(new CreateMetricCommand("test_metrictype", null, timestamp, 22.0, - valueMeta)); - - assertEquals(response.getStatus(), 204); - verify(service).create(any(List.class), eq("abc"), anyString()); - } - - @SuppressWarnings("unchecked") - public void shouldCreateWithZeroValue() { - ClientResponse response = - createResponseFor(new CreateMetricCommand("test_metrictype", dimensions, timestamp, 0.0, - valueMeta)); - - assertEquals(response.getStatus(), 204); - verify(service).create(any(List.class), eq("abc"), anyString()); - } - - @SuppressWarnings("unchecked") - public void shouldCreateWithNegativeValue() { - ClientResponse response = - createResponseFor(new CreateMetricCommand("test_metrictype", dimensions, timestamp, -1.0, - valueMeta)); - - assertEquals(response.getStatus(), 204); - verify(service).create(any(List.class), eq("abc"), anyString()); - } - - @SuppressWarnings("unchecked") - public void shouldErrorOnCreateWithZeroTimestamp() { - ClientResponse response = - createResponseFor(new CreateMetricCommand("test_metrictype", dimensions, 0L, 0.0, - valueMeta)); - - ErrorMessages.assertThat(response.getEntity(String.class)).matches("unprocessable_entity", 422, - String.format("Timestamp %s is out of legal range", 0L)); - } - - public void shouldErrorOnPostWithCrossTenant() { - ClientResponse response = - createResponseForCrossTenant(new CreateMetricCommand("test_metrictype", dimensions, - timestamp, 22.0, valueMeta), "def"); - - ErrorMessages.assertThat(response.getEntity(String.class)).matches("forbidden", 403, - "Project abc cannot POST cross tenant"); - } - - public void shouldErrorOnCreateWithIllegalCharsInName() { - ClientResponse response = - createResponseFor(new CreateMetricCommand("hpcs{.compute%", dimensions, timestamp, 22.0, - valueMeta)); - - ErrorMessages.assertThat(response.getEntity(String.class)).matches("unprocessable_entity", 422, - "Metric name hpcs{.compute% may not contain: > < = { } ( ) ' \" \\ , ; &"); - } - - public void shouldErrorOnCreateWithTooLongName() { - ClientResponse response = - createResponseFor(new CreateMetricCommand( - "1234567890123456789012345678901234567890123456789012345678901234567890" + - "1234567890123456789012345678901234567890123456789012345678901234567890" + - "1234567890123456789012345678901234567890123456789012345678901234567890" + - "1234567890123456789012345678901234567890123456789012345678901234567890" + - "1234567890123456789012345678901234567890123456789012345678901234567890", dimensions, - timestamp, 22.0, valueMeta)); - - ErrorMessages.assertThat(response.getEntity(String.class)).matches("unprocessable_entity", 422, - String.format("[name size must be between 1 and %d", CreateMetricCommand.MAX_NAME_LENGTH)); - } - - public void shouldErrorOnCreateWithReservedService() { - Map dims = new HashMap<>(); - dims.put("instance_id", "937"); - dims.put("service", "hpcs.compute"); - ClientResponse response = - createResponseFor(new CreateMetricCommand("foo", dims, timestamp, 22.0, - valueMeta)); - - 
ErrorMessages.assertThat(response.getEntity(String.class)).matches("forbidden", 403, - "Project abc cannot POST metrics for the hpcs service"); - } - - public void shouldErrorOnCreateWithoutName() throws Exception { - String json = jsonFixture("fixtures/metricWithoutName.json"); - CreateMetricCommand metric = fromJson(json, CreateMetricCommand.class); - metric.timestamp = timestamp; - ClientResponse response = createResponseFor(metric); - ErrorMessages.assertThat(response.getEntity(String.class)).matches("unprocessable_entity", 422, - "[name may not be empty"); - } - - public void shouldErrorOnCreateWithMissingDimensionKey() throws Exception { - String json = jsonFixture("fixtures/metricWithoutDimensionName.json"); - ClientResponse response = createResponseFor(json); - - ErrorMessages.assertThat(response.getEntity(String.class)).matches("unprocessable_entity", 422, - "Dimension name cannot be empty"); - } - - public void shouldErrorOnCreateWithBadDimensionValue() { - Map dims = new HashMap(); - dims.put("blah", ""); - ClientResponse response = - createResponseFor(new CreateMetricCommand("test_metrictype", dims, timestamp, 22.0, - valueMeta)); - - ErrorMessages.assertThat(response.getEntity(String.class)).matches("unprocessable_entity", 422, - "Dimension 'blah' cannot have an empty value"); - } - - public void shouldErrorOnCreateWithMissingDimensionValue() { - Map dims = new HashMap(); - dims.put("instance_id", "937"); - dims.put("az", "2"); - dims.put("instance_uuid", "abc123"); - dims.put("flavor_id", ""); - ClientResponse response = - createResponseFor(new CreateMetricCommand("test_metrictype", dims, timestamp, 22.0, - valueMeta)); - - ErrorMessages.assertThat(response.getEntity(String.class)).matches("unprocessable_entity", 422, - "Dimension 'flavor_id' cannot have an empty value"); - } - - public void shouldErrorOnCreateWithTooLongDimensionName() { - Map dims = new HashMap(); - dims.put("instance_id", "937"); - dims.put("az", "2"); - dims.put("instance_uuid", "abc123"); - dims.put( - "0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" - + "0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" - + "0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789abc", - "abc123"); - ClientResponse response = - createResponseFor(new CreateMetricCommand("test_metrictype", dims, timestamp, 22.0, - valueMeta)); - - ErrorMessages - .assertThat(response.getEntity(String.class)) - .matches( - "unprocessable_entity", - 422, - "Dimension name '012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789abc' must be 255 characters or less"); - } - - public void shouldErrorOnCreateWithTooLongDimensionValue() { - Map dims = new HashMap(); - dims.put("instance_id", "937"); - dims.put("az", "2"); - dims.put("instance_uuid", "abc123"); - dims.put( - "abc", - "0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" - + "0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" - + "0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789abc"); - ClientResponse response = - createResponseFor(new 
CreateMetricCommand("test_metrictype", dims, timestamp, 22.0, - valueMeta)); - - ErrorMessages - .assertThat(response.getEntity(String.class)) - .matches( - "unprocessable_entity", - 422, - "Dimension 'abc' value '012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789abc' must be 255 characters or less"); - } - - public void shouldErrorOnCreateWithHighTimestamp() { - long local_timestamp = timestamp + 1000000; - ClientResponse response = - createResponseFor(new CreateMetricCommand("test_metrictype", dimensions, local_timestamp, - 22.0, valueMeta)); - - ErrorMessages.assertThat(response.getEntity(String.class)).matches("unprocessable_entity", 422, - "Timestamp " + local_timestamp + " is out of legal range"); - } - - public void shouldErrorOnCreateWithLowTimestamp() { - long local_timestamp = timestamp - 1309600000; - ClientResponse response = - createResponseFor(new CreateMetricCommand("test_metrictype", dimensions, local_timestamp, - 22.0, valueMeta)); - - ErrorMessages.assertThat(response.getEntity(String.class)).matches("unprocessable_entity", 422, - "Timestamp " + local_timestamp + " is out of legal range"); - } - - public void shouldErrorOnCreateWithValuesToBeString() throws Exception { - ClientResponse response = - createResponseFor("{\"namespace\": \"foo\",\"timestamp\": 1380750420,\"value\": \"foo\"}"); - - ErrorMessages.assertThat(response.getEntity(String.class)).matches("unprocessable_entity", 422, - "Unable to process the provided JSON"); - } - - private ClientResponse createResponseFor(Object request) { - return client().resource("/v2.0/metrics").header("X-Tenant-Id", "abc") - .header("Content-Type", MediaType.APPLICATION_JSON).post(ClientResponse.class, request); - } - - private ClientResponse createResponseForCrossTenant(Object request, String crossTenantId) { - return client().resource("/v2.0/metrics?tenant_id=" + crossTenantId) - .header("X-Tenant-Id", "abc").header("Content-Type", MediaType.APPLICATION_JSON) - .post(ClientResponse.class, request); - } - - public void shouldErrorOnCreateWithoutValue() throws Exception { - String json = jsonFixture("fixtures/metricWithoutValue.json"); - CreateMetricCommand metric = fromJson(json, CreateMetricCommand.class); - metric.timestamp = timestamp; - ClientResponse response = createResponseFor(metric); - - ErrorMessages.assertThat(response.getEntity(String.class)).matches("unprocessable_entity", 422, - "[value may not be null"); - } -} \ No newline at end of file diff --git a/java/src/test/java/monasca/api/resource/NotificationMethodResourceTest.java b/java/src/test/java/monasca/api/resource/NotificationMethodResourceTest.java deleted file mode 100644 index 73c8be9ed..000000000 --- a/java/src/test/java/monasca/api/resource/NotificationMethodResourceTest.java +++ /dev/null @@ -1,402 +0,0 @@ -/* - * (C) Copyright 2014-2016 Hewlett Packard Enterprise Development LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ - -package monasca.api.resource; - -import com.sun.jersey.api.client.ClientResponse; -import com.sun.jersey.api.client.GenericType; - -import monasca.api.ApiConfig; -import monasca.api.app.command.CreateNotificationMethodCommand; -import monasca.api.domain.exception.EntityNotFoundException; -import monasca.api.domain.model.common.Paged; -import monasca.api.domain.model.notificationmethod.NotificationMethod; -import monasca.api.domain.model.notificationmethod.NotificationMethodRepo; -import monasca.api.infrastructure.persistence.PersistUtils; -import monasca.api.resource.exception.ErrorMessages; - -import org.testng.annotations.Test; - -import javax.ws.rs.core.MediaType; - -import java.util.Arrays; -import java.util.List; -import java.util.Map; - -import static org.mockito.Matchers.any; -import static org.mockito.Matchers.anyInt; -import static org.mockito.Matchers.anyString; -import static org.mockito.Matchers.eq; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - -@Test -public class NotificationMethodResourceTest extends AbstractMonApiResourceTest { - private NotificationMethod notificationMethod, notificationMethodWebhook, notificationMethodPagerduty; - private NotificationMethodRepo repo; - private ApiConfig config; - - private static final String NOTIFICATION_METHOD_WEBHOOK = "WEBHOOK"; - private static final String NOTIFICATION_METHOD_EMAIL = "EMAIL"; - private static final String NOTIFICATION_METHOD_PAGERDUTY = "PAGERDUTY"; - - @Override - protected void setupResources() throws Exception { - super.setupResources(); - notificationMethod = - new NotificationMethod("123", "Joe's Email", NOTIFICATION_METHOD_EMAIL, "a@b", 0); - notificationMethodWebhook = - new NotificationMethod("1234", "MyWh", NOTIFICATION_METHOD_WEBHOOK, "http://localhost", 60); - notificationMethodPagerduty = - new NotificationMethod("12345", "MyPd", NOTIFICATION_METHOD_PAGERDUTY, "nzH2LVRdMzun11HNC2oD", 0); - - repo = mock(NotificationMethodRepo.class); - when(repo.create(eq("abc"), eq("MyEmail"), eq(NOTIFICATION_METHOD_EMAIL), anyString(), eq(0))) - .thenReturn(notificationMethod); - when(repo.create(eq("abc"), eq("MyWh"), eq(NOTIFICATION_METHOD_WEBHOOK), anyString(), anyInt())) - .thenReturn(notificationMethodWebhook); - when(repo.create(eq("abc"), eq("MyPd"), eq(NOTIFICATION_METHOD_PAGERDUTY), anyString(), eq(0))) - .thenReturn(notificationMethodPagerduty); - when(repo.findById(eq("abc"), eq("123"))).thenReturn(notificationMethod); - when(repo.find(eq("abc"), (List) anyList(), anyString(), anyInt())) - .thenReturn(Arrays.asList(notificationMethod)); - - config = mock(ApiConfig.class); - config.validNotificationPeriods = Arrays.asList(0, 60); - addResources(new NotificationMethodResource(config, repo, new PersistUtils())); - } - - public void shouldCreate() { - ClientResponse response = - client() - .resource("/v2.0/notification-methods") - .header("X-Tenant-Id", "abc") - .header("Content-Type", MediaType.APPLICATION_JSON) - .post(ClientResponse.class, - new CreateNotificationMethodCommand("MyEmail", NOTIFICATION_METHOD_EMAIL, "a@a.com", "0")); - - NotificationMethod 
newNotificationMethod = response.getEntity(NotificationMethod.class); - String location = response.getHeaders().get("Location").get(0); - assertEquals(response.getStatus(), 201); - assertEquals(location, "/v2.0/notification-methods/" + newNotificationMethod.getId()); - assertEquals(newNotificationMethod, notificationMethod); - verify(repo).create(eq("abc"), eq("MyEmail"), eq(NOTIFICATION_METHOD_EMAIL), anyString(), eq(0)); - } - - public void shouldUpdate() { - when( - repo.update(eq("abc"), anyString(), anyString(), any(String.class), - anyString(), eq(0))).thenReturn(notificationMethod); - ClientResponse response = - client() - .resource("/v2.0/notification-methods/123") - .header("X-Tenant-Id", "abc") - .header("Content-Type", MediaType.APPLICATION_JSON) - .put(ClientResponse.class, - new CreateNotificationMethodCommand("Foo", NOTIFICATION_METHOD_EMAIL, "a@a.com", "0")); - - assertEquals(response.getStatus(), 200); - verify(repo).update(eq("abc"), eq("123"), eq("Foo"), eq(NOTIFICATION_METHOD_EMAIL), - eq("a@a.com"), eq(0)); - } - - public void should422OnBadEnum() { - ClientResponse response = - client() - .resource("/v2.0/notification-methods") - .header("X-Tenant-Id", "abc") - .header("Content-Type", MediaType.APPLICATION_JSON) - .post(ClientResponse.class, - new CreateNotificationMethodCommand("MyEmail", null, "a@b", "0")); - - String e = response.getEntity(String.class); - ErrorMessages.assertThat(e).matches("unprocessable_entity", 422, - "[type may not be null (was null)]"); - } - - public void should422OnIncorrectAddressFormat() { - ClientResponse response = - client() - .resource("/v2.0/notification-methods") - .header("X-Tenant-Id", "abc") - .header("Content-Type", MediaType.APPLICATION_JSON) - .post(ClientResponse.class, - new CreateNotificationMethodCommand("MyEmail", NOTIFICATION_METHOD_EMAIL, "a@", "0")); - - ErrorMessages.assertThat(response.getEntity(String.class)).matches("unprocessable_entity", 422, - "Address a@ is not of correct format"); - } - - public void should422OnIncorrectAddressFormat2() { - ClientResponse response = - client() - .resource("/v2.0/notification-methods") - .header("X-Tenant-Id", "abc") - .header("Content-Type", MediaType.APPLICATION_JSON) - .post(ClientResponse.class, - new CreateNotificationMethodCommand("MyEmail", NOTIFICATION_METHOD_EMAIL, "a@f ,", "0")); - - ErrorMessages.assertThat(response.getEntity(String.class)).matches("unprocessable_entity", 422, - "Address a@f , is not of correct format"); - } - - public void should422OnBadAddress() { - ClientResponse response = - client() - .resource("/v2.0/notification-methods") - .header("X-Tenant-Id", "abc") - .header("Content-Type", MediaType.APPLICATION_JSON) - .post(ClientResponse.class, - new CreateNotificationMethodCommand("MyEmail", NOTIFICATION_METHOD_EMAIL, "", "0")); - - ErrorMessages.assertThat(response.getEntity(String.class)).matches("unprocessable_entity", 422, - "[address may not be empty (was )"); - } - - public void should422OnTooLongName() { - ClientResponse response = - client() - .resource("/v2.0/notification-methods") - .header("X-Tenant-Id", "abc") - .header("Content-Type", MediaType.APPLICATION_JSON) - .post( - ClientResponse.class, - new CreateNotificationMethodCommand( - "01234567889012345678890123456788901234567889012345678890123456788901234567889012345678890123456788901234567889" - + "01234567889012345678890123456788901234567889012345678890123456788901234567889012345678890123456788901234567889" - + 
"01234567889012345678890123456788901234567889012345678890123456788901234567889012345678890123456788901234567889", - NOTIFICATION_METHOD_EMAIL, "a@b", "0")); - - ErrorMessages.assertThat(response.getEntity(String.class)).matches("unprocessable_entity", 422, - "[name size must be between 1 and 250"); - } - - public void should422OnTooLongAddress() { - ClientResponse response = - client() - .resource("/v2.0/notification-methods") - .header("X-Tenant-Id", "abc") - .header("Content-Type", MediaType.APPLICATION_JSON) - .post( - ClientResponse.class, - new CreateNotificationMethodCommand( - "MyEmail", - NOTIFICATION_METHOD_EMAIL, - "abcdefghi@0123456789012345678901234567890" - + "12345678901234567890123456789012345678901234567890" - + "12345678901234567890123456789012345678901234567890" - + "12345678901234567890123456789012345678901234567890" - + "12345678901234567890123456789012345678901234567890" - + "12345678901234567890123456789012345678901234567890" - + "12345678901234567890123456789012345678901234567890" - + "12345678901234567890123456789012345678901234567890" - + "12345678901234567890123456789012345678901234567890" - + "12345678901234567890123456789012345678901234567890" - + "12345678901234567890123456789012345678901234567890" - + "123456789012345678901234567890", "0")); - - String e = response.getEntity(String.class); - ErrorMessages.assertThat(e).matches("unprocessable_entity", 422, - "[address size must be between 1 and 512"); - } - - public void should422OnNonZeroPeriodForEmail() { - ClientResponse response = - client() - .resource("/v2.0/notification-methods") - .header("X-Tenant-Id", "abc") - .header("Content-Type", MediaType.APPLICATION_JSON) - .post(ClientResponse.class, - new CreateNotificationMethodCommand("MyEmail", NOTIFICATION_METHOD_EMAIL, "a@a.com", "60")); - - String e = response.getEntity(String.class); - ErrorMessages.assertThat(e).matches("unprocessable_entity", 422, - "Period can not be non zero for EMAIL"); - } - - public void should422OnNonZeroPeriodForPagerduty() { - ClientResponse response = - client().resource("/v2.0/notification-methods") - .header("X-Tenant-Id", "abc") - .header("Content-Type", MediaType.APPLICATION_JSON) - .post(ClientResponse.class, - new CreateNotificationMethodCommand("MyPd", NOTIFICATION_METHOD_PAGERDUTY, - "http://localhost", "60")); - - String e = response.getEntity(String.class); - ErrorMessages.assertThat(e).matches("unprocessable_entity", 422, - "Period can not be non zero for PAGERDUTY"); - } - - public void should422OnInvalidPeriodForWebhook() { - ClientResponse response = - client().resource("/v2.0/notification-methods") - .header("X-Tenant-Id", "abc") - .header("Content-Type", MediaType.APPLICATION_JSON) - .post(ClientResponse.class, - new CreateNotificationMethodCommand("MyWh", NOTIFICATION_METHOD_WEBHOOK, - "http://localhost", "5")); - - String e = response.getEntity(String.class); - ErrorMessages.assertThat(e).matches("unprocessable_entity", 422, - "5 is not a valid period"); - } - - public void shouldList() { - - - Map - lhm = - (Map) client().resource("/v2.0/notification-methods").header("X-Tenant-Id", "abc") - .get(Paged.class).elements.get(0); - - NotificationMethod - nm = - new NotificationMethod((String) lhm.get("id"), (String) lhm.get("name"), - (String) lhm.get("type"), - (String) lhm.get("address"), 0); - - List notificationMethods = Arrays.asList(nm); - assertEquals(notificationMethods, Arrays.asList(notificationMethod)); - verify(repo).find(eq("abc"), (List) anyList(), anyString(), anyInt()); - } - - public void 
shouldGet() { - assertEquals(client().resource("/v2.0/notification-methods/123").header("X-Tenant-Id", "abc") - .get(NotificationMethod.class), notificationMethod); - verify(repo).findById(eq("abc"), eq("123")); - } - - public void should404OnGetInvalid() { - doThrow(new EntityNotFoundException("Not Found")).when(repo).findById(anyString(), anyString()); - - try { - client().resource("/v2.0/notification-methods/999").header("X-Tenant-Id", "abc") - .get(NotificationMethod.class); - fail(); - } catch (Exception e) { - assertTrue(e.getMessage().contains("404")); - } - } - - public void shouldDelete() { - ClientResponse response = - client().resource("/v2.0/notification-methods/123").header("X-Tenant-Id", "abc") - .delete(ClientResponse.class); - assertEquals(response.getStatus(), 204); - verify(repo).deleteById(eq("abc"), eq("123")); - } - - public void should404OnDeleteInvalid() { - doThrow(new EntityNotFoundException("Not Found")).when(repo).deleteById(anyString(), - anyString()); - - try { - client().resource("/v2.0/notification-methods/999").header("X-Tenant-Id", "abc").delete(); - fail(); - } catch (Exception e) { - assertTrue(e.getMessage().contains("404")); - } - } - - public void should500OnInternalException() { - doThrow(new RuntimeException("")).when(repo).find(anyString(), (List) anyList(), - anyString(), anyInt()); - - try { - client().resource("/v2.0/notification-methods").header("X-Tenant-Id", "abc") - .get(new GenericType>() {}); - fail(); - } catch (Exception e) { - assertTrue(e.getMessage().contains("500")); - } - } - - public void should422OnCreateInvalid() { - try { - client() - .resource("/v2.0/notification-methods") - .header("X-Tenant-Id", "abc") - .header("Content-Type", MediaType.APPLICATION_JSON) - .post(NotificationMethod.class, - new CreateNotificationMethodCommand(null, null, "8675309", "0")); - fail(); - } catch (Exception e) { - assertTrue(e.getMessage().contains("422")); - } - } - - public void shouldFailNullInput() { - ClientResponse response = - client().resource("/v2.0/notification-methods").header("X-Tenant-Id", "abc") - .header("Content-Type", MediaType.APPLICATION_JSON).post(ClientResponse.class, null); - - ErrorMessages.assertThat(response.getEntity(String.class)).matches("unprocessable_entity", 422, - "The request entity was empty"); - } - - public void shouldCreateWebhookNotification() { - ClientResponse response = - client().resource("/v2.0/notification-methods") - .header("X-Tenant-Id", "abc") - .header("Content-Type", MediaType.APPLICATION_JSON) - .post(ClientResponse.class, - new CreateNotificationMethodCommand("MyWh", NOTIFICATION_METHOD_WEBHOOK, - "http://localhost", "0")); - - NotificationMethod newNotificationMethod = response.getEntity(NotificationMethod.class); - String location = response.getHeaders().get("Location").get(0); - - assertEquals(response.getStatus(), 201); - assertEquals(location, "/v2.0/notification-methods/" + newNotificationMethod.getId()); - assertEquals(newNotificationMethod, notificationMethodWebhook); - verify(repo).create(eq("abc"), eq("MyWh"), eq(NOTIFICATION_METHOD_WEBHOOK), anyString(), eq(0)); - } - - public void shouldCreateWebhookNotificationWithNonZeroPeriod() { - ClientResponse response = - client().resource("/v2.0/notification-methods") - .header("X-Tenant-Id", "abc") - .header("Content-Type", MediaType.APPLICATION_JSON) - .post(ClientResponse.class, - new CreateNotificationMethodCommand("MyWh", NOTIFICATION_METHOD_WEBHOOK, - "http://localhost", "60")); - - NotificationMethod newNotificationMethod = 
response.getEntity(NotificationMethod.class); - String location = response.getHeaders().get("Location").get(0); - - assertEquals(response.getStatus(), 201); - assertEquals(location, "/v2.0/notification-methods/" + newNotificationMethod.getId()); - assertEquals(newNotificationMethod, notificationMethodWebhook); - verify(repo).create(eq("abc"), eq("MyWh"), eq(NOTIFICATION_METHOD_WEBHOOK), anyString(), eq(60)); - } - - public void shouldCreatePagerdutyNotification() { - ClientResponse response = - client().resource("/v2.0/notification-methods") - .header("X-Tenant-Id", "abc") - .header("Content-Type", MediaType.APPLICATION_JSON) - .post(ClientResponse.class, - new CreateNotificationMethodCommand("MyPd", NOTIFICATION_METHOD_PAGERDUTY, - "http://localhost", "0")); - - NotificationMethod newNotificationMethod = response.getEntity(NotificationMethod.class); - String location = response.getHeaders().get("Location").get(0); - - assertEquals(response.getStatus(), 201); - assertEquals(location, "/v2.0/notification-methods/" + newNotificationMethod.getId()); - assertEquals(newNotificationMethod, notificationMethodPagerduty); - verify(repo).create(eq("abc"), eq("MyPd"), eq(NOTIFICATION_METHOD_PAGERDUTY), anyString(), eq(0)); - } -} diff --git a/java/src/test/java/monasca/api/resource/NotificationMethodTypeResourceTest.java b/java/src/test/java/monasca/api/resource/NotificationMethodTypeResourceTest.java deleted file mode 100644 index 3505ee052..000000000 --- a/java/src/test/java/monasca/api/resource/NotificationMethodTypeResourceTest.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * (C) Copyright 2016 Hewlett Packard Enterprise Development LP - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
- */ - -package monasca.api.resource; - -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; -import static org.testng.Assert.assertEquals; - -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.TreeSet; - -import org.testng.annotations.Test; - -import monasca.api.ApiConfig; -import monasca.api.domain.model.common.Paged; -import monasca.api.domain.model.notificationmethod.NotificationMethodType; -import monasca.api.domain.model.notificationmethod.NotificationMethodTypesRepo; -import monasca.api.infrastructure.persistence.PersistUtils; - -@Test -public class NotificationMethodTypeResourceTest extends AbstractMonApiResourceTest { - - private ApiConfig config; - NotificationMethodTypesResource resource; - - @Override - protected void setupResources() throws Exception { - super.setupResources(); - config = mock(ApiConfig.class); - config.validNotificationPeriods = Arrays.asList(0, 60); - - List NOTIFICATION_METHODS = Arrays.asList("Email", "PagerDuty", "WebHook"); - - NotificationMethodTypesRepo repo = mock(NotificationMethodTypesRepo.class); - when(repo.listNotificationMethodTypes()) - .thenReturn(NOTIFICATION_METHODS); - - resource = new NotificationMethodTypesResource(config, repo,new PersistUtils()); - addResources(resource); - } - - - - - private Set getNotificationMethods(List elements) - { - Set returnNotificationMethods = new TreeSet(); - - for ( Object p : elements){ - Map mp = (Map)p; - NotificationMethodType m = new NotificationMethodType((String)mp.get("type")); - returnNotificationMethods.add(m.getType()); - } - return returnNotificationMethods; - - } - - public void shouldListCorrectNotifcationTypes() throws Exception - { - List pages = (List) client().resource("/v2.0/notification-methods/types").get(Paged.class).elements; - - Set responseGot = getNotificationMethods(pages); - Set expectedNotificationMethodTypes = new TreeSet(Arrays.asList("EMAIL", "WEBHOOK", "PAGERDUTY")); - assertEquals(responseGot, expectedNotificationMethodTypes); - - // Change the config to have one notification type - - NotificationMethodTypesRepo repo = mock(NotificationMethodTypesRepo.class); - when(repo.listNotificationMethodTypes()) - .thenReturn(Arrays.asList("Email")); - resource.repo = repo; - pages = (List) client().resource("/v2.0/notification-methods/types").get(Paged.class).elements; - responseGot = getNotificationMethods(pages); - - expectedNotificationMethodTypes = new TreeSet(Arrays.asList("EMAIL")); - assertEquals(responseGot, expectedNotificationMethodTypes); - - - // Change the config to have more than one notification type - repo = mock(NotificationMethodTypesRepo.class); - when(repo.listNotificationMethodTypes()) - .thenReturn(Arrays.asList("Email", "Type1", "Type2", "Type3")); - resource.repo = repo; - pages = (List) client().resource("/v2.0/notification-methods/types").get(Paged.class).elements; - - responseGot = getNotificationMethods(pages); - expectedNotificationMethodTypes = new TreeSet(Arrays.asList("EMAIL", "TYPE1", "TYPE2", "TYPE3")); - assertEquals(responseGot, expectedNotificationMethodTypes); - - - } - -} diff --git a/java/src/test/java/monasca/api/resource/StatisticResourceTest.java b/java/src/test/java/monasca/api/resource/StatisticResourceTest.java deleted file mode 100644 index 23b22dae1..000000000 --- a/java/src/test/java/monasca/api/resource/StatisticResourceTest.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright (c) 2014,2016 Hewlett-Packard Development Company, L.P. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ - -package monasca.api.resource; - -import static org.mockito.Matchers.any; -import static org.mockito.Matchers.anyBoolean; -import static org.mockito.Matchers.anyInt; -import static org.mockito.Matchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.testng.Assert.assertEquals; - -import java.util.List; -import java.util.Map; - -import org.joda.time.DateTime; -import org.testng.annotations.Test; - -import monasca.api.ApiConfig; -import monasca.api.domain.model.statistic.StatisticRepo; -import monasca.api.infrastructure.persistence.PersistUtils; - -import com.sun.jersey.api.client.ClientResponse; - -@Test -public class StatisticResourceTest extends AbstractMonApiResourceTest { - private StatisticRepo statisticRepo; - private ApiConfig apiConfig; - long timestamp; - - @Override - protected void setupResources() throws Exception { - super.setupResources(); - - statisticRepo = mock(StatisticRepo.class); - apiConfig = mock(ApiConfig.class); - addResources(new StatisticResource(apiConfig, statisticRepo, new PersistUtils())); - } - - @SuppressWarnings("unchecked") - public void shouldQueryWithDefaultParams() throws Exception { - - client() - .resource( - "/v2.0/metrics/statistics?name=cpu_utilization&start_time=2013-11-20T18:43Z&dimensions=service:hpcs.compute,%20instance_id:123&statistics=avg,%20min,%20max&period=60") - .header("X-Tenant-Id", "abc").get(ClientResponse.class); - verify(statisticRepo).find(anyString(), anyString(), any(Map.class), any(DateTime.class), - any(DateTime.class), any(List.class), anyInt(), any(String.class), anyInt(), - anyBoolean(), any(List.class)); - } - - public void queryShouldThrowOnInvalidDateFormat() throws Exception { - ClientResponse response = - client() - .resource( - "/v2.0/metrics/statistics?name=cpu_utilization&dimensions=service:hpcs.compute,%20instance_id:123&start_time=2013-1120&statistics=avg") - .header("X-Tenant-Id", "abc").get(ClientResponse.class); - assertEquals(response.getStatus(), 422); - } - - public void queryShouldThrowOnInvalidPeriodDataType() throws Exception { - ClientResponse response = - client() - .resource( - "/v2.0/metrics/statistics?name=cpu_utilization&dimensions=service:hpcs.compute,%20instance_id:123&start_time=2013-11-20T18:43Z&statistics=avg&period=foo") - .header("X-Tenant-Id", "abc").get(ClientResponse.class); - assertEquals(response.getStatus(), 422); - } - - public void queryShouldThrowOnInvalidStatistics() throws Exception { - ClientResponse response = - client() - .resource( - "/v2.0/metrics/statistics?name=cpu_utilization&dimensions=service:hpcs.compute,%20instance_id:123&start_time=2013-11-20T18:43Z&statistics=foo,bar") - .header("X-Tenant-Id", "abc").get(ClientResponse.class); - assertEquals(response.getStatus(), 422); - } - - public void queryShouldThrowOnInvalidPeriod() throws Exception { - ClientResponse response = - client() - .resource( - 
"/v2.0/metrics/statistics?name=cpu_utilization&dimensions=service:hpcs.compute,%20instance_id:123&start_time=2013-11-20T18:43Z&statistics=avg&period=foo") - .header("X-Tenant-Id", "abc").get(ClientResponse.class); - assertEquals(response.getStatus(), 422); - } -} diff --git a/java/src/test/java/monasca/api/resource/VersionResourceTest.java b/java/src/test/java/monasca/api/resource/VersionResourceTest.java deleted file mode 100644 index e43b0d571..000000000 --- a/java/src/test/java/monasca/api/resource/VersionResourceTest.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ - -package monasca.api.resource; - -import static org.mockito.Matchers.anyString; -import static org.mockito.Matchers.eq; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertTrue; -import static org.testng.Assert.fail; - -import java.util.Arrays; -import java.util.List; -import java.util.Map; - -import org.joda.time.DateTime; -import org.testng.annotations.Test; - -import monasca.api.domain.exception.EntityNotFoundException; -import monasca.api.domain.model.common.Link; -import monasca.api.domain.model.common.Paged; -import monasca.api.domain.model.version.Version; -import monasca.api.domain.model.version.Version.VersionStatus; -import monasca.api.domain.model.version.VersionRepo; -import monasca.api.infrastructure.persistence.PersistUtils; - -import com.sun.jersey.api.client.GenericType; - -@Test -public class VersionResourceTest extends AbstractMonApiResourceTest { - private Version version; - private VersionRepo repo; - - @Override - protected void setupResources() throws Exception { - super.setupResources(); - version = new Version("v2.0", VersionStatus.CURRENT, new DateTime(1355253328)); - version.setLinks(Arrays.asList(new Link("self", - "https://cloudsvc.example.com/v2.0"))); - - repo = mock(VersionRepo.class); - when(repo.findById(eq("v2.0"))).thenReturn(version); - when(repo.find()).thenReturn(Arrays.asList(version)); - addResources(new VersionResource(repo, new PersistUtils())); - } - - public void shouldList() { - - Map - lhm = - (Map) client().resource("/").header("X-Tenant-Id", "abc").get(Paged.class).elements.get(0); - - Version - actual = - new Version((String) lhm.get("id"), VersionStatus.valueOf((String) lhm.get("status")), - new DateTime((int) lhm.get("updated"))); - - List> links = (List>) lhm.get("links"); - List - linksList = - Arrays.asList(new Link(links.get(0).get("rel"), links.get(0).get("href"))); - - actual.setLinks(linksList); - - assertEquals(actual, version); - verify(repo).find(); - } - - public void shouldGet() { - assertEquals(client().resource("/v2.0").get(Version.class), version); - verify(repo).findById(eq("v2.0")); - } - - public void should404OnGetInvalid() { - doThrow(new 
EntityNotFoundException("")).when(repo).findById(anyString()); - - try { - client().resource("/v9.9").get(Version.class); - fail(); - } catch (Exception e) { - assertTrue(e.getMessage().contains("404")); - } - } - - public void should500OnInternalException() { - doThrow(new RuntimeException("")).when(repo).find(); - - try { - client().resource("/").get(new GenericType<List<Version>>() {}); - fail(); - } catch (Exception e) { - assertTrue(e.getMessage().contains("500")); - } - } -} diff --git a/java/src/test/java/monasca/api/resource/exception/ErrorMessages.java b/java/src/test/java/monasca/api/resource/exception/ErrorMessages.java deleted file mode 100644 index 486fae746..000000000 --- a/java/src/test/java/monasca/api/resource/exception/ErrorMessages.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright (c) 2014,2016 Hewlett Packard Enterprise Development Company, L.P. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ - -package monasca.api.resource.exception; - -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertTrue; - -import javax.annotation.Nullable; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; - -/** - * Error message utilities. - */ -public final class ErrorMessages { - private static final ObjectMapper MAPPER = new ObjectMapper(); - - public interface ErrorMessageMatcher { - void matches(String faultType, int code, String messagePrefix); - - void matches(String faultType, int code, String messagePrefix, @Nullable String detailsPrefix); - } - - public static ErrorMessageMatcher assertThat(final String errorMessage) { - try { - JsonNode node = MAPPER.readTree(errorMessage); - final String rootKey = node.fieldNames().next(); - node = node.get(rootKey); - final ErrorMessage message = MAPPER.reader(ErrorMessage.class).readValue(node); - - return new ErrorMessageMatcher() { - @Override - public void matches(String faultType, int code, String messagePrefix) { - matches(faultType, code, messagePrefix, null); - } - - @Override - public void matches(String faultType, int code, String messagePrefix, - @Nullable String detailsPrefix) { - assertEquals(rootKey, faultType); - assertEquals(message.code, code); - assertTrue(message.message.startsWith(messagePrefix), - String.format("String '%s' does not start with '%s'", message.message, messagePrefix)); - if (detailsPrefix != null) - assertTrue(message.details.startsWith(detailsPrefix), message.details); - } - }; - - } catch (Exception e) { - throw new RuntimeException(e); - } - } -} diff --git a/java/src/test/resources/fixtures/alarm.json b/java/src/test/resources/fixtures/alarm.json deleted file mode 100644 index 04f287819..000000000 --- a/java/src/test/resources/fixtures/alarm.json +++ /dev/null @@ -1 +0,0 @@ -{"id":"123","links":[{"rel":"self","href":"https://cloudsvc.example.com/v1.0"}],"name":"90% CPU","description":"","expression":"avg(hpcs.compute{instance_id=666, image_id=345}) >=
90","deterministic":false,"match_by":[],"severity":"LOW","actions_enabled":false,"alarm_actions":["123345345","23423"],"ok_actions":null,"undetermined_actions":null} diff --git a/java/src/test/resources/fixtures/metricSet.json b/java/src/test/resources/fixtures/metricSet.json deleted file mode 100644 index dc7ad59b5..000000000 --- a/java/src/test/resources/fixtures/metricSet.json +++ /dev/null @@ -1 +0,0 @@ -[{"name":"user1","dimensions":{"instance_id": "392633"},"value":127},{"name":"user2","dimensions":{"instance_id": "392633"},"value":127}] \ No newline at end of file diff --git a/java/src/test/resources/fixtures/metricWithoutDimensionName.json b/java/src/test/resources/fixtures/metricWithoutDimensionName.json deleted file mode 100644 index d7ac7d2a0..000000000 --- a/java/src/test/resources/fixtures/metricWithoutDimensionName.json +++ /dev/null @@ -1 +0,0 @@ -{"name":"foo","dimensions":{"": "392633"},"timestamp":1366178760,"value":127} \ No newline at end of file diff --git a/java/src/test/resources/fixtures/metricWithoutName.json b/java/src/test/resources/fixtures/metricWithoutName.json deleted file mode 100644 index a8dc8ac10..000000000 --- a/java/src/test/resources/fixtures/metricWithoutName.json +++ /dev/null @@ -1 +0,0 @@ -{"dimensions":{"instance_id": "392633"},"timestamp":1366178760,"value":127} \ No newline at end of file diff --git a/java/src/test/resources/fixtures/metricWithoutTimestamp.json b/java/src/test/resources/fixtures/metricWithoutTimestamp.json deleted file mode 100644 index 53289c737..000000000 --- a/java/src/test/resources/fixtures/metricWithoutTimestamp.json +++ /dev/null @@ -1 +0,0 @@ -{"name":"user_name","dimensions":{"instance_id": "392633"},"value":128} \ No newline at end of file diff --git a/java/src/test/resources/fixtures/metricWithoutValue.json b/java/src/test/resources/fixtures/metricWithoutValue.json deleted file mode 100644 index 8b62d6ac7..000000000 --- a/java/src/test/resources/fixtures/metricWithoutValue.json +++ /dev/null @@ -1 +0,0 @@ -{"name":"user_name","dimensions":{"instance_id": "392633"},"timestamp":1366178760} \ No newline at end of file diff --git a/java/src/test/resources/fixtures/newAlarm.json b/java/src/test/resources/fixtures/newAlarm.json deleted file mode 100644 index 9540e343f..000000000 --- a/java/src/test/resources/fixtures/newAlarm.json +++ /dev/null @@ -1 +0,0 @@ -{"name":"Disk Exceeds 1k Operations","expression":"avg(hpcs.compute:cpu:1:{instance_id=5}) > 5","alarm_actions":["123345345","23423"]}} \ No newline at end of file diff --git a/java/src/test/resources/fixtures/newNotificationMethod.json b/java/src/test/resources/fixtures/newNotificationMethod.json deleted file mode 100644 index 5ce0955b2..000000000 --- a/java/src/test/resources/fixtures/newNotificationMethod.json +++ /dev/null @@ -1 +0,0 @@ -{"name":"MyEmail","type":"EMAIL","address":"a@b"} diff --git a/java/src/test/resources/fixtures/newNotificationMethodWithInvalidEnum.json b/java/src/test/resources/fixtures/newNotificationMethodWithInvalidEnum.json deleted file mode 100644 index dea1141f7..000000000 --- a/java/src/test/resources/fixtures/newNotificationMethodWithInvalidEnum.json +++ /dev/null @@ -1 +0,0 @@ -{"name":"MyEmail","type":"invalid_enum","address":"a@b"} diff --git a/java/src/test/resources/fixtures/newNotificationMethodWithLowercaseEnum.json b/java/src/test/resources/fixtures/newNotificationMethodWithLowercaseEnum.json deleted file mode 100644 index 940388043..000000000 --- a/java/src/test/resources/fixtures/newNotificationMethodWithLowercaseEnum.json +++ 
/dev/null @@ -1 +0,0 @@ -{"name":"MyEmail","type":"email","address":"a@b"} diff --git a/java/src/test/resources/fixtures/newNotificationMethodWithPeriod.json b/java/src/test/resources/fixtures/newNotificationMethodWithPeriod.json deleted file mode 100644 index 51ead08e0..000000000 --- a/java/src/test/resources/fixtures/newNotificationMethodWithPeriod.json +++ /dev/null @@ -1 +0,0 @@ -{"name":"MyWebhook","type":"webhook","address":"http://somedomain.com","period":"60"} \ No newline at end of file diff --git a/java/src/test/resources/fixtures/notificationMethod.json b/java/src/test/resources/fixtures/notificationMethod.json deleted file mode 100644 index 546041352..000000000 --- a/java/src/test/resources/fixtures/notificationMethod.json +++ /dev/null @@ -1 +0,0 @@ -{"id":"123","links":[{"rel":"self","href":"https://cloudsvc.example.com/v1.0"}],"name":"MyEmail","type":"EMAIL","address":"a@b", "period":0} diff --git a/java/src/test/resources/fixtures/version.json b/java/src/test/resources/fixtures/version.json deleted file mode 100644 index ea0d380bc..000000000 --- a/java/src/test/resources/fixtures/version.json +++ /dev/null @@ -1 +0,0 @@ -{"id":"1.0","links":[{"rel":"self","href":"https://cloudsvc.example.com/v1.0"}],"status":"CURRENT","updated":"2012-12-11T19:15:28.000Z"} diff --git a/java/src/test/resources/monasca-api-config.yml b/java/src/test/resources/monasca-api-config.yml deleted file mode 100644 index 4f958f912..000000000 --- a/java/src/test/resources/monasca-api-config.yml +++ /dev/null @@ -1,142 +0,0 @@ -# The region for which all metrics passing through this server will be persisted -region: useast - -# Whether this server is running on a secure port -accessedViaHttps: false - -# Topic for publishing metrics to -metricsTopic: metrics - -# Topic for publishing domain events to -eventsTopic: events - -validNotificationPeriods: - - 60 - -databaseConfiguration: - # vertica | influxdb - databaseType: influxdb - -kafka: - brokerUris: - - 192.168.59.103:9092 - zookeeperUris: - - 192.168.59.103:2181 - healthCheckTopic: healthcheck - -mysql: - driverClass: com.mysql.jdbc.Driver - url: jdbc:mysql://localhost:3306/mon?connectTimeout=5000&autoReconnect=true - user: monapi - password: password - maxWaitForConnection: 1s - validationQuery: "/* MyService Health Check */ SELECT 1" - minSize: 8 - maxSize: 32 - checkConnectionWhileIdle: false - checkConnectionOnBorrow: true - -# vertica: -# driverClass: com.vertica.jdbc.Driver -# url: jdbc:vertica://192.168.10.4/mon -# user: mon_api -# password: password -# maxWaitForConnection: 1s -# validationQuery: "/* MyService Health Check */ SELECT 1" -# minSize: 4 -# maxSize: 32 -# checkConnectionWhileIdle: false -# # -# # vertica database hint to be added to SELECT -# # statements. For example, the hint below is used -# # to tell vertica that the query can be satisfied -# # locally (replicated projection). 
-# # -# # dbHint: "/*+KV(01)*/" -# dbHint: "" - - -influxDB: - name: mon - replicationFactor: 1 - url: http://192.168.59.103:8086 - user: root - password: root - -middleware: - enabled: false - serverVIP: 192.168.10.5 - serverPort: 5000 - useHttps: false - connTimeout: 500 - connSSLClientAuth: false - connPoolMaxActive: 3 - connPoolMaxIdle: 3 - connPoolEvictPeriod: 600000 - connPoolMinIdleTime: 600000 - connRetryTimes: 2 - connRetryInterval: 50 - defaultAuthorizedRoles: [user, domainuser, domainadmin,heat_stack_owner,_member_] - readOnlyAuthorizedRoles: [monasca-read-only-user] - agentAuthorizedRoles: [monasca-agent] - adminAuthMethod: password - adminUser: admin - adminPassword: password - adminToken: ADMIN - timeToCacheToken: 600 - maxTokenCacheSize: 1048576 - -server: - applicationConnectors: - - type: http - maxRequestHeaderSize: 16KiB # Allow large headers used by keystone tokens - - -# Logging settings. -logging: - - # The default level of all loggers. Can be OFF, ERROR, WARN, INFO, DEBUG, TRACE, or ALL. - level: debug - - # Logger-specific levels. - loggers: - - # Sets the level for 'com.example.app' to DEBUG. - com.example.app: DEBUG - monasca: debug - - appenders: - - type: console - threshold: debug - timeZone: UTC - target: stdout - logFormat: # TODO - - - type: file - currentLogFilename: ./log/monasca-api/monasca-api.log - threshold: debug - archive: true - archivedLogFilenamePattern: /var/log/monasca/monasca-api-%d.log.gz - archivedFileCount: 5 - timeZone: UTC - logFormat: # TODO - - - type: syslog - host: localhost - port: 514 - facility: local0 - threshold: ALL - logFormat: # TODO - -hibernate: - supportEnabled: true - providerClass: com.zaxxer.hikari.hibernate.HikariConnectionProvider - dataSourceClassName: org.postgresql.ds.PGPoolingDataSource - serverName: localhost - portNumber: 5432 - databaseName: mon - user: mon - password: mon - initialConnections: 25 - maxConnections: 100 - autoConfig: validate diff --git a/java/src/test/resources/monasca/api/infrastructure/persistence/mysql/alarm.sql b/java/src/test/resources/monasca/api/infrastructure/persistence/mysql/alarm.sql deleted file mode 100644 index 8c3d76177..000000000 --- a/java/src/test/resources/monasca/api/infrastructure/persistence/mysql/alarm.sql +++ /dev/null @@ -1,89 +0,0 @@ -CREATE TABLE `alarm` ( - `id` varchar(36) NOT NULL, - `alarm_definition_id` varchar(36) NOT NULL DEFAULT '', - `state` varchar(20) NOT NULL check state in ('UNDETERMINED','OK','ALARM'), - `created_at` datetime NOT NULL, - `updated_at` datetime NOT NULL, - PRIMARY KEY (`id`), - KEY `tenant_id` (`alarm_definition_id`) -); - -CREATE TABLE `alarm_definition` ( - `id` varchar(36) NOT NULL, - `tenant_id` varchar(36) NOT NULL, - `name` varchar(250) DEFAULT NULL, - `description` varchar(250) DEFAULT NULL, - `expression` mediumtext, - `severity` varchar(20) NOT NULL check severity in ('LOW','MEDIUM','HIGH','CRITICAL'), - `match_by` varchar(255) DEFAULT '', - `actions_enabled` tinyint(1) NOT NULL DEFAULT '1', - `created_at` datetime NOT NULL, - `updated_at` datetime NOT NULL, - `deleted_at` datetime DEFAULT NULL, - PRIMARY KEY (`id`) -); - -CREATE TABLE `alarm_metric` ( - `alarm_id` varchar(36) NOT NULL, - `metric_definition_dimensions_id` binary(20) NOT NULL DEFAULT '\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0', - PRIMARY KEY (`alarm_id`,`metric_definition_dimensions_id`) -); - -CREATE TABLE `metric_definition` ( - `id` binary(20) NOT NULL DEFAULT '\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0', - `name` varchar(255) NOT NULL, - `tenant_id` varchar(36) NOT 
NULL, - `region` varchar(255) NOT NULL DEFAULT '', - PRIMARY KEY (`id`) -); - -CREATE TABLE `metric_definition_dimensions` ( - `id` binary(20) NOT NULL DEFAULT '\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0', - `metric_definition_id` binary(20) NOT NULL DEFAULT '\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0', - `metric_dimension_set_id` binary(20) NOT NULL DEFAULT '\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0', - PRIMARY KEY (`id`) -); - -CREATE TABLE `metric_dimension` ( - `dimension_set_id` binary(20) NOT NULL DEFAULT '\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0', - `name` varchar(255) NOT NULL DEFAULT '', - `value` varchar(255) NOT NULL DEFAULT '' -); - -CREATE TABLE `sub_alarm_definition` ( - `id` varchar(36) NOT NULL, - `alarm_definition_id` varchar(36) NOT NULL DEFAULT '', - `function` varchar(10) NOT NULL, - `metric_name` varchar(100) DEFAULT NULL, - `operator` varchar(5) NOT NULL, - `threshold` double NOT NULL, - `period` int(11) NOT NULL, - `periods` int(11) NOT NULL, - `is_deterministic` tinyint(1) NOT NULL DEFAULT '0', - `created_at` datetime NOT NULL, - `updated_at` datetime NOT NULL, - PRIMARY KEY (`id`) -); - -CREATE TABLE `sub_alarm_definition_dimension` ( - `sub_alarm_definition_id` varchar(36) NOT NULL DEFAULT '', - `dimension_name` varchar(50) NOT NULL DEFAULT '', - `value` varchar(300) DEFAULT NULL, - PRIMARY KEY (`sub_alarm_definition_id`,`dimension_name`) -); - -CREATE TABLE `sub_alarm` ( - `id` varchar(36) NOT NULL, - `alarm_id` varchar(36) NOT NULL DEFAULT '', - `expression` mediumtext NOT NULL, - `created_at` datetime NOT NULL, - `updated_at` datetime NOT NULL, - PRIMARY KEY (`id`) -); - -CREATE TABLE `alarm_action` ( - `alarm_definition_id` varchar(36) NOT NULL, - `alarm_state` varchar(20) NOT NULL check alarm_state in ('UNDETERMINED','OK','ALARM'), - `action_id` varchar(36) NOT NULL DEFAULT '', - PRIMARY KEY (`alarm_definition_id`,`alarm_state`,`action_id`) -); diff --git a/java/src/test/resources/monasca/api/infrastructure/persistence/mysql/notification_method.sql b/java/src/test/resources/monasca/api/infrastructure/persistence/mysql/notification_method.sql deleted file mode 100644 index 54f7ceee3..000000000 --- a/java/src/test/resources/monasca/api/infrastructure/persistence/mysql/notification_method.sql +++ /dev/null @@ -1,11 +0,0 @@ -CREATE TABLE `notification_method` ( - `id` varchar(36) NOT NULL, - `tenant_id` varchar(36) NOT NULL DEFAULT '', - `name` varchar(250) NOT NULL DEFAULT '', - `type` varchar(10) NOT NULL DEFAULT 'EMAIL' check type in ('EMAIL', 'WEBHOOK', 'PAGERDUTY'), - `address` varchar(100) NOT NULL DEFAULT '', - `period` int NOT NULL DEFAULT 0, - `created_at` datetime NOT NULL, - `updated_at` datetime NOT NULL, - PRIMARY KEY (`id`) -); diff --git a/java/src/test/resources/monasca/api/infrastructure/persistence/mysql/notification_method_type.sql b/java/src/test/resources/monasca/api/infrastructure/persistence/mysql/notification_method_type.sql deleted file mode 100644 index cebd5c4eb..000000000 --- a/java/src/test/resources/monasca/api/infrastructure/persistence/mysql/notification_method_type.sql +++ /dev/null @@ -1,4 +0,0 @@ -CREATE TABLE `notification_method_type` ( - `name` varchar(20) NOT NULL DEFAULT '', - PRIMARY KEY (`name`) -); diff --git a/java/src/test/resources/monasca/api/infrastructure/persistence/vertica/metrics.sql b/java/src/test/resources/monasca/api/infrastructure/persistence/vertica/metrics.sql deleted file mode 100644 index 77a121fb7..000000000 --- a/java/src/test/resources/monasca/api/infrastructure/persistence/vertica/metrics.sql +++
/dev/null @@ -1,79 +0,0 @@ -CREATE SCHEMA MonMetrics; - -CREATE TABLE MonMetrics.Measurements ( - id AUTO_INCREMENT, - metric_definition_id BINARY(20) NOT NULL, - time_stamp TIMESTAMP NOT NULL, - value FLOAT NOT NULL, - PRIMARY KEY(id) -) PARTITION BY EXTRACT('year' FROM time_stamp)*10000 + EXTRACT('month' FROM time_stamp)*100 + EXTRACT('day' FROM time_stamp); - -CREATE TABLE MonMetrics.Definitions ( - id BINARY(20) NOT NULL, - name VARCHAR NOT NULL, - tenant_id VARCHAR(14) NOT NULL, - region VARCHAR NOT NULL, - PRIMARY KEY(id), - CONSTRAINT MetricsDefinitionsConstraint UNIQUE(id, name, tenant_id, region) -); - -CREATE TABLE MonMetrics.Dimensions ( - metric_definition_id BINARY(20) NOT NULL, - name VARCHAR NOT NULL, - value VARCHAR NOT NULL, - CONSTRAINT MetricsDimensionsConstraint UNIQUE(metric_definition_id, name, value) -); - -CREATE PROJECTION Measurements_DBD_1_rep_MonMetrics /*+createtype(D)*/ -( - id ENCODING AUTO, - metric_definition_id ENCODING RLE, - time_stamp ENCODING DELTAVAL, - value ENCODING AUTO -) -AS - SELECT id, - metric_definition_id, - time_stamp, - value - FROM MonMetrics.Measurements - ORDER BY metric_definition_id, - time_stamp, - id -UNSEGMENTED ALL NODES; - -CREATE PROJECTION Definitions_DBD_2_rep_MonMetrics /*+createtype(D)*/ -( - id ENCODING RLE, - name ENCODING AUTO, - tenant_id ENCODING RLE, - region ENCODING RLE -) -AS - SELECT id, - name, - tenant_id, - region - FROM MonMetrics.Definitions - ORDER BY id, - tenant_id, - region, - name -UNSEGMENTED ALL NODES; - -CREATE PROJECTION Dimensions_DBD_4_rep_MonMetrics /*+createtype(D)*/ -( - metric_definition_id ENCODING RLE, - name ENCODING AUTO, - value ENCODING AUTO -) -AS - SELECT metric_definition_id, - name, - value - FROM MonMetrics.Dimensions - ORDER BY metric_definition_id, - name -UNSEGMENTED ALL NODES; - -select refresh('MonMetrics.Measurements, MonMetrics.Definitions, MonMetrics.Dimensions'); \ No newline at end of file diff --git a/monasca_api/__init__.py b/monasca_api/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/monasca_api/api/__init__.py b/monasca_api/api/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/monasca_api/api/alarm_definitions_api_v2.py b/monasca_api/api/alarm_definitions_api_v2.py deleted file mode 100644 index c1d507484..000000000 --- a/monasca_api/api/alarm_definitions_api_v2.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2014 Hewlett-Packard -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo_log import log - -LOG = log.getLogger(__name__) - - -class AlarmDefinitionsV2API(object): - def __init__(self): - super(AlarmDefinitionsV2API, self).__init__() - LOG.info('Initializing AlarmDefinitionsV2API!') - - def on_post(self, req, res): - res.status = '501 Not Implemented' - - def on_get(self, req, res, alarm_definition_id): - res.status = '501 Not Implemented' - - def on_put(self, req, res, alarm_definition_id): - res.status = '501 Not Implemented' - - def on_patch(self, req, res, alarm_definition_id): - res.status = '501 Not Implemented' - - def on_delete(self, req, res, alarm_definition_id): - res.status = '501 Not Implemented' diff --git a/monasca_api/api/alarms_api_v2.py b/monasca_api/api/alarms_api_v2.py deleted file mode 100644 index b40969d72..000000000 --- a/monasca_api/api/alarms_api_v2.py +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright 2014-2016 Hewlett Packard Enterprise Development Company LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_log import log - -LOG = log.getLogger(__name__) - - -class AlarmsV2API(object): - def __init__(self): - super(AlarmsV2API, self).__init__() - LOG.info('Initializing AlarmsV2API!') - - def on_put(self, req, res, alarm_id): - res.status = '501 Not Implemented' - - def on_patch(self, req, res, alarm_id): - res.status = '501 Not Implemented' - - def on_delete(self, req, res, alarm_id): - res.status = '501 Not Implemented' - - def on_get(self, req, res, alarm_id): - res.status = '501 Not Implemented' - - -class AlarmsCountV2API(object): - def __init__(self): - super(AlarmsCountV2API, self).__init__() - - def on_get(self, req, res): - res.status = '501 Not Implemented' - - -class AlarmsStateHistoryV2API(object): - def __init__(self): - super(AlarmsStateHistoryV2API, self).__init__() - LOG.info('Initializing AlarmsStateHistoryV2API!') - - def on_get(self, req, res, alarm_id): - res.status = '501 Not Implemented' diff --git a/monasca_api/api/core/__init__.py b/monasca_api/api/core/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/monasca_api/api/core/log/__init__.py b/monasca_api/api/core/log/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/monasca_api/api/core/log/exceptions.py b/monasca_api/api/core/log/exceptions.py deleted file mode 100644 index 1038daee6..000000000 --- a/monasca_api/api/core/log/exceptions.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright 2021 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-import falcon - - -HTTP_422 = '422 Unprocessable Entity' - - -class HTTPUnprocessableEntity(falcon.HTTPError): - """HTTPUnprocessableEntity http error. - - HTTPError that comes with '422 Unprocessable Entity' status - - :argument: message(str) - meaningful description of what caused an error - :argument: kwargs - any other option defined in - :py:class:`falcon.HTTPError` - """ - def __init__(self, message, **kwargs): - falcon.HTTPError.__init__(self, - HTTP_422, - 'unprocessable_entity', - message, - **kwargs - ) diff --git a/monasca_api/api/core/log/log_publisher.py b/monasca_api/api/core/log/log_publisher.py deleted file mode 100644 index 85495d7b6..000000000 --- a/monasca_api/api/core/log/log_publisher.py +++ /dev/null @@ -1,215 +0,0 @@ -# Copyright 2015 kornicameister@gmail.com -# Copyright 2016-2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import time - -import falcon -from monasca_api.common.rest import utils as rest_utils -from monasca_common.kafka import client_factory -from oslo_log import log -from oslo_utils import encodeutils - -from monasca_api.api.core.log import model -from monasca_api import conf - -LOG = log.getLogger(__name__) -CONF = conf.CONF - -_RETRY_AFTER = 60 -_TIMESTAMP_KEY_SIZE = len( - bytearray(str(int(time.time() * 1000)).encode('utf-8'))) -_TRUNCATED_PROPERTY_SIZE = len( - bytearray('"truncated": true'.encode('utf-8'))) -_KAFKA_META_DATA_SIZE = 32 -_TRUNCATION_SAFE_OFFSET = 1 - - -class InvalidMessageException(Exception): - pass - - -class LogPublisher(object): - """Publishes log data to Kafka - - LogPublisher is able to send single message to multiple configured topic. - It uses following configuration written in conf file :: - - [log_publisher] - topics = 'logs' - kafka_url = 'localhost:8900' - - Note: - Uses :py:class:`monasca_common.kafka.producer.KafkaProducer` - to ship logs to kafka. For more details - see `monasca_common`_ github repository. - - .. _monasca_common: https://github.com/openstack/monasca-common - - """ - - def __init__(self): - self._topics = CONF.kafka.logs_topics - self.max_message_size = CONF.log_publisher.max_message_size - - self._kafka_publisher = client_factory.get_kafka_producer( - CONF.kafka.uri, CONF.kafka.legacy_kafka_client_enabled) - - LOG.info('Initializing LogPublisher <%s>', self) - - def send_message(self, messages): - """Sends message to each configured topic. - - Note: - Falsy messages (i.e. 
empty) are not shipped to kafka - - See also - * :py:class:`monasca_log_api.common.model.Envelope` - * :py:meth:`._is_message_valid` - - :param dict|list messages: instance (or instances) of log envelope - """ - - if not messages: - return - if not isinstance(messages, list): - messages = [messages] - - num_of_msgs = len(messages) - - LOG.debug('About to publish %d messages to %s topics', - num_of_msgs, self._topics) - - try: - send_messages = [] - - for message in messages: - msg = self._transform_message(message) - send_messages.append(msg) - self._publish(send_messages) - - except Exception as ex: - LOG.exception('Failure in publishing messages to kafka') - raise ex - - def _transform_message(self, message): - """Transforms message into JSON. - - Method executes transformation operation for - single element. Operation is set of following - operations: - - * checking if message is valid - (:py:func:`.LogPublisher._is_message_valid`) - * truncating message if necessary - (:py:func:`.LogPublisher._truncate`) - - :param model.Envelope message: instance of message - :return: serialized message - :rtype: str - """ - if not self._is_message_valid(message): - raise InvalidMessageException() - truncated = self._truncate(message) - return encodeutils.safe_encode(truncated, incoming='utf-8') - - def _truncate(self, envelope): - """Truncates the message if needed. - - Each message send to kafka is verified. - Method checks if message serialized to json - exceeds maximum allowed size that can be posted to kafka - queue. If so, method truncates message property of the log - by difference between message and allowed size. - - :param Envelope envelope: original envelope - :return: serialized message - :rtype: str - """ - - msg_str = model.serialize_envelope(envelope) - envelope_size = ((len(bytearray(msg_str, 'utf-8', 'replace')) + - _TIMESTAMP_KEY_SIZE + - _KAFKA_META_DATA_SIZE) - if msg_str is not None else -1) - - diff_size = ((envelope_size - self.max_message_size) + - _TRUNCATION_SAFE_OFFSET) - - if diff_size > 1: - truncated_by = diff_size + _TRUNCATED_PROPERTY_SIZE - - LOG.warning(('Detected message that exceeds %d bytes,' - 'message will be truncated by %d bytes'), - self.max_message_size, - truncated_by) - - log_msg = envelope['log']['message'] - truncated_log_msg = log_msg[:-truncated_by] - - envelope['log']['truncated'] = True - envelope['log']['message'] = truncated_log_msg - - msg_str = rest_utils.as_json(envelope) - - return msg_str - - def _publish(self, messages): - """Publishes messages to kafka. - - :param list messages: list of messages - """ - num_of_msg = len(messages) - - LOG.debug('Publishing %d messages', num_of_msg) - - try: - for topic in self._topics: - self._kafka_publisher.publish( - topic, - messages - ) - LOG.debug('Sent %d messages to topic %s', num_of_msg, topic) - except Exception as ex: - raise falcon.HTTPServiceUnavailable('Service unavailable', - str(ex), 60) - - @staticmethod - def _is_message_valid(message): - """Validates message before sending. - - Methods checks if message is :py:class:`model.Envelope`. - By being instance of this class it is ensured that all required - keys are found and they will have their values. - - """ - return message and isinstance(message, model.Envelope) - - def _after_publish(self, send_count, to_send_count): - """Executed after publishing to sent metrics. 
- - :param int send_count: how many messages have been sent - :param int to_send_count: how many messages should be sent - - """ - - failed_to_send = to_send_count - send_count - - if failed_to_send == 0: - LOG.debug('Successfully published all [%d] messages', - send_count) - else: - error_str = ('Failed to send all messages, %d ' - 'messages out of %d have not been published') - LOG.error(error_str, failed_to_send, to_send_count) diff --git a/monasca_api/api/core/log/model.py b/monasca_api/api/core/log/model.py deleted file mode 100644 index 687cf4141..000000000 --- a/monasca_api/api/core/log/model.py +++ /dev/null @@ -1,119 +0,0 @@ -# Copyright 2016 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_utils import timeutils -import six - -from monasca_api.common.rest import utils as rest_utils - - -def serialize_envelope(envelope): - """Returns json representation of an envelope. - - :return: json object of envelope - :rtype: six.text_type - - """ - json = rest_utils.as_json(envelope, ensure_ascii=False) - - if six.PY2: - raw = six.text_type(json.replace(r'\\', r'\\\\'), encoding='utf-8', - errors='replace') - else: - raw = json - - return raw - - -class LogEnvelopeException(Exception): - pass - - -class Envelope(dict): - def __init__(self, log, meta): - if not log: - error_msg = 'Envelope cannot be created without log' - raise LogEnvelopeException(error_msg) - if 'tenantId' not in meta or not meta.get('tenantId'): - error_msg = 'Envelope cannot be created without tenant' - raise LogEnvelopeException(error_msg) - - creation_time = self._get_creation_time() - super(Envelope, self).__init__( - log=log, - creation_time=creation_time, - meta=meta - ) - - @staticmethod - def _get_creation_time(): - return timeutils.utcnow_ts() - - @classmethod - def new_envelope(cls, log, tenant_id, region, dimensions=None): - """Creates new log envelope - - Log envelope is combined ouf of following properties - - * log - dict - * creation_time - timestamp - * meta - meta block - - Example output json would like this: - - .. 
code-block:: json - - { - "log": { - "message": "Some message", - "dimensions": { - "hostname": "devstack" - } - }, - "creation_time": 1447834886, - "meta": { - "tenantId": "e4bd29509eda473092d32aadfee3e7b1", - "region": "pl" - } - } - - :param dict log: original log element (containing message and other - params - :param str tenant_id: tenant id to be put in meta field - :param str region: region to be put in meta field - :param dict dimensions: additional dimensions to be appended to log - object dimensions - - """ - if dimensions: - log['dimensions'].update(dimensions) - - log_meta = { - 'region': region, - 'tenantId': tenant_id - } - - return cls(log, log_meta) - - @property - def log(self): - return self.get('log', None) - - @property - def creation_time(self): - return self.get('creation_time', None) - - @property - def meta(self): - return self.get('meta', None) diff --git a/monasca_api/api/core/log/validation.py b/monasca_api/api/core/log/validation.py deleted file mode 100644 index ea3e99602..000000000 --- a/monasca_api/api/core/log/validation.py +++ /dev/null @@ -1,246 +0,0 @@ -# Copyright 2016-2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import re - -import falcon -from oslo_log import log -import six - -from monasca_api.api.core.log import exceptions -from monasca_api import conf - -LOG = log.getLogger(__name__) -CONF = conf.CONF - -APPLICATION_TYPE_CONSTRAINTS = { - 'MAX_LENGTH': 255, - 'PATTERN': re.compile('^[a-zA-Z0-9_.\\-]+$') -} -"""Application type constraint used in validation. - -See :py:func:`Validations.validate_application_type` -""" -DIMENSION_NAME_CONSTRAINTS = { - 'MAX_LENGTH': 255, - 'PATTERN': re.compile('[^><={}(), \'";&]+$') -} -"""Constraint for name of single dimension. - -See :py:func:`Validations.validate_dimensions` -""" -DIMENSION_VALUE_CONSTRAINTS = { - 'MAX_LENGTH': 255 -} -"""Constraint for value of single dimension. - -See :py:func:`Validations.validate_dimensions` -""" - - -def validate_application_type(application_type=None): - """Validates application type. - - Validation won't take place if application_type is None. 
- For details see: :py:data:`APPLICATION_TYPE_CONSTRAINTS` - - :param str application_type: application type - """ - - def validate_length(): - if (len(application_type) > - APPLICATION_TYPE_CONSTRAINTS['MAX_LENGTH']): - msg = ('Application type {type} must be ' - '{length} characters or less') - raise exceptions.HTTPUnprocessableEntity( - msg.format( - type=application_type, - length=APPLICATION_TYPE_CONSTRAINTS[ - 'MAX_LENGTH'] - ) - ) - - def validate_match(): - if (not APPLICATION_TYPE_CONSTRAINTS['PATTERN'] - .match(application_type)): - raise exceptions.HTTPUnprocessableEntity( - 'Application type %s may only contain: "a-z A-Z 0-9 _ - ."' - % application_type - ) - - if application_type: - validate_length() - validate_match() - - -def _validate_dimension_name(name): - try: - if len(name) > DIMENSION_NAME_CONSTRAINTS['MAX_LENGTH']: - raise exceptions.HTTPUnprocessableEntity( - 'Dimension name %s must be 255 characters or less' % - name - ) - if name[0] == '_': - raise exceptions.HTTPUnprocessableEntity( - 'Dimension name %s cannot start with underscore (_)' % - name - ) - if not DIMENSION_NAME_CONSTRAINTS['PATTERN'].match(name): - raise exceptions.HTTPUnprocessableEntity( - 'Dimension name %s may not contain: %s' % - (name, '> < = { } ( ) \' " , ; &') - ) - except (TypeError, IndexError): - raise exceptions.HTTPUnprocessableEntity( - 'Dimension name cannot be empty' - ) - - -def _validate_dimension_value(value): - try: - value[0] - if len(value) > DIMENSION_VALUE_CONSTRAINTS['MAX_LENGTH']: - raise exceptions.HTTPUnprocessableEntity( - 'Dimension value %s must be 255 characters or less' % - value - ) - except (TypeError, IndexError): - raise exceptions.HTTPUnprocessableEntity( - 'Dimension value cannot be empty' - ) - - -def validate_dimensions(dimensions): - """Validates dimensions type. - - Empty dimensions are not being validated. - For details see: - - :param dict dimensions: dimensions to validate - - * :py:data:`DIMENSION_NAME_CONSTRAINTS` - * :py:data:`DIMENSION_VALUE_CONSTRAINTS` - """ - try: - for dim_name, dim_value in dimensions.items(): - _validate_dimension_name(dim_name) - _validate_dimension_value(dim_value) - except AttributeError: - raise exceptions.HTTPUnprocessableEntity( - 'Dimensions %s must be a dictionary (map)' % dimensions) - - -def validate_content_type(req, allowed): - """Validates content type. - - Method validates request against correct - content type. - - If content-type cannot be established (i.e. header is missing), - :py:class:`falcon.HTTPMissingHeader` is thrown. - If content-type is not **application/json** or **text/plain**, - :py:class:`falcon.HTTPUnsupportedMediaType` is thrown. - - - :param falcon.Request req: current request - :param iterable allowed: allowed content type - - :exception: :py:class:`falcon.HTTPMissingHeader` - :exception: :py:class:`falcon.HTTPUnsupportedMediaType` - """ - content_type = req.content_type - - LOG.debug('Content-Type is %s', content_type) - - if content_type is None or len(content_type) == 0: - raise falcon.HTTPMissingHeader('Content-Type') - - if content_type not in allowed: - sup_types = ', '.join(allowed) - details = ('Only [%s] are accepted as logs representations' - % str(sup_types)) - raise falcon.HTTPUnsupportedMediaType(description=details) - - -def validate_payload_size(req): - """Validates payload size. - - Method validates sent payload size. - It expects that http header **Content-Length** is present. - If it does not, method raises :py:class:`falcon.HTTPLengthRequired`. 
- Otherwise values is being compared with :: - - [service] - max_log_size = 1048576 - - **max_log_size** refers to the maximum allowed content length. - If it is exceeded :py:class:`falcon.HTTPRequestEntityTooLarge` is - thrown. - - :param falcon.Request req: current request - - :exception: :py:class:`falcon.HTTPLengthRequired` - :exception: :py:class:`falcon.HTTPRequestEntityTooLarge` - - """ - payload_size = req.content_length - max_size = CONF.service.max_log_size - - LOG.debug('Payload (content-length) is %s', str(payload_size)) - - if payload_size is None: - raise falcon.HTTPLengthRequired( - title='Content length header is missing', - description='Content length is required to estimate if ' - 'payload can be processed' - ) - - if payload_size >= max_size: - raise falcon.HTTPPayloadTooLarge( - title='Log payload size exceeded', - description='Maximum allowed size is %d bytes' % max_size - ) - - -def validate_is_delegate(roles): - delegate_roles = CONF.roles_middleware.delegate_roles - if roles and delegate_roles: - roles = roles.split(',') if isinstance(roles, six.string_types) \ - else roles - return any(x in set(delegate_roles) for x in roles) - return False - - -def validate_cross_tenant(tenant_id, cross_tenant_id, roles): - - if not validate_is_delegate(roles): - if cross_tenant_id: - raise falcon.HTTPForbidden( - 'Permission denied', - 'Projects %s cannot POST cross tenant logs' % tenant_id - ) - - -def validate_log_message(log_object): - """Validates log property. - - Log property should have message property. - - Args: - log_object (dict): log property - """ - if 'message' not in log_object: - raise exceptions.HTTPUnprocessableEntity( - 'Log property should have message' - ) diff --git a/monasca_api/api/core/request.py b/monasca_api/api/core/request.py deleted file mode 100644 index ff659d22e..000000000 --- a/monasca_api/api/core/request.py +++ /dev/null @@ -1,117 +0,0 @@ -# Copyright 2016 FUJITSU LIMITED -# Copyright 2018 OP5 AB -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import falcon - -from monasca_api.api.core import request_context -from monasca_api.common.policy import policy_engine as policy -from monasca_api.common.repositories import constants -from monasca_api import policies -from monasca_api.v2.common import exceptions - - -policy.POLICIES = policies - -_PROJECT_ID_PARAM = 'project_id' -"""Name of the query-param pointing at project-id""" - - -class Request(falcon.Request): - """Variation of falcon.Request with context - - Following class enhances :py:class:`falcon.Request` with - :py:class:`context.RequestContext`. - - """ - - def __init__(self, env, options=None): - super(Request, self).__init__(env, options) - self.context = request_context.RequestContext.from_environ(self.env) - - @property - def project_id(self): - """Returns project-id - - :return: project-id - :rtype: str - - """ - return self.context.project_id - - @property - def cross_project_id(self): - """Returns project-id found in query params. 
- - This particular project-id is later on identified as - cross-project-id - - :return: project-id - :rtype: str - - """ - return self.get_param(_PROJECT_ID_PARAM, required=False) - - @property - def user_id(self): - """Returns user-id - - :return: user-id - :rtype: str - - """ - return self.context.user - - @property - def roles(self): - """Returns roles associated with user - - :return: user's roles - :rtype: list - - """ - return self.context.roles - - @property - def limit(self): - """Returns LIMIT query param value. - - 'limit' is not required query param. - In case it is not found, py:data:'.constants.PAGE_LIMIT' - value is returned. - - :return: value of 'limit' query param or default value - :rtype: int - :raise exceptions.HTTPUnprocessableEntityError: if limit is not valid integer - - """ - limit = self.get_param('limit', required=False, default=None) - if limit is not None: - if limit.isdigit(): - limit = int(limit) - if limit > constants.PAGE_LIMIT: - return constants.PAGE_LIMIT - else: - return limit - else: - err_msg = 'Limit parameter must be a positive integer' - raise exceptions.HTTPUnprocessableEntityError('Invalid limit', err_msg) - else: - return constants.PAGE_LIMIT - - def can(self, action, target=None): - return self.context.can(action, target) - - def __repr__(self): - return '%s, context=%s' % (self.path, self.context.to_dict()) diff --git a/monasca_api/api/core/request_context.py b/monasca_api/api/core/request_context.py deleted file mode 100644 index f8eac0d7a..000000000 --- a/monasca_api/api/core/request_context.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright 2017 FUJITSU LIMITED -# Copyright 2018 OP5 AB -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_context import context - -from monasca_api.common.policy import policy_engine as policy -from monasca_api import policies - -policy.POLICIES = policies - - -class RequestContext(context.RequestContext): - """RequestContext. - - RequestContext is customized version of - :py:class:oslo_context.context.RequestContext. - """ - - def can(self, action, target=None): - if target is None: - target = {'project_id': self.project_id, - 'user_id': self.user_id} - - return policy.authorize(self, action=action, target=target) diff --git a/monasca_api/api/healthcheck_api.py b/monasca_api/api/healthcheck_api.py deleted file mode 100644 index d6a9d43e6..000000000 --- a/monasca_api/api/healthcheck_api.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright 2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import falcon -from oslo_log import log - -LOG = log.getLogger(__name__) - - -class HealthCheckApi(object): - """HealthCheck Api. - - HealthCheckApi server information regarding health of the Api. - - """ - - def __init__(self): - super(HealthCheckApi, self).__init__() - LOG.info('Initializing HealthCheckApi') - - def on_get(self, req, res): - """Complex healthcheck report on GET - - Returns complex report regarding API health - and all dependent services - - :param falcon.Request req: current request - :param falcon.Response res: current response - """ - res.status = falcon.HTTP_501 # pragma: no cover - - def on_head(self, req, res): - """Simple healthcheck report on HEAD. - - In opposite to :py:meth:`.HealthCheckApi.on_get`, this - method is supposed to execute ASAP to inform user that - API is up and running. - - :param falcon.Request req: current request - :param falcon.Response res: current response - """ - res.status = falcon.HTTP_501 # pragma: no cover diff --git a/monasca_api/api/logs_api.py b/monasca_api/api/logs_api.py deleted file mode 100644 index 3f6419d88..000000000 --- a/monasca_api/api/logs_api.py +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright 2015 kornicameister@gmail.com -# Copyright 2016 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import falcon -from oslo_log import log - - -LOG = log.getLogger(__name__) - - -class LogsApi(object): - """Logs API. - - Logs API acts as RESTful endpoint accepting - messages contains collected log entries from the system. - Works as gateway for any further processing for accepted data. - - """ - def __init__(self): - super(LogsApi, self).__init__() - LOG.info('Initializing LogsApi') - - def on_post(self, req, res): - """Accepts sent logs as text or json. - - Accepts logs sent to resource which should - be sent to kafka queue. - - :param req: current request - :param res: current response - - """ - res.status = falcon.HTTP_501 # pragma: no cover - - def on_get(self, req, res): - """Queries logs matching specified dimension values. - - Performs queries on the underlying log storage - against a time range and set of dimension values. - - :param req: current request - :param res: current response - - """ - res.status = falcon.HTTP_501 # pragma: no cover - - @property - def version(self): - return getattr(self, 'VERSION') diff --git a/monasca_api/api/metrics_api_v2.py b/monasca_api/api/metrics_api_v2.py deleted file mode 100644 index 5d9034a06..000000000 --- a/monasca_api/api/metrics_api_v2.py +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright 2014 IBM Corp -# (C) Copyright 2014,2016 Hewlett Packard Enterprise Development LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_log import log - -LOG = log.getLogger(__name__) - - -class MetricsV2API(object): - def __init__(self): - super(MetricsV2API, self).__init__() - LOG.info('Initializing MetricsV2API!') - - def on_get(self, req, res): - res.status = '501 Not Implemented' - - def on_post(self, req, res): - res.status = '501 Not Implemented' - - -class MetricsMeasurementsV2API(object): - def __init__(self): - super(MetricsMeasurementsV2API, self).__init__() - LOG.info('Initializing MetricsMeasurementsV2API!') - - def on_get(self, req, res): - res.status = '501 Not Implemented' - - -class MetricsStatisticsV2API(object): - def __init__(self): - super(MetricsStatisticsV2API, self).__init__() - LOG.info('Initializing MetricsStatisticsV2API!') - - def on_get(self, req, res): - res.status = '501 Not Implemented' - - -class MetricsNamesV2API(object): - def __init__(self): - super(MetricsNamesV2API, self).__init__() - LOG.info('Initializing MetricsNamesV2API!') - - def on_get(self, req, res): - res.status = '501 Not Implemented' - - -class DimensionValuesV2API(object): - def __init__(self): - super(DimensionValuesV2API, self).__init__() - LOG.info('Initializing DimensionValuesV2API!') - - def on_get(self, req, res): - res.status = '501 Not Implemented' - - -class DimensionNamesV2API(object): - def __init__(self): - super(DimensionNamesV2API, self).__init__() - LOG.info('Initializing DimensionNamesV2API!') - - def on_get(self, req, res): - res.status = '501 Not Implemented' diff --git a/monasca_api/api/notifications_api_v2.py b/monasca_api/api/notifications_api_v2.py deleted file mode 100644 index 2b1899d26..000000000 --- a/monasca_api/api/notifications_api_v2.py +++ /dev/null @@ -1,38 +0,0 @@ -# (C) Copyright 2014,2016 Hewlett Packard Enterprise Development LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo_log import log - -LOG = log.getLogger(__name__) - - -class NotificationsV2API(object): - def __init__(self): - super(NotificationsV2API, self).__init__() - LOG.info('Initializing NotificationsV2API!') - - def on_post(self, req, res): - res.status = '501 Not Implemented' - - def on_delete(self, req, res, notification_method_id): - res.status = '501 Not Implemented' - - def on_get(self, req, res, notification_method_id): - res.status = '501 Not Implemented' - - def on_put(self, req, res, notification_method_id): - res.status = '501 Not Implemented' - - def on_patch(self, req, res, notification_method_id): - res.status = '501 Not Implemented' diff --git a/monasca_api/api/notificationstype_api_v2.py b/monasca_api/api/notificationstype_api_v2.py deleted file mode 100644 index 311e582ae..000000000 --- a/monasca_api/api/notificationstype_api_v2.py +++ /dev/null @@ -1,38 +0,0 @@ -# (C) Copyright 2016 Hewlett Packard Enterprise Development LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_log import log - -LOG = log.getLogger(__name__) - - -class NotificationsTypeV2API(object): - def __init__(self): - super(NotificationsTypeV2API, self).__init__() - LOG.info('Initializing NotificationsTypeV2API!') - - def on_post(self, req, res): - res.status = '501 Not Implemented' - - def on_delete(self, req, res): - res.status = '501 Not Implemented' - - def on_get(self, req, res): - res.status = '501 Not Implemented' - - def on_put(self, req, res): - res.status = '501 Not Implemented' - - def on_patch(self, req, res): - res.status = '501 Not Implemented' diff --git a/monasca_api/api/server.py b/monasca_api/api/server.py deleted file mode 100644 index 08e328312..000000000 --- a/monasca_api/api/server.py +++ /dev/null @@ -1,148 +0,0 @@ -# Copyright 2014 IBM Corp -# (C) Copyright 2015,2016 Hewlett Packard Enterprise Development LP -# Copyright 2017 Fujitsu LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import os - -import falcon -from monasca_common.simport import simport -from oslo_config import cfg -from oslo_log import log -import paste.deploy - -from monasca_api.api.core import request -from monasca_api import config - -LOG = log.getLogger(__name__) -CONF = config.CONF - - -def launch(conf): - config.parse_args() - - app = falcon.API(request_type=request.Request) - # NOTE(dszumski): Falcon 2.0.0 switches the default for this from True - # to False so we explicitly set it here to prevent the behaviour - # changing between versions. 
- app.req_options.strip_url_path_trailing_slash = True - - versions = simport.load(cfg.CONF.dispatcher.versions)() - app.add_route("/", versions) - app.add_route("/{version_id}", versions) - - # The following resource is a workaround for a regression in falcon 0.3 - # which causes the path '/v2.0' to not route to the versions resource - version_2_0 = simport.load(cfg.CONF.dispatcher.version_2_0)() - app.add_route("/v2.0", version_2_0) - - healthchecks = simport.load(cfg.CONF.dispatcher.healthchecks)() - app.add_route("/healthcheck", healthchecks) - - if cfg.CONF.enable_metrics_api: - launch_metrics_api(app) - - if cfg.CONF.enable_logs_api: - launch_log_api(app) - - LOG.debug('Dispatcher drivers have been added to the routes!') - return app - - -def launch_metrics_api(app): - metrics = simport.load(cfg.CONF.dispatcher.metrics)() - app.add_route("/v2.0/metrics", metrics) - - metrics_measurements = simport.load( - cfg.CONF.dispatcher.metrics_measurements)() - app.add_route("/v2.0/metrics/measurements", metrics_measurements) - - metrics_statistics = simport.load(cfg.CONF.dispatcher.metrics_statistics)() - app.add_route("/v2.0/metrics/statistics", metrics_statistics) - - metrics_names = simport.load(cfg.CONF.dispatcher.metrics_names)() - app.add_route("/v2.0/metrics/names", metrics_names) - - alarm_definitions = simport.load(cfg.CONF.dispatcher.alarm_definitions)() - app.add_route("/v2.0/alarm-definitions", alarm_definitions) - app.add_route("/v2.0/alarm-definitions/{alarm_definition_id}", - alarm_definitions) - - alarms = simport.load(cfg.CONF.dispatcher.alarms)() - app.add_route("/v2.0/alarms", alarms) - app.add_route("/v2.0/alarms/{alarm_id}", alarms) - - alarm_count = simport.load(cfg.CONF.dispatcher.alarms_count)() - app.add_route("/v2.0/alarms/count", alarm_count) - - alarms_state_history = simport.load( - cfg.CONF.dispatcher.alarms_state_history)() - app.add_route("/v2.0/alarms/state-history", alarms_state_history) - app.add_route("/v2.0/alarms/{alarm_id}/state-history", - alarms_state_history) - - notification_methods = simport.load( - cfg.CONF.dispatcher.notification_methods)() - app.add_route("/v2.0/notification-methods", notification_methods) - app.add_route("/v2.0/notification-methods/{notification_method_id}", - notification_methods) - - dimension_values = simport.load(cfg.CONF.dispatcher.dimension_values)() - app.add_route("/v2.0/metrics/dimensions/names/values", dimension_values) - - dimension_names = simport.load(cfg.CONF.dispatcher.dimension_names)() - app.add_route("/v2.0/metrics/dimensions/names", dimension_names) - - notification_method_types = simport.load( - cfg.CONF.dispatcher.notification_method_types)() - app.add_route("/v2.0/notification-methods/types", notification_method_types) - - -def launch_log_api(app): - logs = simport.load( - cfg.CONF.dispatcher.logs)() - app.add_route("/v2.0/logs", logs) - - -def get_wsgi_app(config_base_path=None, **kwargs): - - # allow to override names of the configuration files - config_file = kwargs.get('config_file', 'monasca-api.conf') - paste_file = kwargs.get('paste_file', 'api-config.ini') - - if config_base_path is None: - # allow monasca-api to be run in dev mode from __main__ - config_base_path = os.path.join( - os.path.dirname(os.path.realpath(__file__)), '../../etc') - - config_file = os.path.join(config_base_path, config_file) - global_conf = {'config_file': config_file} - - LOG.debug('Initializing WSGI application using configuration from %s', - config_base_path) - - return ( - paste.deploy.loadapp( - 'config:%s' % paste_file, 
- relative_to=config_base_path, - global_conf=global_conf - ) - ) - - -if __name__ == '__main__': - from wsgiref import simple_server - wsgi_app = get_wsgi_app() - httpd = simple_server.make_server('127.0.0.1', 8070, wsgi_app) - httpd.serve_forever() diff --git a/monasca_api/api/versions_api.py b/monasca_api/api/versions_api.py deleted file mode 100644 index 405f654c1..000000000 --- a/monasca_api/api/versions_api.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright 2015 Hewlett-Packard -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_log import log - -LOG = log.getLogger(__name__) - - -class VersionsAPI(object): - def __init__(self): - super(VersionsAPI, self).__init__() - LOG.info('Initializing VersionsAPI!') - - def on_get(self, req, res, id): - res.status = '501 Not Implemented' diff --git a/monasca_api/api/wsgi.py b/monasca_api/api/wsgi.py deleted file mode 100644 index 467bb978f..000000000 --- a/monasca_api/api/wsgi.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright 2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# extremely simple way to setup of monasca-api -# with wsgi - -from monasca_api.api import server - - -def main(): - return server.get_wsgi_app(config_base_path='/etc/monasca') - - -if __name__ == '__main__' or __name__.startswith('_mod_wsgi'): - application = main() diff --git a/monasca_api/cmd/__init__.py b/monasca_api/cmd/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/monasca_api/cmd/monasca_db.py b/monasca_api/cmd/monasca_db.py deleted file mode 100644 index 880418fbb..000000000 --- a/monasca_api/cmd/monasca_db.py +++ /dev/null @@ -1,165 +0,0 @@ -# Copyright 2018 SUSE Linux GmbH -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" - CLI interface for monasca database management. 
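# Editor's sketch (assumption about simport semantics): the simport.load()
# calls in server.py above resolve 'package.module:attribute' strings taken
# from the [dispatcher] config section, which makes every route target
# swappable via configuration. An importlib equivalent is roughly:
import importlib


def load(path):
    # e.g. 'monasca_api.v2.reference.versions:Versions' -> Versions class
    #      (the driver path shown is illustrative)
    module_name, _, attribute = path.partition(':')
    return getattr(importlib.import_module(module_name), attribute)

# versions = load(cfg.CONF.dispatcher.versions)()
# app.add_route('/', versions)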
-""" - -from oslo_config import cfg -from oslo_db.sqlalchemy.migration_cli.ext_alembic import AlembicExtension - -from monasca_api.common.repositories.sqla import sql_repository -from monasca_api import conf -from monasca_api.db.alembic import env -from monasca_api.db.fingerprint import Fingerprint -from monasca_api import version - -import monasca_api.config - -import sys - -CONF = cfg.CONF - -_FP_NOREVISION = ("Schema fingerprint %s does not match any known legacy " - "revision.") - -migration_config = {'alembic_ini_path': env.ini_file_path} - - -def do_detect_revision(): - fingerprint = Fingerprint(sql_repository.get_engine()) - - if fingerprint.revision is None: - print(_FP_NOREVISION % fingerprint.sha256) - sys.exit(1) - else: - print(fingerprint.revision) - - -def do_fingerprint(): - fingerprint = Fingerprint(sql_repository.get_engine()) - if CONF.command.raw: - print(fingerprint.schema_raw, end="") - else: - print(fingerprint.sha256) - - -def do_stamp(): - rev = CONF.command.revision - from_fingerprint = CONF.command.from_fingerprint - - engine = sql_repository.get_engine() - alembic_ext = AlembicExtension(engine, migration_config) - - if rev is None: - if from_fingerprint is False: - print("No revision specified. Specify --from-fingerprint to " - "attempt a guess based on the current database schema's " - "fingerprint.") - sys.exit(1) - else: - fp = Fingerprint(engine) - if fp.revision is None: - print(_FP_NOREVISION % fp.sha256) - sys.exit(1) - rev = fp.revision - - alembic_ext.stamp(rev) - - -def do_upgrade(): - engine = sql_repository.get_engine() - alembic_ext = AlembicExtension(engine, migration_config) - - rev = CONF.command.revision - db_rev = alembic_ext.version() - - fp = Fingerprint(engine) - - if fp.schema_raw != "" and db_rev is None: - print("Non-empty database schema without Alembic version metadata " - "detected. Please use the `stamp` subcommand to add version " - "metadata.") - sys.exit(1) - - alembic_ext.upgrade(rev) - - -def do_version(): - engine = sql_repository.get_engine() - alembic_ext = AlembicExtension(engine, migration_config) - - version = alembic_ext.version() - if version is None: - print("Cannot determine version. Check if this database has Alembic " - "version information. 
") - sys.exit(1) - print(version) - - -def add_command_parsers(subparsers): - parser = subparsers.add_parser('fingerprint', - help="Compute SHA256 fingerprint of " - "current database schema ") - parser.add_argument('-r', '--raw', action='store_true', - help='Print raw schema dump used for ' - 'fingerprinting') - parser.set_defaults(func=do_fingerprint) - - parser = subparsers.add_parser('detect-revision', - help="Attempt to detect revision " - "matching current database " - " schema ") - parser.set_defaults(func=do_detect_revision) - - parser = subparsers.add_parser('stamp', help='Stamp database with an ' - 'Alembic revision') - parser.add_argument('revision', nargs='?', metavar='VERSION', - help='Revision to stamp database with', - default=None) - parser.add_argument('-f', '--from-fingerprint', action='store_true', - help='Try to determine VERSION from fingerprint') - parser.set_defaults(func=do_stamp) - - parser = subparsers.add_parser('upgrade', - help='Upgrade database to given or ' - 'latest revision') - parser.add_argument('revision', metavar='VERSION', nargs='?', - help='Alembic revision to upgrade database to', - default='head') - parser.add_argument('-f', '--from-fingerprint', action='store_true', - help='Try to determine VERSION from fingerprint') - parser.set_defaults(func=do_upgrade) - - parser = subparsers.add_parser('version', help="Show database's current Alembic version") - parser.set_defaults(func=do_version) - - -command_opt = cfg.SubCommandOpt('command', - title='Monasca DB manager', - help='Available commands', - handler=add_command_parsers) - - -def main(): - CONF.register_cli_opt(command_opt) - CONF(args=sys.argv[1:], - default_config_files=monasca_api.config.get_config_files(), - prog='api', - project='monasca', - version=version.version_str) - - conf.register_opts() - - CONF.command.func() diff --git a/monasca_api/cmd/status.py b/monasca_api/cmd/status.py deleted file mode 100644 index b27fb7f27..000000000 --- a/monasca_api/cmd/status.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright (c) 2018 SUSE LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -CLI interface for monasca status commands. -https://governance.openstack.org/tc/goals/stein/upgrade-checkers.html -""" - -import sys - -from oslo_config import cfg -from oslo_upgradecheck import common_checks -from oslo_upgradecheck import upgradecheck - - -def _(message): - # TODO(joadavis): simplified localization, Monasca not using oslo_i18n - return message - - -class Checks(upgradecheck.UpgradeCommands): - - """Various upgrade checks should be added as separate methods in this class - and added to _upgrade_checks tuple. - """ - - # The format of the check functions is to return an - # oslo_upgradecheck.upgradecheck.Result - # object with the appropriate - # oslo_upgradecheck.upgradecheck.Code and details set. - # If the check hits warnings or failures then those should be stored - # in the returned Result's "details" attribute. The - # summary will be rolled up at the end of the check() method. 
- _upgrade_checks = ( - (_('Policy File JSON to YAML Migration'), - (common_checks.check_policy_json, {'conf': cfg.CONF})), - ) - - -def main(): - return upgradecheck.main( - cfg.CONF, project='monasca', upgrade_command=Checks()) - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/monasca_api/common/__init__.py b/monasca_api/common/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/monasca_api/common/messaging/__init__.py b/monasca_api/common/messaging/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/monasca_api/common/messaging/exceptions.py b/monasca_api/common/messaging/exceptions.py deleted file mode 100644 index 0a0450b9a..000000000 --- a/monasca_api/common/messaging/exceptions.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2014 Hewlett-Packard -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -class MessageQueueException(Exception): - pass diff --git a/monasca_api/common/messaging/fake_publisher.py b/monasca_api/common/messaging/fake_publisher.py deleted file mode 100644 index b0807de32..000000000 --- a/monasca_api/common/messaging/fake_publisher.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright 2014 Hewlett-Packard -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from monasca_api.common.messaging import publisher - - -class FakePublisher(publisher.Publisher): - - def __init__(self, topic): - pass - - def send_message(self, message): - pass diff --git a/monasca_api/common/messaging/kafka_publisher.py b/monasca_api/common/messaging/kafka_publisher.py deleted file mode 100644 index 21eb9d250..000000000 --- a/monasca_api/common/messaging/kafka_publisher.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright 2014,2017 Hewlett-Packard -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
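# Editor's sketch mirroring the Checks class above: each _upgrade_checks
# entry pairs a display name with either a check callable or a
# (func, kwargs) tuple, and the check returns an oslo_upgradecheck Result.
# A trivial custom check:
from oslo_upgradecheck import upgradecheck


def _sample_check(**kwargs):
    # A real check would inspect configuration or the database; a failing
    # check would use Code.FAILURE and explain itself in the details string.
    return upgradecheck.Result(upgradecheck.Code.SUCCESS, 'nothing to do')

# _upgrade_checks = (('Sample check', (_sample_check, {})),)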
- -from monasca_common.kafka import client_factory -import monasca_common.kafka_lib.common as kafka_common -from oslo_config import cfg -from oslo_log import log - -from monasca_api.common.messaging import exceptions -from monasca_api.common.messaging import publisher - -LOG = log.getLogger(__name__) - - -class KafkaPublisher(publisher.Publisher): - def __init__(self, topic): - if not cfg.CONF.kafka.uri: - raise Exception('Kafka is not configured correctly! ' - 'Use configuration file to specify Kafka ' - 'uri, for example: ' - 'uri=192.168.1.191:9092') - - self.uri = cfg.CONF.kafka.uri - self.topic = topic - self.group = cfg.CONF.kafka.group - self.wait_time = cfg.CONF.kafka.wait_time - self.is_async = cfg.CONF.kafka.is_async - self.ack_time = cfg.CONF.kafka.ack_time - self.max_retry = cfg.CONF.kafka.max_retry - self.auto_commit = cfg.CONF.kafka.auto_commit - self.compact = cfg.CONF.kafka.compact - self.partitions = cfg.CONF.kafka.partitions - self.drop_data = cfg.CONF.kafka.drop_data - - config = {'queue.buffering.max.messages': - cfg.CONF.kafka.queue_buffering_max_messages} - self._producer = client_factory.get_kafka_producer( - self.uri, cfg.CONF.kafka.legacy_kafka_client_enabled, **config) - - def close(self): - pass - - def send_message(self, message): - try: - self._producer.publish(self.topic, message) - - except (kafka_common.KafkaUnavailableError, - kafka_common.LeaderNotAvailableError): - LOG.exception('Error occurred while posting data to Kafka.') - raise exceptions.MessageQueueException() - except Exception: - LOG.exception('Unknown error.') - raise exceptions.MessageQueueException() diff --git a/monasca_api/common/messaging/message_formats/__init__.py b/monasca_api/common/messaging/message_formats/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/monasca_api/common/messaging/message_formats/metrics.py b/monasca_api/common/messaging/message_formats/metrics.py deleted file mode 100644 index dd66e173d..000000000 --- a/monasca_api/common/messaging/message_formats/metrics.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright 2014 Hewlett-Packard -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
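# Editor's usage sketch (the error mapping is illustrative, not taken from
# the retired code): KafkaPublisher above raises MessageQueueException on
# any Kafka failure, which a resource handler might translate into a
# retryable HTTP 503 for the caller.
import falcon

from monasca_api.common.messaging import exceptions


def publish_or_503(publisher, res, payload):
    try:
        publisher.send_message(payload)
        res.status = falcon.HTTP_204
    except exceptions.MessageQueueException:
        res.status = falcon.HTTP_503  # queue unreachable; client may retry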
- -from oslo_utils import timeutils - -from monasca_api.common.rest import utils as rest_utils - - -def transform(metrics, tenant_id, region): - transformed_metric = {'metric': {}, - 'meta': {'tenantId': tenant_id, 'region': region}, - 'creation_time': timeutils.utcnow_ts()} - - if isinstance(metrics, list): - transformed_metrics = [] - for metric in metrics: - transformed_metric['metric'] = metric - transformed_metrics.append(rest_utils.as_json(transformed_metric)) - return transformed_metrics - else: - transformed_metric['metric'] = metrics - return [rest_utils.as_json(transformed_metric)] diff --git a/monasca_api/common/messaging/publisher.py b/monasca_api/common/messaging/publisher.py deleted file mode 100644 index 8456a2f01..000000000 --- a/monasca_api/common/messaging/publisher.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright 2014 Hewlett-Packard -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import abc - -import six - - -@six.add_metaclass(abc.ABCMeta) -class Publisher(object): - @abc.abstractmethod - def send_message(self, message): - return diff --git a/monasca_api/common/policy/__init__.py b/monasca_api/common/policy/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/monasca_api/common/policy/i18n.py b/monasca_api/common/policy/i18n.py deleted file mode 100644 index 8179216a1..000000000 --- a/monasca_api/common/policy/i18n.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright 2014 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""oslo.i18n integration module. - -See https://docs.openstack.org/oslo.i18n/latest/user/index.html - -""" - -import oslo_i18n - -DOMAIN = 'monasca' - -_translators = oslo_i18n.TranslatorFactory(domain=DOMAIN) - -# The primary translation function using the well-known name "_" -_ = _translators.primary - -# Translators for log levels. -# -# The abbreviated names are meant to reflect the usual use of a short -# name like '_'. The "L" is for "log" and the other letter comes from -# the level. 
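# Editor's example of the envelope produced by transform() above (values
# and key order illustrative): each metric is wrapped with tenant, region
# and a creation timestamp, then serialized to JSON one metric at a time.
#
#   transform({'name': 'cpu.idle_perc', 'value': 97.0}, 'abc123', 'RegionOne')
#   -> ['{"metric": {"name": "cpu.idle_perc", "value": 97.0},
#        "meta": {"tenantId": "abc123", "region": "RegionOne"},
#        "creation_time": 1507235432}']
#
# Reusing the same envelope dict across the list case is safe because each
# element is serialized immediately inside the loop.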
-_LI = _translators.log_info -_LW = _translators.log_warning -_LE = _translators.log_error -_LC = _translators.log_critical - - -def translate(value, user_locale): - return oslo_i18n.translate(value, user_locale) - - -def get_available_languages(): - return oslo_i18n.get_available_languages(DOMAIN) diff --git a/monasca_api/common/policy/policy_engine.py b/monasca_api/common/policy/policy_engine.py deleted file mode 100644 index 3f9d4abdc..000000000 --- a/monasca_api/common/policy/policy_engine.py +++ /dev/null @@ -1,256 +0,0 @@ -# Copyright 2017 OP5 AB -# Copyright 2017 FUJITSU LIMITED -# Copyright (c) 2011 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -import copy -import re -import sys - -import logging -from oslo_config import cfg -from oslo_policy import opts -from oslo_policy import policy - -from monasca_api.common.policy.i18n import _LW - -CONF = cfg.CONF -LOG = logging.getLogger(__name__) -POLICIES = None -USER_BASED_RESOURCES = ['os-keypairs'] -KEY_EXPR = re.compile(r'%\((\w+)\)s') - - -_ENFORCER = None -# oslo_policy will read the policy configuration file again when the file -# is changed in runtime so the old policy rules will be saved to -# saved_file_rules and used to compare with new rules to determine -# whether the rules were updated. -saved_file_rules = [] - - -# TODO(gmann): Remove setting the default value of config policy_file -# once oslo_policy change the default value to 'policy.yaml'. -# https://github.com/openstack/oslo.policy/blob/a626ad12fe5a3abd49d70e3e5b95589d279ab578/oslo_policy/opts.py#L49 -DEFAULT_POLICY_FILE = 'policy.yaml' -opts.set_defaults(cfg.CONF, DEFAULT_POLICY_FILE) - - -def reset(): - """Reset Enforcer class.""" - global _ENFORCER - if _ENFORCER: - _ENFORCER.clear() - _ENFORCER = None - - -def init(policy_file=None, rules=None, default_rule=None, use_conf=True): - """Init an Enforcer class. - - :param policy_file: Custom policy file to use, if none is specified, - `CONF.policy_file` will be used. - :param rules: Default dictionary / Rules to use. It will be - considered just in the first instantiation. - :param default_rule: Default rule to use, CONF.default_rule will - be used if none is specified. - :param use_conf: Whether to load rules from config file. - """ - - global _ENFORCER - global saved_file_rules - - if not _ENFORCER: - _ENFORCER = policy.Enforcer(CONF, - policy_file=policy_file, - rules=rules, - default_rule=default_rule, - use_conf=use_conf - ) - register_rules(_ENFORCER) - _ENFORCER.load_rules() - # Only the rules which are loaded from file may be changed - current_file_rules = _ENFORCER.file_rules - current_file_rules = _serialize_rules(current_file_rules) - - if saved_file_rules != current_file_rules: - _warning_for_deprecated_user_based_rules(current_file_rules) - saved_file_rules = copy.deepcopy(current_file_rules) - - -def _serialize_rules(rules): - """Serialize all the Rule object as string. - - New string is used to compare the rules list. 
- """ - result = [(rule_name, str(rule)) for rule_name, rule in rules.items()] - return sorted(result, key=lambda rule: rule[0]) - - -def _warning_for_deprecated_user_based_rules(rules): - """Warning user based policy enforcement used in the rule but the rule - doesn't support it. - """ - for rule in rules: - # We will skip the warning for the resources which support user based - # policy enforcement. - if [resource for resource in USER_BASED_RESOURCES - if resource in rule[0]]: - continue - if 'user_id' in KEY_EXPR.findall(rule[1]): - LOG.warning(_LW("The user_id attribute isn't supported in the " - "rule '%s'. All the user_id based policy " - "enforcement will be removed in the " - "future."), rule[0]) - - -def register_rules(enforcer): - """Register default policy rules.""" - rules = POLICIES.list_rules() - enforcer.register_defaults(rules) - - -def authorize(context, action, target, do_raise=True): - """Verify that the action is valid on the target in this context. - - :param context: monasca project context - :param action: String representing the action to be checked. This - should be colon separated for clarity. - :param target: Dictionary representing the object of the action for - object creation. This should be a dictionary representing - the location of the object e.g. - ``{'project_id': 'context.project_id'}`` - :param do_raise: if True (the default), raises PolicyNotAuthorized, - if False returns False - :type context: object - :type action: str - :type target: dict - :type do_raise: bool - :return: returns a non-False value (not necessarily True) if authorized, - and the False if not authorized and do_raise if False - - :raises oslo_policy.policy.PolicyNotAuthorized: if verification fails - """ - init() - credentials = context.to_policy_values() - try: - result = _ENFORCER.authorize(action, target, credentials, - do_raise=do_raise, action=action) - return result - except policy.PolicyNotRegistered: - LOG.exception('Policy not registered') - raise - except Exception: - LOG.debug('Policy check for %(action)s failed with credentials ' - '%(credentials)s', - {'action': action, 'credentials': credentials}) - raise - - -def check_is_admin(context): - """Check if roles contains 'admin' role according to policy settings.""" - init() - credentials = context.to_policy_values() - target = credentials - return _ENFORCER.authorize('admin_required', target, credentials) - - -def set_rules(rules, overwrite=True, use_conf=False): # pragma: no cover - """Set rules based on the provided dict of rules. - - Note: - Used in tests only. - - :param rules: New rules to use. It should be an instance of dict - :param overwrite: Whether to overwrite current rules or update them - with the new rules. - :param use_conf: Whether to reload rules from config file. - """ - init(use_conf=False) - _ENFORCER.set_rules(rules, overwrite, use_conf) - - -def verify_deprecated_policy(old_policy, new_policy, default_rule, context): - """Check the rule of the deprecated policy action - - If the current rule of the deprecated policy action is set to a non-default - value, then a warning message is logged stating that the new policy - action should be used to dictate permissions as the old policy action is - being deprecated. 
- - :param old_policy: policy action that is being deprecated - :param new_policy: policy action that is replacing old_policy - :param default_rule: the old_policy action default rule value - :param context: the monasca context - """ - - if _ENFORCER: - current_rule = str(_ENFORCER.rules[old_policy]) - else: - current_rule = None - - if current_rule != default_rule: - LOG.warning("Start using the new action '{0}'. The existing " - "action '{1}' is being deprecated and will be " - "removed in future release.".format(new_policy, - old_policy)) - target = {'project_id': context.project_id, - 'user_id': context.user_id} - - return authorize(context=context, action=old_policy, target=target) - else: - return False - - -def get_rules(): - if _ENFORCER: - return _ENFORCER.rules - - -def get_enforcer(): - # This method is for use by oslopolicy CLI scripts. Those scripts need the - # 'output-file' and 'namespace' options, but having those in sys.argv means - # loading the project config options will fail as those are not expected to - # be present. So we pass in an arg list with those stripped out. - conf_args = [] - # Start at 1 because cfg.CONF expects the equivalent of sys.argv[1:] - i = 1 - while i < len(sys.argv): - if sys.argv[i].strip('-') in ['namespace', 'output-file']: - i += 2 - continue - conf_args.append(sys.argv[i]) - i += 1 - - cfg.CONF(conf_args, project='monasca') - init() - return _ENFORCER - - -@policy.register('is_admin') -class IsAdminCheck(policy.Check): - """An explicit check for is_admin.""" - - def __init__(self, kind, match): - """Initialize the check.""" - - self.expected = (match.lower() == 'true') - - super(IsAdminCheck, self).__init__(kind, str(self.expected)) - - def __call__(self, target, creds, enforcer): - """Determine whether is_admin matches the requested value.""" - - return creds['is_admin'] == self.expected diff --git a/monasca_api/common/repositories/__init__.py b/monasca_api/common/repositories/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/monasca_api/common/repositories/alarm_definitions_repository.py b/monasca_api/common/repositories/alarm_definitions_repository.py deleted file mode 100644 index 90956eb7d..000000000 --- a/monasca_api/common/repositories/alarm_definitions_repository.py +++ /dev/null @@ -1,68 +0,0 @@ -# Copyright 2014,2016 Hewlett Packard Enterprise Development Company, L.P. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
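# Editor's sketch (the action name and rule string are made up):
# IsAdminCheck above registers an 'is_admin:<bool>' primitive for rule
# strings, and authorize() is the single enforcement entry point once
# init() has run.
#
#   # policy.yaml -- hypothetical rule using the primitive
#   # "admin_required": "is_admin:True or role:monasca-admin"
#
# def enforce_read(context):
#     target = {'project_id': context.project_id,
#               'user_id': context.user_id}
#     return policy_engine.authorize(context, 'api:metrics:get', target)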
- -import abc - -import six - - -@six.add_metaclass(abc.ABCMeta) -class AlarmDefinitionsRepository(object): - def __init__(self): - super(AlarmDefinitionsRepository, self).__init__() - - @abc.abstractmethod - def create_alarm_definition(self, tenant_id, name, expression, - sub_expr_list, description, severity, match_by, - alarm_actions, undetermined_actions, - ok_action): - pass - - @abc.abstractmethod - def get_sub_alarms(self, tenant_id, alarm_definition_id): - pass - - @abc.abstractmethod - def get_alarm_metrics(self, tenant_id, alarm_definition_id): - pass - - @abc.abstractmethod - def delete_alarm_definition(self, tenant_id, alarm_definition_id): - pass - - @abc.abstractmethod - def get_sub_alarm_definitions(self, alarm_definition_id): - pass - - @abc.abstractmethod - def get_alarm_definition(self, tenant_id, id): - pass - - @abc.abstractmethod - def get_alarm_definitions(self, tenant_id, name, dimensions, severity, sort_by, - offset, limit): - pass - - @abc.abstractmethod - def update_or_patch_alarm_definition(self, tenant_id, id, - name, - expression, - sub_expr_list, - actions_enabled, - description, - alarm_actions, - ok_actions, - undetermined_actions, - match_by, severity, patch): - pass diff --git a/monasca_api/common/repositories/alarms_repository.py b/monasca_api/common/repositories/alarms_repository.py deleted file mode 100644 index 683fca283..000000000 --- a/monasca_api/common/repositories/alarms_repository.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright 2014-2016 Hewlett Packard Enterprise Development Company LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
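# Editor's note with a miniature of the pattern above: every repository in
# this package is a six.add_metaclass(abc.ABCMeta) base whose abstract
# methods define the driver contract, plus one concrete class per backend.
import abc

import six


@six.add_metaclass(abc.ABCMeta)
class KeyValueRepository(object):
    @abc.abstractmethod
    def get(self, tenant_id, key):
        pass


class InMemoryKeyValueRepository(KeyValueRepository):
    def __init__(self):
        self._data = {}

    def get(self, tenant_id, key):
        return self._data.get((tenant_id, key))

# KeyValueRepository() raises TypeError (abstract method unimplemented);
# InMemoryKeyValueRepository() instantiates fine.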
- -import abc - -import six - - -@six.add_metaclass(abc.ABCMeta) -class AlarmsRepository(object): - - def __init__(self): - - super(AlarmsRepository, self).__init__() - - @abc.abstractmethod - def get_alarm_metrics(self, alarm_id): - pass - - @abc.abstractmethod - def get_sub_alarms(self, tenant_id, alarm_id): - pass - - @abc.abstractmethod - def update_alarm(self, tenant_id, alarm_id, state, lifecycle_state, link): - pass - - @abc.abstractmethod - def delete_alarm(self, tenant_id, id): - pass - - @abc.abstractmethod - def get_alarm(self, tenant_id, id): - pass - - @abc.abstractmethod - def get_alarms(self, tenant_id, query_parms, offset, limit): - pass - - @abc.abstractmethod - def get_alarms_count(self, tenant_id, query_parms, offset, limit): - pass diff --git a/monasca_api/common/repositories/cassandra/__init__.py b/monasca_api/common/repositories/cassandra/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/monasca_api/common/repositories/cassandra/metrics_repository.py b/monasca_api/common/repositories/cassandra/metrics_repository.py deleted file mode 100644 index 955d0c7d1..000000000 --- a/monasca_api/common/repositories/cassandra/metrics_repository.py +++ /dev/null @@ -1,1091 +0,0 @@ -# (C) Copyright 2015,2016 Hewlett Packard Enterprise Development Company LP -# (C) Copyright 2017-2018 SUSE LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import binascii -from collections import namedtuple -from datetime import datetime -from datetime import timedelta -import itertools -import six -import urllib - -from cassandra.auth import PlainTextAuthProvider -from cassandra.cluster import Cluster -from cassandra.cluster import DCAwareRoundRobinPolicy -from cassandra.cluster import TokenAwarePolicy -from cassandra.query import FETCH_SIZE_UNSET -from cassandra.query import SimpleStatement -from monasca_api.common.rest import utils as rest_utils -from oslo_config import cfg -from oslo_log import log -from oslo_utils import encodeutils -from oslo_utils import timeutils - -from monasca_api.common.repositories import exceptions -from monasca_api.common.repositories import metrics_repository - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - -LIMIT_CLAUSE = 'limit %s' -ALLOW_FILTERING = 'allow filtering' - -MEASUREMENT_LIST_CQL = ('select time_stamp, value, value_meta ' - 'from measurements where %s %s %s %s') -METRIC_ID_EQ = 'metric_id = %s' -METRIC_ID_IN = 'metric_id in %s' -OFFSET_TIME_GT = "and time_stamp > %s" -START_TIME_GE = "and time_stamp >= %s" -END_TIME_LE = "and time_stamp <= %s" - -METRIC_LIST_CQL = ('select metric_name, dimensions, metric_id ' - 'from metrics where %s %s %s %s %s %s %s %s %s %s') -REGION_EQ = 'region = %s' -TENANT_EQ = 'and tenant_id = %s' -METRIC_NAME_EQ = 'and metric_name = %s' -DIMENSIONS_CONTAINS = 'and dimensions contains %s ' -DIMENSIONS_NAME_CONTAINS = 'and dimension_names contains %s ' -CREATED_TIME_LE = "and created_at <= %s" -UPDATED_TIME_GE = "and updated_at >= %s" -DIMENSIONS_GT = 'and dimensions > %s' - -DIMENSION_VALUE_BY_METRIC_CQL = ('select dimension_value as value from metrics_dimensions ' - 'where region = ? and tenant_id = ? and metric_name = ? ' - 'and dimension_name = ? group by dimension_value') - -DIMENSION_VALUE_CQL = ('select value from dimensions ' - 'where region = ? and tenant_id = ? and name = ? ' - 'group by value order by value') - -DIMENSION_NAME_BY_METRIC_CQL = ('select dimension_name as name from metrics_dimensions where ' - 'region = ? and tenant_id = ? and metric_name = ? ' - 'group by dimension_name order by dimension_name') - -DIMENSION_NAME_CQL = ('select name from dimensions where region = ? and tenant_id = ? ' - 'group by name allow filtering') - -METRIC_NAME_BY_DIMENSION_CQL = ('select metric_name from dimensions_metrics where region = ? and ' - 'tenant_id = ? and dimension_name = ? and dimension_value = ? ' - 'group by metric_name order by metric_name') - -METRIC_NAME_BY_DIMENSION_OFFSET_CQL = ( - 'select metric_name from dimensions_metrics where region = ? and ' - 'tenant_id = ? and dimension_name = ? and dimension_value = ? and ' - 'metric_name >= ?' - 'group by metric_name order by metric_name') - -METRIC_NAME_CQL = ('select distinct region, tenant_id, metric_name from metrics_dimensions ' - 'where region = ? and tenant_id = ? allow filtering') - -METRIC_NAME_OFFSET_CQL = ('select distinct region, tenant_id, metric_name from metrics_dimensions ' - 'where region = ? and tenant_id = ? and metric_name >= ? allow filtering') - -METRIC_BY_ID_CQL = ('select region, tenant_id, metric_name, dimensions from measurements ' - 'where metric_id = ? 
limit 1') - -Metric = namedtuple('metric', 'id name dimensions') - -ALARM_HISTORY_CQL = ( - 'select tenant_id, alarm_id, time_stamp, metric, new_state, old_state, reason, reason_data, ' - 'sub_alarms from alarm_state_history where %s %s %s %s %s') - -ALARM_ID_EQ = 'and alarm_id = %s' - -ALARM_ID_IN = 'and alarm_id in %s' - -ALARM_TENANT_ID_EQ = 'tenant_id = %s' - - -class MetricsRepository(metrics_repository.AbstractMetricsRepository): - def __init__(self): - - try: - self.conf = cfg.CONF - - if self.conf.cassandra.user: - auth_provider = PlainTextAuthProvider(username=self.conf.cassandra.user, - password=self.conf.cassandra.password) - else: - auth_provider = None - - self.cluster = Cluster(self.conf.cassandra.contact_points, - port=self.conf.cassandra.port, - auth_provider=auth_provider, - connect_timeout=self.conf.cassandra.connection_timeout, - load_balancing_policy=TokenAwarePolicy( - DCAwareRoundRobinPolicy( - local_dc=self.conf.cassandra.local_data_center)) - ) - self.session = self.cluster.connect(self.conf.cassandra.keyspace) - - self.dim_val_by_metric_stmt = self.session.prepare(DIMENSION_VALUE_BY_METRIC_CQL) - - self.dim_val_stmt = self.session.prepare(DIMENSION_VALUE_CQL) - - self.dim_name_by_metric_stmt = self.session.prepare(DIMENSION_NAME_BY_METRIC_CQL) - - self.dim_name_stmt = self.session.prepare(DIMENSION_NAME_CQL) - - self.metric_name_by_dimension_stmt = self.session.prepare(METRIC_NAME_BY_DIMENSION_CQL) - - self.metric_name_by_dimension_offset_stmt = self.session.prepare( - METRIC_NAME_BY_DIMENSION_OFFSET_CQL) - - self.metric_name_stmt = self.session.prepare(METRIC_NAME_CQL) - - self.metric_name_offset_stmt = self.session.prepare(METRIC_NAME_OFFSET_CQL) - - self.metric_by_id_stmt = self.session.prepare(METRIC_BY_ID_CQL) - - except Exception as ex: - LOG.exception(ex) - raise exceptions.RepositoryException(ex) - - self.epoch = datetime.utcfromtimestamp(0) - - def list_dimension_values(self, tenant_id, region, metric_name, - dimension_name, start_timestamp=None, - end_timestamp=None): - - if start_timestamp or end_timestamp: - # NOTE(brtknr): For more details, see story - # https://storyboard.openstack.org/#!/story/2006204 - LOG.info("Scoping by timestamp not implemented for cassandra.") - - try: - if metric_name: - rows = self.session.execute( - self.dim_val_by_metric_stmt, - [region, tenant_id, metric_name, dimension_name]) - else: - rows = self.session.execute( - self.dim_val_stmt, - [region, tenant_id, dimension_name]) - - except Exception as ex: - LOG.exception(ex) - raise exceptions.RepositoryException(ex) - - json_dim_value_list = [] - - if not rows: - return json_dim_value_list - - for row in rows: - json_dim_value_list.append({u'dimension_value': row.value}) - - json_dim_value_list.sort(key=lambda x: x[u'dimension_value']) - - return json_dim_value_list - - def list_dimension_names(self, tenant_id, region, metric_name, - start_timestamp=None, end_timestamp=None): - - if start_timestamp or end_timestamp: - # NOTE(brtknr): For more details, see story - # https://storyboard.openstack.org/#!/story/2006204 - LOG.info("Scoping by timestamp not implemented for cassandra.") - - try: - if metric_name: - rows = self.session.execute( - self.dim_name_by_metric_stmt, - [region, tenant_id, metric_name]) - ordered = True - else: - rows = self.session.execute( - self.dim_name_stmt, - [region, tenant_id]) - ordered = False - - except Exception as ex: - LOG.exception(ex) - raise exceptions.RepositoryException(ex) - - if not rows: - return [] - - json_dim_name_list = 
[{u'dimension_name': row.name} for row in rows] - - if not ordered: - json_dim_name_list.sort(key=lambda x: x[u'dimension_name']) - - return json_dim_name_list - - def list_metrics(self, tenant_id, region, name, dimensions, offset, limit, start_time=None, - end_time=None): - - offset_name = None - offset_dimensions = [] - names = [] - metric_list = [] - offset_futures = [] - non_offset_futures = [] - - try: - if offset: - offset_metric = self._get_metric_by_id(offset) - if offset_metric: - offset_name = offset_metric.name - offset_dimensions = offset_metric.dimensions - - if not name: - names = self._list_metric_names(tenant_id, region, dimensions, offset=offset_name) - if names: - names = [elem['name'] for elem in names] - else: - names.append(name) - - if not names: - return metric_list - - for name in names: - if name == offset_name: - futures = self._list_metrics_by_name( - tenant_id, - region, - name, - dimensions, - offset_dimensions, - limit, - start_time=None, - end_time=None) - if offset_dimensions and dimensions: - offset_futures.extend(futures) - else: - non_offset_futures.extend(futures) - else: - non_offset_futures.extend( - self._list_metrics_by_name(tenant_id, region, name, dimensions, None, limit, - start_time=None, end_time=None)) - - # manually filter out metrics by the offset dimension - for future in offset_futures: - rows = future.result() - for row in rows: - if offset_dimensions >= row.dimensions: - continue - - metric_list.append(self._process_metric_row(row)) - - for future in non_offset_futures: - metric_list.extend((self._process_metric_row(row) for row in future.result())) - - return metric_list - - except Exception as ex: - LOG.exception(ex) - raise exceptions.RepositoryException(ex) - - @staticmethod - def _process_metric_row(row): - dim_map = {} - for d in row.dimensions: - pair = d.split('\t') - dim_map[pair[0]] = pair[1] - - if row.metric_id is None: - LOG.error( - 'Metric is missing metric_id, using metric_id=None' - ' name: {}, dimensions: {}'.format( - row.metric_name, row.dimensions)) - return {'id': None, - 'name': row.metric_name, - 'dimensions': dim_map} - - metric = {'id': binascii.hexlify(bytearray(row.metric_id)), - 'name': row.metric_name, - 'dimensions': dim_map} - - return metric - - def _list_metrics_by_name( - self, - tenant_id, - region, - name, - dimensions, - dimension_offset, - limit, - start_time=None, - end_time=None): - - or_dimensions = [] - sub_dimensions = {} - futures = [] - - if not dimensions: - query = self._build_metrics_by_name_query( - tenant_id, - region, - name, - dimensions, - None, - start_time, - end_time, - dimension_offset, - limit) - futures.append(self.session.execute_async(query[0], query[1])) - return futures - - wildcard_dimensions = [] - for dim_name, dim_value in dimensions.items(): - if not dim_value: - wildcard_dimensions.append(dim_name) - - elif '|' in dim_value: - - def f(val): - return {dim_name: val} - - or_dimensions.append(list(map(f, sorted(dim_value.split('|'))))) - - else: - sub_dimensions[dim_name] = dim_value - - if or_dimensions: - or_dims_list = list(itertools.product(*or_dimensions)) - - for or_dims_tuple in or_dims_list: - extracted_dimensions = sub_dimensions.copy() - - for dims in iter(or_dims_tuple): - for k, v in dims.items(): - extracted_dimensions[k] = v - - query = self._build_metrics_by_name_query( - tenant_id, - region, - name, - extracted_dimensions, - wildcard_dimensions, - start_time, - end_time, - dimension_offset, - limit) - - futures.append(self.session.execute_async(query[0], 
query[1])) - - else: - query = self._build_metrics_by_name_query( - tenant_id, - region, - name, - sub_dimensions, - wildcard_dimensions, - start_time, - end_time, - dimension_offset, - limit) - futures.append(self.session.execute_async(query[0], query[1])) - - return futures - - def _get_metric_by_id(self, metric_id): - - rows = self.session.execute(self.metric_by_id_stmt, [bytearray.fromhex(metric_id)]) - - if rows: - return Metric(id=metric_id, name=rows[0].metric_name, dimensions=rows[0].dimensions) - - return None - - def _build_metrics_by_name_query( - self, - tenant_id, - region, - name, - dimensions, - wildcard_dimensions, - start_time, - end_time, - dim_offset, - limit): - - conditions = [REGION_EQ, TENANT_EQ] - params = [region, tenant_id.encode('utf8')] - - if name: - conditions.append(METRIC_NAME_EQ) - params.append(name) - else: - conditions.append('') - - if dimensions: - conditions.append(DIMENSIONS_CONTAINS * len(dimensions)) - params.extend( - [self._create_dimension_value_entry(dim_name, dim_value) - for dim_name, dim_value in dimensions.items()]) - else: - conditions.append('') - - if wildcard_dimensions: - conditions.append(DIMENSIONS_NAME_CONTAINS * len(wildcard_dimensions)) - params.extend(wildcard_dimensions) - else: - conditions.append('') - - if dim_offset and not dimensions: - # cassandra does not allow using both contains and GT in collection column - conditions.append(DIMENSIONS_GT) - params.append(dim_offset) - else: - conditions.append('') - - if start_time: - conditions.append(UPDATED_TIME_GE % start_time) - else: - conditions.append('') - - if end_time: - conditions.append(CREATED_TIME_LE % end_time) - else: - conditions.append('') - - if limit: - conditions.append(LIMIT_CLAUSE) - params.append(limit) - else: - conditions.append('') - - if (not name) or dimensions or wildcard_dimensions or start_time or end_time: - conditions.append(ALLOW_FILTERING) - else: - conditions.append('') - - return METRIC_LIST_CQL % tuple(conditions), params - - @staticmethod - def _create_dimension_value_entry(name, value): - return '%s\t%s' % (name, value) - - def list_metric_names(self, tenant_id, region, dimensions): - return self._list_metric_names(tenant_id, region, dimensions) - - def _list_metric_names(self, tenant_id, region, dimensions, offset=None): - - or_dimensions = [] - single_dimensions = {} - - if dimensions: - for key, value in dimensions.items(): - if not value: - continue - - elif '|' in value: - def f(val): - return {key: val} - - or_dimensions.append(list(map(f, sorted(value.split('|'))))) - - else: - single_dimensions[key] = value - - if or_dimensions: - - names = [] - or_dims_list = list(itertools.product(*or_dimensions)) - - for or_dims_tuple in or_dims_list: - extracted_dimensions = single_dimensions.copy() - - for dims in iter(or_dims_tuple): - for k, v in dims.items(): - extracted_dimensions[k] = v - - names.extend( - self._list_metric_names_single_dimension_value( - tenant_id, region, extracted_dimensions, offset)) - - names.sort(key=lambda x: x[u'name']) - return names - - else: - names = self._list_metric_names_single_dimension_value( - tenant_id, region, single_dimensions, offset) - names.sort(key=lambda x: x[u'name']) - return names - - def _list_metric_names_single_dimension_value(self, tenant_id, region, dimensions, offset=None): - - try: - futures = [] - if dimensions: - for name, value in dimensions.items(): - if offset: - futures.append( - self.session.execute_async( - self.metric_name_by_dimension_offset_stmt, [ - region, tenant_id, name, 
value, offset])) - else: - futures.append( - self.session.execute_async( - self.metric_name_by_dimension_stmt, [ - region, tenant_id, name, value])) - - else: - if offset: - futures.append( - self.session.execute_async( - self.metric_name_offset_stmt, [ - region, tenant_id, offset])) - else: - futures.append( - self.session.execute_async( - self.metric_name_stmt, [ - region, tenant_id])) - - names_list = [] - - for future in futures: - rows = future.result() - tmp = set() - for row in rows: - tmp.add(row.metric_name) - - names_list.append(tmp) - - return [{u'name': v} for v in set.intersection(*names_list)] - - except Exception as ex: - LOG.exception(ex) - raise exceptions.RepositoryException(ex) - - def measurement_list(self, tenant_id, region, name, dimensions, - start_timestamp, end_timestamp, offset, limit, - merge_metrics_flag, group_by): - - metrics = self.list_metrics(tenant_id, region, name, dimensions, None, None) - - if offset: - tmp = offset.split("_") - if len(tmp) > 1: - offset_id = tmp[0] - offset_timestamp = tmp[1] - else: - offset_id = None - offset_timestamp = offset - else: - offset_timestamp = None - offset_id = None - - if not metrics: - return None - elif len(metrics) > 1: - if not merge_metrics_flag and not group_by: - raise exceptions.MultipleMetricsException(self.MULTIPLE_METRICS_MESSAGE) - - try: - if len(metrics) > 1 and not group_by: - # offset is controlled only by offset_timestamp when the group by option - # is not enabled - count, series_list = self._query_merge_measurements(metrics, - dimensions, - start_timestamp, - end_timestamp, - offset_timestamp, - limit) - return series_list - - if group_by: - if not isinstance(group_by, list): - group_by = group_by.split(',') - elif len(group_by) == 1: - group_by = group_by[0].split(',') - - if len(metrics) == 1 or group_by[0].startswith('*'): - if offset_id: - for index, metric in enumerate(metrics): - if metric['id'] == offset_id: - if index > 0: - metrics[0:index] = [] - break - - count, series_list = self._query_measurements(metrics, - start_timestamp, - end_timestamp, - offset_timestamp, - limit) - - return series_list - - grouped_metrics = self._group_metrics(metrics, group_by, dimensions) - - if not grouped_metrics or len(grouped_metrics) == 0: - return None - - if offset_id: - found_offset = False - for outer_index, sublist in enumerate(grouped_metrics): - for inner_index, metric in enumerate(sublist): - if metric['id'] == offset_id: - found_offset = True - if inner_index > 0: - sublist[0:inner_index] = [] - break - if found_offset: - if outer_index > 0: - grouped_metrics[0:outer_index] = [] - break - - remaining = limit - series_list = [] - for sublist in grouped_metrics: - sub_count, results = self._query_merge_measurements(sublist, - sublist[0]['dimensions'], - start_timestamp, - end_timestamp, - offset_timestamp, - remaining) - - series_list.extend(results) - - if remaining: - remaining -= sub_count - if remaining <= 0: - break - - # offset_timestamp is used only in the first group, reset to None for - # subsequent groups - if offset_timestamp: - offset_timestamp = None - - return series_list - - except Exception as ex: - LOG.exception(ex) - raise exceptions.RepositoryException(ex) - - def _query_merge_measurements(self, metrics, dimensions, start_timestamp, end_timestamp, - offset_timestamp, limit): - results = [] - for metric in metrics: - if limit and len(metrics) > 1: - fetch_size = min(limit, max(1000, limit / len(metrics) + 2)) - else: - fetch_size = None - query = 
self._build_measurement_query(metric['id'], - start_timestamp, - end_timestamp, - offset_timestamp, - limit, - fetch_size) - results.append((metric, iter(self.session.execute_async(query[0], query[1]).result()))) - - return self._merge_series(results, dimensions, limit) - - def _query_measurements(self, metrics, start_timestamp, end_timestamp, - offset_timestamp, limit): - results = [] - for index, metric in enumerate(metrics): - if index == 0: - query = self._build_measurement_query(metric['id'], - start_timestamp, - end_timestamp, - offset_timestamp, - limit) - else: - if limit: - fetch_size = min(self.session.default_fetch_size, - max(1000, limit / min(index, 4))) - else: - fetch_size = self.session.default_fetch_size - query = self._build_measurement_query(metric['id'], - start_timestamp, - end_timestamp, - None, - limit, - fetch_size) - - results.append([metric, - iter(self.session.execute_async(query[0], query[1]).result())]) - - series_list = [] - count = 0 - for result in results: - measurements = [] - row = next(result[1], None) - while row: - measurements.append( - [self._isotime_msec(row.time_stamp), row.value, - rest_utils.from_json(row.value_meta) if row.value_meta else {}]) - count += 1 - if limit and count >= limit: - break - - row = next(result[1], None) - - series_list.append({'name': result[0]['name'], - 'id': result[0]['id'], - 'columns': ['timestamp', 'value', 'value_meta'], - 'measurements': measurements, - 'dimensions': result[0]['dimensions']}) - if limit and count >= limit: - break - - return count, series_list - - @staticmethod - def _build_measurement_query(metric_id, start_timestamp, - end_timestamp, offset_timestamp, - limit=None, fetch_size=FETCH_SIZE_UNSET): - conditions = [METRIC_ID_EQ] - decode_metric_id = metric_id if six.PY2 else metric_id.decode('utf-8') - params = [bytearray.fromhex(decode_metric_id)] - - if offset_timestamp: - conditions.append(OFFSET_TIME_GT) - params.append(offset_timestamp) - elif start_timestamp: - conditions.append(START_TIME_GE) - params.append(int(start_timestamp * 1000)) - else: - conditions.append('') - - if end_timestamp: - conditions.append(END_TIME_LE) - params.append(int(end_timestamp * 1000)) - else: - conditions.append('') - - if limit: - conditions.append(LIMIT_CLAUSE) - params.append(limit) - else: - conditions.append('') - - return SimpleStatement(MEASUREMENT_LIST_CQL % - tuple(conditions), fetch_size=fetch_size), params - - def _merge_series(self, series, dimensions, limit): - series_list = [] - - if not series: - return series_list - - measurements = [] - top_batch = [] - num_series = len(series) - for i in range(0, num_series): - row = next(series[i][1], None) - if row: - top_batch.append([i, - row.time_stamp, - row.value, - rest_utils.from_json(row.value_meta) if row.value_meta else {}]) - else: - num_series -= 1 - - top_batch.sort(key=lambda m: m[1], reverse=True) - - count = 0 - while (not limit or count < limit) and top_batch: - measurements.append([self._isotime_msec(top_batch[num_series - 1][1]), - top_batch[num_series - 1][2], - top_batch[num_series - 1][3]]) - count += 1 - row = next(series[top_batch[num_series - 1][0]][1], None) - if row: - top_batch[num_series - 1] = \ - [top_batch[num_series - 1][0], row.time_stamp, - row.value, rest_utils.from_json(row.value_meta) if row.value_meta else {}] - - top_batch.sort(key=lambda m: m[1], reverse=True) - else: - num_series -= 1 - top_batch.pop() - - series_list.append({'name': series[0][0]['name'], - 'id': series[0][0]['id'], - 'columns': ['timestamp', 'value', 
'value_meta'], - 'measurements': measurements, - 'dimensions': dimensions}) - - return count, series_list - - @staticmethod - def _group_metrics(metrics, group_by, search_by): - - grouped_metrics = {} - for metric in metrics: - key = '' - display_dimensions = dict(search_by.items()) - for name in group_by: - # '_' ensures the key with missing dimension is sorted lower - value = metric['dimensions'].get(name, '_') - if value != '_': - display_dimensions[name] = value - key = key + '='.join((urllib.quote_plus(name), urllib.quote_plus(value))) + '&' - - metric['dimensions'] = display_dimensions - - if key in grouped_metrics: - grouped_metrics[key].append(metric) - else: - grouped_metrics[key] = [metric] - - grouped_metrics = grouped_metrics.items() - grouped_metrics.sort(key=lambda k: k[0]) - return [x[1] for x in grouped_metrics] - - @staticmethod - def _isotime_msec(timestamp): - """Stringify datetime in ISO 8601 format + millisecond. - """ - st = timestamp.isoformat() - if '.' in st: - st = st[:23] + u'Z' - else: - st += u'.000Z' - return st - - def metrics_statistics(self, tenant_id, region, name, dimensions, - start_timestamp, end_timestamp, statistics, - period, offset, limit, merge_metrics_flag, - group_by): - - if not period: - period = 300 - else: - period = int(period) - - series_list = self.measurement_list(tenant_id, region, name, dimensions, - start_timestamp, end_timestamp, - offset, None, merge_metrics_flag, group_by) - - json_statistics_list = [] - - if not series_list: - return json_statistics_list - - statistics = [stat.lower() for stat in statistics] - - columns = [u'timestamp'] - - columns.extend([x for x in ['avg', 'min', 'max', 'count', 'sum'] if x in statistics]) - - start_time = datetime.utcfromtimestamp(start_timestamp) - if end_timestamp: - end_time = datetime.utcfromtimestamp(end_timestamp) - else: - end_time = datetime.utcnow() - - for series in series_list: - - if limit <= 0: - break - - measurements = series['measurements'] - - if not measurements: - continue - - first_measure = measurements[0] - first_measure_start_time = MetricsRepository._parse_time_string(first_measure[0]) - - # skip blank intervals at the beginning; find the start time of the first - # stat period that is not empty - stat_start_time = start_time + timedelta( - seconds=((first_measure_start_time - start_time).seconds / period) * period) - - stats_list = [] - stats_count = 0 - stats_sum = 0 - stats_min = stats_max = first_measure[1] - - for measurement in series['measurements']: - - time_stamp = MetricsRepository._parse_time_string(measurement[0]) - value = measurement[1] - - if (time_stamp - stat_start_time).seconds >= period: - - stat = MetricsRepository._create_stat(statistics, stat_start_time, stats_count, - stats_sum, stats_min, stats_max) - - stats_list.append(stat) - limit -= 1 - if limit <= 0: - break - - # initialize the new stat period - stats_sum = value - stats_count = 1 - stats_min = value - stats_max = value - stat_start_time += timedelta(seconds=period) - - else: - stats_min = min(stats_min, value) - stats_max = max(stats_max, value) - stats_count += 1 - stats_sum += value - - if stats_count: - stat = MetricsRepository._create_stat( - statistics, stat_start_time, stats_count, stats_sum, stats_min, stats_max) - stats_list.append(stat) - limit -= 1 - - stats_end_time = stat_start_time + timedelta(seconds=period) - timedelta(milliseconds=1) - if stats_end_time > end_time: - stats_end_time = end_time - - statistic = {u'name': encodeutils.safe_decode(name, 'utf-8'), - u'id': series['id'], -
u'dimensions': series['dimensions'], - u'columns': columns, - u'statistics': stats_list, - u'end_time': self._isotime_msec(stats_end_time)} - - json_statistics_list.append(statistic) - - return json_statistics_list - - @staticmethod - def _create_stat( - statistics, - timestamp, - stat_count=None, - stat_sum=None, - stat_min=None, - stat_max=None): - - stat = [MetricsRepository._isotime_msec(timestamp)] - - if not stat_count: - stat.extend([0] * len(statistics)) - - else: - if 'avg' in statistics: - stat.append(stat_sum / stat_count) - - if 'min' in statistics: - stat.append(stat_min) - - if 'max' in statistics: - stat.append(stat_max) - - if 'count' in statistics: - stat.append(stat_count) - - if 'sum' in statistics: - stat.append(stat_sum) - - return stat - - @staticmethod - def _parse_time_string(timestamp): - dt = timeutils.parse_isotime(timestamp) - dt = timeutils.normalize_time(dt) - return dt - - def alarm_history(self, tenant_id, alarm_id_list, - offset, limit, start_timestamp=None, - end_timestamp=None): - - try: - - json_alarm_history_list = [] - - if not alarm_id_list: - return json_alarm_history_list - - conditions = [ALARM_TENANT_ID_EQ] - params = [tenant_id.encode('utf8')] - if len(alarm_id_list) == 1: - conditions.append(ALARM_ID_EQ) - params.append(alarm_id_list[0]) - else: - conditions.append( - ' and alarm_id in ({}) '.format( - ','.join( - ['%s'] * - len(alarm_id_list)))) - for alarm_id in alarm_id_list: - params.append(alarm_id) - - if offset: - conditions.append(OFFSET_TIME_GT) - params.append(offset) - - elif start_timestamp: - conditions.append(START_TIME_GE) - params.append(int(start_timestamp * 1000)) - else: - conditions.append('') - - if end_timestamp: - conditions.append(END_TIME_LE) - params.append(int(end_timestamp * 1000)) - else: - conditions.append('') - - if limit: - conditions.append(LIMIT_CLAUSE) - params.append(limit + 1) - else: - conditions.append('') - - rows = self.session.execute(ALARM_HISTORY_CQL % tuple(conditions), params) - - if not rows: - return json_alarm_history_list - - sorted_rows = sorted(rows, key=lambda row: row.time_stamp) - - for (tenant_id, alarm_id, time_stamp, metrics, new_state, old_state, reason, - reason_data, sub_alarms) in sorted_rows: - - alarm = {u'timestamp': self._isotime_msec(time_stamp), - u'alarm_id': alarm_id, - u'metrics': rest_utils.from_json(metrics), - u'new_state': new_state, - u'old_state': old_state, - u'reason': reason, - u'reason_data': reason_data, - u'sub_alarms': rest_utils.from_json(sub_alarms), - u'id': str(int((time_stamp - self.epoch).total_seconds() * 1000))} - - if alarm[u'sub_alarms']: - - for sub_alarm in alarm[u'sub_alarms']: - sub_expr = sub_alarm['sub_alarm_expression'] - metric_def = sub_expr['metric_definition'] - sub_expr['metric_name'] = metric_def['name'] - sub_expr['dimensions'] = metric_def['dimensions'] - del sub_expr['metric_definition'] - - json_alarm_history_list.append(alarm) - - return json_alarm_history_list - - except Exception as ex: - LOG.exception(ex) - raise exceptions.RepositoryException(ex) - - @staticmethod - def check_status(): - try: - cluster = Cluster( - CONF.cassandra.contact_points - ) - session = cluster.connect(CONF.cassandra.keyspace) - session.shutdown() - except Exception as ex: - LOG.exception(str(ex)) - return False, str(ex) - return True, 'OK' diff --git a/monasca_api/common/repositories/constants.py b/monasca_api/common/repositories/constants.py deleted file mode 100644 index cedd99167..000000000 --- a/monasca_api/common/repositories/constants.py +++ 
/dev/null @@ -1 +0,0 @@ -PAGE_LIMIT = 10000 diff --git a/monasca_api/common/repositories/exceptions.py b/monasca_api/common/repositories/exceptions.py deleted file mode 100644 index 2bdb6ee0d..000000000 --- a/monasca_api/common/repositories/exceptions.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2014 Hewlett-Packard -# (C) Copyright 2015 Hewlett Packard Enterprise Development Company LP -# Copyright 2016 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from monasca_common.repositories.exceptions import AlreadyExistsException -from monasca_common.repositories.exceptions import DoesNotExistException -from monasca_common.repositories.exceptions import InvalidUpdateException -from monasca_common.repositories.exceptions import RepositoryException - - -class MultipleMetricsException(RepositoryException): - pass - - -class UnsupportedDriverException(Exception): - pass - - -__all__ = ( - 'AlreadyExistsException', - 'DoesNotExistException', - 'InvalidUpdateException', - 'RepositoryException', - 'MultipleMetricsException', - 'UnsupportedDriverException' -) diff --git a/monasca_api/common/repositories/fake/__init__.py b/monasca_api/common/repositories/fake/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/monasca_api/common/repositories/fake/metrics_repository.py b/monasca_api/common/repositories/fake/metrics_repository.py deleted file mode 100644 index 545bcd343..000000000 --- a/monasca_api/common/repositories/fake/metrics_repository.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright 2014 Hewlett-Packard -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from monasca_api.common.repositories import metrics_repository - - -class MetricsRepository(metrics_repository.AbstractMetricsRepository): - def __init__(self): - return - - def list_metrics(self, tenant_id, name, dimensions, offset, limit): - return {} diff --git a/monasca_api/common/repositories/influxdb/__init__.py b/monasca_api/common/repositories/influxdb/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/monasca_api/common/repositories/influxdb/metrics_repository.py b/monasca_api/common/repositories/influxdb/metrics_repository.py deleted file mode 100644 index b0a87aab9..000000000 --- a/monasca_api/common/repositories/influxdb/metrics_repository.py +++ /dev/null @@ -1,973 +0,0 @@ -# (C) Copyright 2014-2018 Hewlett Packard Enterprise Development LP -# Copyright 2015 Cray Inc. All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from datetime import datetime -from datetime import timedelta -from distutils import version - -from influxdb import client -from influxdb.exceptions import InfluxDBClientError -from oslo_config import cfg -from oslo_log import log -from oslo_utils import timeutils -import requests -from six import PY3 - -from monasca_api.common.rest import utils as rest_utils - -from monasca_api.common.repositories import exceptions -from monasca_api.common.repositories import metrics_repository - -MEASUREMENT_NOT_FOUND_MSG = "measurement not found" -DATABASE_NOT_FOUND_MSG = "database not found" - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - - -class MetricsRepository(metrics_repository.AbstractMetricsRepository): - def __init__(self): - - try: - self.conf = cfg.CONF - self.influxdb_client = client.InfluxDBClient( - self.conf.influxdb.ip_address, self.conf.influxdb.port, - self.conf.influxdb.user, self.conf.influxdb.password) - self._version = None - self._init_version() - except Exception as ex: - LOG.exception(ex) - raise exceptions.RepositoryException(ex) - - self._serie_builders_version_map = { - 'from_0.11.0': (self._build_serie_dimension_values_from_v0_11_0, - self._build_serie_metric_list_from_v0_11_0)} - - def _build_serie_dimension_values(self, *args, **kwargs): - if self._version: - f = self._serie_builders_version_map[self._version][0] - return f(*args, **kwargs) - else: - self._init_version() - if self._version: - f = self._serie_builders_version_map[self._version][0] - return f(*args, **kwargs) - LOG.error('influxdb is not available, giving up') - raise exceptions.RepositoryException('Repository not available') - - def _build_serie_metric_list(self, *args, **kwargs): - if self._version: - f = self._serie_builders_version_map[self._version][1] - return f(*args, **kwargs) - else: - self._init_version() - if self._version: - f = self._serie_builders_version_map[self._version][1] - return f(*args, **kwargs) - LOG.error('influxdb is not available, giving up') - raise exceptions.RepositoryException('Repository not available') - - def _init_version(self): - '''Initializes functions for serie builders that are specific to different versions - of InfluxDB. - - In InfluxDB v0.11.0 the SHOW SERIES output changed. 
See, - https://github.com/influxdata/influxdb/blob/master/CHANGELOG.md#v0110-2016-03-22 - ''' - try: - influxdb_version = self._get_influxdb_version() - if influxdb_version < version.StrictVersion('0.11.0'): - raise Exception('Influxdb < v0.11.0 is not supported') - else: - self._version = 'from_0.11.0' - LOG.info('Initialize InfluxDB serie builders >= v0.11.0') - except (requests.exceptions.ConnectionError, - requests.exceptions.ConnectTimeout): - # these errors mean that the backend is not ready yet; we shall try - # to build series on each request - LOG.warning('influxdb is not ready yet') - except Exception as ex: - LOG.exception(ex) - self._version = 'from_0.11.0' - LOG.info('Initialize InfluxDB serie builders >= v0.11.0') - - def _get_influxdb_version(self): - '''Newer versions of InfluxDB implement the ping() method, which returns - the version. If the method isn't present, the query SHOW DIAGNOSTICS - is used instead. - If a Version is found in the result set, return the InfluxDB Version; - otherwise raise an exception. InfluxDB has changed the format of its - result set, and SHOW DIAGNOSTICS was introduced at some point, so earlier releases - of InfluxDB might not return a Version. - ''' - try: - result = self.influxdb_client.ping() - LOG.info("Found Influxdb version {0}".format(result)) - return version.StrictVersion(result) - except Exception as ex: - LOG.warn(ex) - LOG.warn("Getting version from method ping failed," - " now trying with SHOW DIAGNOSTICS") - - try: - result = self.influxdb_client.query('SHOW DIAGNOSTICS') - except InfluxDBClientError as ex: - LOG.exception(ex) - raise - - if 'series' not in result.raw: - LOG.exception('series not in result.raw') - raise Exception('Series not in SHOW DIAGNOSTICS result set') - - for series in result.raw['series']: - if 'columns' not in series: - continue - columns = series['columns'] - if u'Version' not in series['columns']: - continue - if u'values' not in series: - continue - for value in series[u'values']: - version_index = columns.index(u'Version') - version_str = value[version_index] - return version.StrictVersion(version_str) - raise Exception('Version not found in SHOW DIAGNOSTICS result set') - - def _build_show_series_query(self, dimensions, name, tenant_id, region, - start_timestamp=None, end_timestamp=None): - - where_clause = self._build_where_clause(dimensions, name, tenant_id, - region, start_timestamp, - end_timestamp) - - query = 'show series ' + where_clause - - return query - - def _build_show_measurements_query(self, dimensions, name, tenant_id, region): - - where_clause = self._build_where_clause(dimensions, name, tenant_id, - region) - - query = 'show measurements ' + where_clause - - return query - - def _build_show_tag_values_query(self, metric_name, dimension_name, - tenant_id, region, start_timestamp, - end_timestamp): - from_with_clause = '' - if metric_name: - from_with_clause += ' from "{}"'.format(metric_name) - - if dimension_name: - from_with_clause += ' with key = "{}"'.format(dimension_name) - - where_clause = self._build_where_clause(None, None, tenant_id, region, - start_timestamp, end_timestamp) - - query = 'show tag values' + from_with_clause + where_clause - - return query - - def _build_show_tag_keys_query(self, metric_name, tenant_id, region, - start_timestamp, end_timestamp): - from_with_clause = '' - if metric_name: - from_with_clause += ' from "{}"'.format(metric_name) - - where_clause = self._build_where_clause(None, None, tenant_id, region, - start_timestamp, end_timestamp) - - query =
'show tag keys' + from_with_clause + where_clause - - return query - - def _build_select_measurement_query(self, dimensions, name, tenant_id, - region, start_timestamp, end_timestamp, - offset, group_by, limit): - - from_clause = self._build_from_clause(dimensions, name, tenant_id, - region, start_timestamp, - end_timestamp) - - offset_clause = self._build_offset_clause(offset) - - group_by_clause = self._build_group_by_clause(group_by) - - limit_clause = self._build_limit_clause(limit) - - query = 'select value, value_meta '\ - + from_clause + offset_clause\ - + group_by_clause + limit_clause - - return query - - def _build_select_all_query(self, dimensions, name, tenant_id, - region, start_timestamp, end_timestamp, - no_record_check_dim): - - from_clause = self._build_from_clause(dimensions, name, tenant_id, - region, start_timestamp, - end_timestamp) - if no_record_check_dim is not None: - query = 'select *' + from_clause + " and {} != ''".format(no_record_check_dim) - else: - query = 'select *' + from_clause - - return query - - def _build_statistics_query(self, dimensions, name, tenant_id, - region, start_timestamp, end_timestamp, - statistics, period, offset, group_by, limit): - - from_clause = self._build_from_clause(dimensions, name, tenant_id, - region, start_timestamp, - end_timestamp) - if period is None: - period = str(300) - - if offset: - if '_' in offset: - tmp = datetime.strptime(str(offset).split('_')[1], "%Y-%m-%dT%H:%M:%SZ") - tmp = tmp + timedelta(seconds=int(period)) - # Leave out any ID as influx doesn't understand it - offset = tmp.isoformat() - else: - tmp = datetime.strptime(offset, "%Y-%m-%dT%H:%M:%SZ") - offset = tmp + timedelta(seconds=int(period)) - - offset_clause = (" and time > '{}'".format(offset)) - from_clause += offset_clause - - statistics = [statistic.replace('avg', 'mean') for statistic in - statistics] - statistics = [statistic + '(value)' for statistic in statistics] - - statistic_string = ",".join(statistics) - - query = 'select ' + statistic_string + ' ' + from_clause - - query += self._build_group_by_clause(group_by, period) - - limit_clause = self._build_limit_clause(limit) - - query += limit_clause - - return query - - def _build_where_clause(self, dimensions, name, tenant_id, region, - start_timestamp=None, end_timestamp=None): - - where_clause = '' - - # name - optional - if name: - # replace ' with \' to make query parsable - clean_name = name.replace("'", "\\'") if PY3 \ - else name.replace("'", "\\'").encode('utf-8') - where_clause += ' from "{}"'.format(clean_name) - - # region - where_clause += " where _region = '{}'".format(region) - - # tenant id - if not self.conf.influxdb.db_per_tenant: - where_clause += " and _tenant_id = '{}'".format(tenant_id) - - # dimensions - optional - if dimensions: - for dimension_name, dimension_value in iter( - sorted(dimensions.items())): - # replace ' with \' to make query parsable - clean_dimension_name = dimension_name.replace("\'", "\\'") if PY3 \ - else dimension_name.replace("\'", "\\'").encode('utf-8') - if dimension_value == "": - where_clause += " and \"{}\" =~ /.+/".format( - clean_dimension_name) - elif '|' in dimension_value: - # replace ' with \' to make query parsable - clean_dimension_value = dimension_value.replace("\'", "\\'") if PY3 else \ - dimension_value.replace("\'", "\\'").encode('utf-8') - - where_clause += " and \"{}\" =~ /^{}$/".format( - clean_dimension_name, - clean_dimension_value) - else: - # replace ' with \' to make query parsable - clean_dimension_value = 
dimension_value.replace("\'", "\\'") if PY3 else \ - dimension_value.replace("\'", "\\'").encode('utf-8') - - where_clause += " and \"{}\" = '{}'".format( - clean_dimension_name, - clean_dimension_value) - - if start_timestamp is not None: - where_clause += " and time >= " + str(int(start_timestamp * - 1000000)) + "u" - - if end_timestamp is not None: - where_clause += " and time < " + str(int(end_timestamp * - 1000000)) + "u" - - return where_clause - - def _build_from_clause(self, dimensions, name, tenant_id, region, - start_timestamp=None, end_timestamp=None): - - from_clause = self._build_where_clause(dimensions, name, tenant_id, - region, start_timestamp, - end_timestamp) - return from_clause - - def query_tenant_db(self, query, tenant_id): - database = ('%s_%s' % (self.conf.influxdb.database_name, tenant_id) - if self.conf.influxdb.db_per_tenant - else self.conf.influxdb.database_name) - try: - return self.influxdb_client.query(query, database=database) - except InfluxDBClientError as ex: - if (str(ex).startswith(DATABASE_NOT_FOUND_MSG) and - self.conf.influxdb.db_per_tenant): - return None - else: - raise - - def list_metrics(self, tenant_id, region, name, dimensions, offset, - limit, start_timestamp=None, end_timestamp=None): - - try: - - query = self._build_show_series_query(dimensions, name, tenant_id, region) - - query += " limit {}".format(limit + 1) - - if offset: - query += ' offset {}'.format(int(offset) + 1) - - result = self.query_tenant_db(query, tenant_id) - - json_metric_list = self._build_serie_metric_list(result, - tenant_id, - region, - start_timestamp, - end_timestamp, - offset) - - return json_metric_list - - except InfluxDBClientError as ex: - if str(ex).startswith(MEASUREMENT_NOT_FOUND_MSG): - return [] - else: - LOG.exception(ex) - raise exceptions.RepositoryException(ex) - - except Exception as ex: - LOG.exception(ex) - raise exceptions.RepositoryException(ex) - - def _build_serie_dimension_values_from_v0_11_0(self, series_names, dimension_name): - '''In InfluxDB v0.11.0 the SHOW TAG VALUES output changed. 
- See, https://github.com/influxdata/influxdb/blob/master/CHANGELOG.md#v0110-2016-03-22 - ''' - dim_value_set = set() - json_dim_value_list = [] - - if not series_names: - return json_dim_value_list - if 'series' not in series_names.raw: - return json_dim_value_list - if not dimension_name: - return json_dim_value_list - - for series in series_names.raw['series']: - if 'columns' not in series: - continue - columns = series['columns'] - if 'key' not in columns: - continue - if u'values' not in series: - continue - for value in series[u'values']: - if len(value) < 2: - continue - for tag in value[1:]: - dim_value_set.add(tag) - - for value in dim_value_set: - json_dim_value_list.append({u'dimension_value': value}) - - json_dim_value_list = sorted(json_dim_value_list, key=lambda x: x[u'dimension_value']) - return json_dim_value_list - - def _build_serie_dimension_names(self, series_names): - dim_name_set = set() - json_dim_name_list = [] - - if not series_names: - return json_dim_name_list - if 'series' not in series_names.raw: - return json_dim_name_list - - for series in series_names.raw['series']: - if 'columns' not in series: - continue - if u'values' not in series: - continue - for value in series[u'values']: - tag_key = value[0] - if tag_key.startswith(u'_'): - continue - dim_name_set.add(tag_key) - - for name in dim_name_set: - json_dim_name_list.append({u'dimension_name': name}) - - json_dim_name_list = sorted(json_dim_name_list, key=lambda x: x[u'dimension_name']) - return json_dim_name_list - - def _build_serie_metric_list_from_v0_11_0(self, series_names, tenant_id, region, - start_timestamp, end_timestamp, offset): - '''In InfluxDB v0.11.0 the SHOW SERIES output changed. - See, https://github.com/influxdata/influxdb/blob/master/CHANGELOG.md#v0110-2016-03-22 - ''' - json_metric_list = [] - - if not series_names: - return json_metric_list - - if 'series' not in series_names.raw: - return json_metric_list - - metric_id = 0 - if offset: - metric_id = int(offset) + 1 - - for series in series_names.raw['series']: - if 'columns' not in series: - continue - columns = series['columns'] - if 'key' not in columns: - continue - key_index = columns.index('key') - if u'values' not in series: - continue - for value in series[u'values']: - split_value = value[key_index].split(',') - if len(split_value) < 2: - continue - serie_name = split_value[0] - dimensions = {} - for tag in split_value[1:]: - tag_key_value = tag.split('=') - if len(tag_key_value) < 2: - continue - tag_key = tag_key_value[0] - tag_value = tag_key_value[1] - if tag_key.startswith(u'_'): - continue - dimensions[tag_key] = tag_value - if not self._has_measurements(tenant_id, - region, - serie_name, - dimensions, - start_timestamp, - end_timestamp): - continue - metric = {u'id': str(metric_id), - u'name': serie_name, - u'dimensions': dimensions} - metric_id += 1 - json_metric_list.append(metric) - - return json_metric_list - - def _build_measurement_name_list(self, measurement_names): - """Read measurement names from InfluxDB response - - Extract the measurement names (InfluxDB terminology) from the SHOW MEASUREMENTS result - to yield metric names - :param measurement_names: result from SHOW MEASUREMENTS call (json-dict) - :return: list of metric-names (Monasca terminology) - """ - - json_metric_list = [] - - if not measurement_names: - return json_metric_list - - for name in measurement_names.raw.get(u'series', [{}])[0].get(u'values', []): - entry = {u'name': name[0]} - json_metric_list.append(entry) - - json_metric_list =
sorted(json_metric_list, key=lambda k: k['name']) - return json_metric_list - - def _get_dimensions(self, tenant_id, region, name, dimensions): - metrics_list = self.list_metrics(tenant_id, region, name, - dimensions, None, 2) - - if len(metrics_list) > 1: - raise exceptions.MultipleMetricsException(self.MULTIPLE_METRICS_MESSAGE) - - if not metrics_list: - return {} - - return metrics_list[0]['dimensions'] - - def measurement_list(self, tenant_id, region, name, dimensions, - start_timestamp, end_timestamp, offset, - limit, merge_metrics_flag, group_by): - - json_measurement_list = [] - - offset_id = 0 - offset_timestamp = offset - - if offset and "_" in offset: - offset_id_str, _, offset_timestamp = offset.partition('_') - offset_id = int(offset_id_str) - - try: - # the build query method apparently only considers offset timestamp. - query = self._build_select_measurement_query(dimensions, name, - tenant_id, - region, - start_timestamp, - end_timestamp, - offset_timestamp, group_by, - limit) - - if not group_by and not merge_metrics_flag: - dimensions = self._get_dimensions(tenant_id, region, name, dimensions) - query += " slimit 1" - - result = self.query_tenant_db(query, tenant_id) - - if not result: - return json_measurement_list - - index = offset_id - - for serie in result.raw['series']: - - if 'values' in serie: - - measurements_list = [] - for point in serie['values']: - value_meta = rest_utils.from_json(point[2]) if point[2] else {} - datetime = point[0][:19] - fraction = point[0][20:-1].ljust(3, '0') - timestamp = datetime + '.' + fraction[:3] + 'Z' - - measurements_list.append([timestamp, - point[1], - value_meta]) - - measurement = {u'name': serie['name'], - u'id': str(index), - u'columns': [u'timestamp', u'value', - u'value_meta'], - u'measurements': measurements_list} - - if not group_by: - measurement[u'dimensions'] = dimensions - else: - measurement[u'dimensions'] = { - key: value for key, - value in serie['tags'].items() if not key.startswith('_')} - - json_measurement_list.append(measurement) - index += 1 - - return json_measurement_list - - except exceptions.RepositoryException as ex: - - if (len(ex.args) and isinstance(ex.args[0], InfluxDBClientError) and - str(ex.args[0]).startswith(MEASUREMENT_NOT_FOUND_MSG)): - - return json_measurement_list - - else: - - LOG.exception(ex) - - raise ex - - except InfluxDBClientError as ex: - - if str(ex).startswith(MEASUREMENT_NOT_FOUND_MSG): - - return json_measurement_list - - else: - - LOG.exception(ex) - - raise exceptions.RepositoryException(ex) - - except Exception as ex: - - LOG.exception(ex) - - raise exceptions.RepositoryException(ex) - - def list_metric_names(self, tenant_id, region, dimensions): - - try: - - query = self._build_show_measurements_query(dimensions, None, tenant_id, - region) - - result = self.query_tenant_db(query, tenant_id) - - json_name_list = self._build_measurement_name_list(result) - return json_name_list - - except Exception as ex: - LOG.exception(ex) - raise exceptions.RepositoryException(ex) - - def metrics_statistics(self, tenant_id, region, name, dimensions, - start_timestamp, end_timestamp, statistics, - period, offset, limit, merge_metrics_flag, - group_by): - - json_statistics_list = [] - - try: - query = self._build_statistics_query(dimensions, name, tenant_id, - region, start_timestamp, - end_timestamp, statistics, - period, offset, group_by, limit) - - if not group_by and not merge_metrics_flag: - dimensions = self._get_dimensions(tenant_id, region, name, dimensions) - query += " slimit 1" - 
- result = self.query_tenant_db(query, tenant_id) - - if not result: - return json_statistics_list - - offset_id = 0 - if offset is not None: - offset_tuple = offset.split('_') - # If offset_id is given, add 1 since we want the next one - if len(offset_tuple) > 1: - offset_id = int(offset_tuple[0]) + 1 - index = offset_id - - for serie in result.raw['series']: - - if 'values' in serie: - columns = [column.replace('time', 'timestamp').replace('mean', 'avg') - for column in serie['columns']] - - stats_list = [] - for stats in serie['values']: - # remove sub-second timestamp values (period can never be less than 1) - timestamp = stats[0] - if '.' in timestamp: - stats[0] = str(timestamp)[:19] + 'Z' - for stat in stats[1:]: - # Only add row if there is a valid value in the row - if stat is not None: - stats_list.append(stats) - break - - statistic = {u'name': serie['name'], - u'id': str(index), - u'columns': columns, - u'statistics': stats_list} - - if not group_by: - statistic[u'dimensions'] = dimensions - else: - statistic[u'dimensions'] = { - key: value for key, - value in serie['tags'].items() if not key.startswith('_')} - - json_statistics_list.append(statistic) - index += 1 - - return json_statistics_list - - except exceptions.RepositoryException as ex: - - if (len(ex.args) and isinstance(ex.args[0], InfluxDBClientError) and - str(ex.args[0]).startswith(MEASUREMENT_NOT_FOUND_MSG)): - - return json_statistics_list - - else: - - LOG.exception(ex) - - raise ex - - except InfluxDBClientError as ex: - - if str(ex).startswith(MEASUREMENT_NOT_FOUND_MSG): - - return json_statistics_list - - else: - - LOG.exception(ex) - - raise exceptions.RepositoryException(ex) - - except Exception as ex: - - LOG.exception(ex) - - raise exceptions.RepositoryException(ex) - - def _build_offset_clause(self, offset): - if offset: - # offset may be given as a timestamp or as epoch time in ms - if str(offset).isdigit(): - # epoch time - offset_clause = " and time > {}ms".format(offset) - else: - # timestamp - offset_clause = " and time > '{}'".format(offset) - else: - offset_clause = "" - - return offset_clause - - def _build_group_by_clause(self, group_by, period=None): - if group_by is not None and not isinstance(group_by, list): - group_by = str(group_by).split(',') - if group_by or period: - items = [] - if group_by: - items.extend(group_by) - if period: - items.append("time(" + str(period) + "s) fill(none)") - clause = " group by " + ','.join(items) - else: - clause = "" - - return clause - - def _build_limit_clause(self, limit): - return " limit {} ".format(str(limit + 1)) - - def _has_measurements(self, tenant_id, region, name, dimensions, - start_timestamp, end_timestamp): - - has_measurements = True - - # - # No need for the additional query if we don't have a start timestamp. - # - if not start_timestamp: - return True - - # - # We set limit to 1 for the measurement_list call, as we are only - # interested in knowing if there is at least one measurement, and - # not ask too much of influxdb. 
- # - measurements = self.measurement_list(tenant_id, - region, - name, - dimensions, - start_timestamp, - end_timestamp, - None, - 1, - False, - None) - - if len(measurements) == 0: - has_measurements = False - - return has_measurements - - def alarm_history(self, tenant_id, alarm_id_list, - offset, limit, start_timestamp=None, - end_timestamp=None): - - try: - - json_alarm_history_list = [] - - if not alarm_id_list: - return json_alarm_history_list - - for alarm_id in alarm_id_list: - if '\'' in alarm_id or ';' in alarm_id: - raise Exception( - "Input from user contains single quote ['] or " - "semi-colon [;] characters[ {} ]".format(alarm_id)) - - query = """ - select alarm_id, metrics, new_state, old_state, - reason, reason_data, sub_alarms, tenant_id - from alarm_state_history - """ - tenant_id = tenant_id if PY3 else tenant_id.encode('utf-8') - where_clause = ( - " where tenant_id = '{}' ".format(tenant_id)) - - alarm_id_where_clause_list = ( - [" alarm_id = '{}' ".format(alarm_id if PY3 else alarm_id.encode('utf8')) - for alarm_id in alarm_id_list]) - - alarm_id_where_clause = " or ".join(alarm_id_where_clause_list) - - where_clause += ' and (' + alarm_id_where_clause + ')' - - time_clause = '' - if start_timestamp: - time_clause += " and time >= " + str(int(start_timestamp * - 1000000)) + "u " - - if end_timestamp: - time_clause += " and time <= " + str(int(end_timestamp * - 1000000)) + "u " - - offset_clause = self._build_offset_clause(offset) - - limit_clause = self._build_limit_clause(limit) - - query += where_clause + time_clause + offset_clause + limit_clause - - result = self.query_tenant_db(query, tenant_id) - - if not result: - return json_alarm_history_list - - if 'values' in result.raw['series'][0]: - - for point in result.raw['series'][0]['values']: - point_list = list(point) - alarm_point = {u'timestamp': point_list[0], - u'alarm_id': point_list[1], - u'metrics': rest_utils.from_json(point_list[2]), - u'new_state': point_list[3], - u'old_state': point_list[4], - u'reason': point_list[5], - u'reason_data': point_list[6], - u'sub_alarms': rest_utils.from_json(point_list[7]), - u'id': str(self._get_millis_from_timestamp( - timeutils.parse_isotime(point_list[0])))} - - # java api formats these during json serialization - if alarm_point[u'sub_alarms']: - for sub_alarm in alarm_point[u'sub_alarms']: - sub_expr = sub_alarm['sub_alarm_expression'] - metric_def = sub_expr['metric_definition'] - sub_expr['metric_name'] = metric_def['name'] - sub_expr['dimensions'] = metric_def['dimensions'] - del sub_expr['metric_definition'] - - json_alarm_history_list.append(alarm_point) - - return json_alarm_history_list - - except Exception as ex: - - LOG.exception(ex) - - raise exceptions.RepositoryException(ex) - - def _get_millis_from_timestamp(self, dt): - dt = dt.replace(tzinfo=None) - return int((dt - datetime(1970, 1, 1)).total_seconds() * 1000) - - def list_dimension_values(self, tenant_id, region, metric_name, - dimension_name, start_timestamp=None, - end_timestamp=None): - try: - query = self._build_show_tag_values_query(metric_name, - dimension_name, - tenant_id, region, - start_timestamp, - end_timestamp) - result = self.query_tenant_db(query, tenant_id) - json_dim_value_list = self._build_serie_dimension_values( - result, dimension_name) - json_dim_value_list_filtered = list() - for serie in result.raw['series']: - for dim_value_dict in json_dim_value_list: - query = self._build_select_all_query( - dimensions={dimension_name: dim_value_dict['dimension_value']}, - 
name=serie['name'], - tenant_id=tenant_id, - region=region, - start_timestamp=start_timestamp, - end_timestamp=end_timestamp, - no_record_check_dim=None) - result = self.query_tenant_db(query, tenant_id) - if len(result.raw['series']) > 0: - json_dim_value_list_filtered.append(dim_value_dict) - json_dim_value_list_filtered = sorted(json_dim_value_list_filtered, key=lambda - dim_value_dict: dim_value_dict['dimension_value']) - return json_dim_value_list_filtered - except Exception as ex: - LOG.exception(ex) - raise exceptions.RepositoryException(ex) - - def list_dimension_names(self, tenant_id, region, metric_name, - start_timestamp=None, end_timestamp=None): - try: - query = self._build_show_tag_keys_query(metric_name, - tenant_id, region, - start_timestamp, - end_timestamp) - result = self.query_tenant_db(query, tenant_id) - json_dim_name_list = self._build_serie_dimension_names(result) - if metric_name is not None: - json_dim_name_list_filtered = list() - for dim_name_dict in json_dim_name_list: - query = self._build_select_all_query( - dimensions=None, - name=metric_name, - tenant_id=tenant_id, - region=region, - start_timestamp=start_timestamp, - end_timestamp=end_timestamp, - no_record_check_dim=dim_name_dict['dimension_name']) - result = self.query_tenant_db(query, tenant_id) - if len(result.raw['series']) > 0: - json_dim_name_list_filtered.append(dim_name_dict) - - json_dim_name_list_filtered = sorted(json_dim_name_list_filtered, key=lambda - dim_name_dict: dim_name_dict['dimension_name']) - return json_dim_name_list_filtered - else: - return json_dim_name_list - except Exception as ex: - LOG.exception(ex) - raise exceptions.RepositoryException(ex) - - @staticmethod - def check_status(): - uri = 'http://{0}:{1}/ping'.format(CONF.influxdb.ip_address, - CONF.influxdb.port) - try: - resp = requests.head(url=uri, timeout=5) - except Exception as ex: - LOG.exception(str(ex)) - return False, str(ex) - - return resp.ok, 'OK' if resp.ok else 'Error: {0}'.format( - resp.status_code) diff --git a/monasca_api/common/repositories/metrics_repository.py b/monasca_api/common/repositories/metrics_repository.py deleted file mode 100644 index 355a2f378..000000000 --- a/monasca_api/common/repositories/metrics_repository.py +++ /dev/null @@ -1,63 +0,0 @@ -# (C) Copyright 2014,2016 Hewlett Packard Enterprise Development LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import abc - -import six - - -@six.add_metaclass(abc.ABCMeta) -class AbstractMetricsRepository(object): - - MULTIPLE_METRICS_MESSAGE = ("Found multiple metrics matching metric name" + - " and dimensions. Please refine your search" + - " criteria using a unique" + - " metric name or additional dimensions." 
+ - " Alternatively, you may specify" + - " 'merge_metrics=True' as a query" + - " parameter to combine all metrics" + - " matching search criteria into a single" + - " series.") - - @abc.abstractmethod - def list_metrics(self, tenant_id, region, name, dimensions, offset, limit): - pass - - @abc.abstractmethod - def list_metric_names(self, tenant_id, region, dimensions): - pass - - @abc.abstractmethod - def measurement_list(self, tenant_id, region, name, dimensions, - start_timestamp, end_timestamp, offset, limit, - merge_metrics_flag, - group_by): - pass - - @abc.abstractmethod - def metrics_statistics(self, tenant_id, region, name, dimensions, - start_timestamp, end_timestamp, statistics, - period, offset, limit, merge_metrics_flag, - group_by): - pass - - @abc.abstractmethod - def alarm_history(self, tenant_id, alarm_id_list, - offset, limit, start_timestamp, end_timestamp): - pass - - @staticmethod - @abc.abstractmethod - def check_status(): - pass diff --git a/monasca_api/common/repositories/model/__init__.py b/monasca_api/common/repositories/model/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/monasca_api/common/repositories/model/sub_alarm_definition.py b/monasca_api/common/repositories/model/sub_alarm_definition.py deleted file mode 100644 index cb50f5814..000000000 --- a/monasca_api/common/repositories/model/sub_alarm_definition.py +++ /dev/null @@ -1,155 +0,0 @@ -# (C) Copyright 2014-2017 Hewlett Packard Enterprise Development LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_utils import encodeutils - - -class SubAlarmDefinition(object): - """Holds sub alarm definition - - Used for comparing sub alarm definitions for equality. - """ - - def __init__(self, row=None, sub_expr=None): - - """Initialize - - :param row: Database row - :param sub_expr: Result from expression parser - :return: - """ - - super(SubAlarmDefinition, self).__init__() - - if row and sub_expr: - raise Exception('Only one of row or sub_expr can be specified, ' - 'not both') - - if row: - # id is not used for compare or hash. - self.id = row['id'] - self.alarm_definition_id = row['alarm_definition_id'] - self.metric_name = row['metric_name'] - self.dimensions_str = row['dimensions'] - self.dimensions = self._init_dimensions(row['dimensions']) - self.function = row['function'] - self.operator = row['operator'] - self.period = row['period'] - self.periods = row['periods'] - self.threshold = row['threshold'] - self.deterministic = row['is_deterministic'] - - if sub_expr: - # id is not used for compare or hash. - self.id = '' - # Must be injected. 
- self.alarm_definition_id = '' - self.metric_name = sub_expr.metric_name - self.dimensions_str = sub_expr.dimensions_str - self.dimensions = self._init_dimensions(sub_expr.dimensions_str) - self.function = encodeutils.safe_decode(sub_expr.normalized_func, 'utf-8') - self.operator = sub_expr.normalized_operator - self.period = sub_expr.period - self.periods = sub_expr.periods - self.threshold = sub_expr.threshold - self.deterministic = sub_expr.deterministic - - def _init_dimensions(self, dimensions_str): - - dimensions = {} - - if dimensions_str: - for dimension in dimensions_str.split(','): - name, value = dimension.split('=') - dimensions[name] = value - - return dimensions - - @property - def expression(self): - - """Build the entire expression as a string with spaces.""" - - result = u"{}({}".format(self.function.lower(), - self.metric_name) - - if self.dimensions_str: - result += u"{{{}}}".format(self.dimensions_str) - - if self.deterministic: - result += u", deterministic" - - if self.period: - result += u", {}".format(str(self.period)) - - result += u")" - - result += u" {} {}".format(self.operator, - str(self.threshold)) - - if self.periods: - result += u" times {}".format(str(self.periods)) - - return result - - def __hash__(self): - - dimensions_str = "".join(sorted([name + value for name, value in - self.dimensions.items()])) - - # don't use id to hash. - return (hash(self.alarm_definition_id) ^ - hash(dimensions_str) ^ - hash(self.function) ^ - hash(self.metric_name) ^ - hash(self.operator) ^ - hash(self.period) ^ - hash(self.periods) ^ - hash(self.deterministic) ^ - # Convert to float to handle cases like 0.0 == 0 - hash(float(self.threshold))) - - def __repr__(self): - - result = 'id={},alarm_definition_id={},function={},metric_name={},dimensions={}' .format( - self.id, self.alarm_definition_id, self.function, self.metric_name, self.dimensions) - result += ',operator={},period={},periods={},deterministic={}'\ - .format(self.operator, self.period, self.periods, self.deterministic) - return result - - def __eq__(self, other): - - if id(self) == id(other): - return True - - if not isinstance(other, SubAlarmDefinition): - return False - - # don't use id to compare. - return (self.alarm_definition_id == other.alarm_definition_id and - self.dimensions == other.dimensions and - self.function == other.function and - self.metric_name == other.metric_name and - self.operator == other.operator and - self.period == other.period and - self.periods == other.periods and - self.deterministic == other.deterministic and - # Convert to float to handle cases like 0.0 == 0 - float(self.threshold) == float(other.threshold)) - - def same_key_fields(self, other): - - # The metrics matched can't change - return (self.metric_name == other.metric_name and - self.dimensions == other.dimensions) diff --git a/monasca_api/common/repositories/notification_method_type_repository.py b/monasca_api/common/repositories/notification_method_type_repository.py deleted file mode 100644 index b9dc914e4..000000000 --- a/monasca_api/common/repositories/notification_method_type_repository.py +++ /dev/null @@ -1,25 +0,0 @@ -# (C) Copyright 2016 Hewlett Packard Enterprise Development LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License.
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import abc - -import six - - -@six.add_metaclass(abc.ABCMeta) -class NotificationMethodTypeRepository(object): - - @abc.abstractmethod - def list_notification_method_types(self): - return diff --git a/monasca_api/common/repositories/notifications_repository.py b/monasca_api/common/repositories/notifications_repository.py deleted file mode 100644 index 1e9bbe9e5..000000000 --- a/monasca_api/common/repositories/notifications_repository.py +++ /dev/null @@ -1,44 +0,0 @@ -# (C) Copyright 2014-2016 Hewlett Packard Enterprise Development Company LP -# Copyright 2016 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import abc - -import six - - -@six.add_metaclass(abc.ABCMeta) -class NotificationsRepository(object): - - @abc.abstractmethod - def create_notification(self, tenant_id, name, notification_type, - address, period): - return - - @abc.abstractmethod - def list_notifications(self, tenant_id, sort_by, offset, limit): - return - - @abc.abstractmethod - def delete_notification(self, tenant_id, notification_id): - return - - @abc.abstractmethod - def list_notification(self, tenant_id, notification_id): - return - - @abc.abstractmethod - def update_notification(self, notification_id, tenant_id, name, notification_type, - address, period): - return diff --git a/monasca_api/common/repositories/sqla/__init__.py b/monasca_api/common/repositories/sqla/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/monasca_api/common/repositories/sqla/alarm_definitions_repository.py b/monasca_api/common/repositories/sqla/alarm_definitions_repository.py deleted file mode 100644 index 0fb2074f8..000000000 --- a/monasca_api/common/repositories/sqla/alarm_definitions_repository.py +++ /dev/null @@ -1,803 +0,0 @@ -# (C) Copyright 2014-2017 Hewlett Packard Enterprise Development LP -# Copyright 2016 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import datetime -import six - -from oslo_utils import encodeutils -from oslo_utils import uuidutils -from sqlalchemy import MetaData, update, delete, insert -from sqlalchemy import select, text, bindparam, null, literal_column -from sqlalchemy import or_ - -from monasca_api.common.repositories import alarm_definitions_repository as adr -from monasca_api.common.repositories import exceptions -from monasca_api.common.repositories.model import sub_alarm_definition -from monasca_api.common.repositories.sqla import models -from monasca_api.common.repositories.sqla import sql_repository - - -class AlarmDefinitionsRepository(sql_repository.SQLRepository, - adr.AlarmDefinitionsRepository): - - def __init__(self): - super(AlarmDefinitionsRepository, self).__init__() - - metadata = MetaData() - self.a = models.create_a_model(metadata) - self.aa = models.create_aa_model(metadata) - self.ad = models.create_ad_model(metadata) - self.am = models.create_am_model(metadata) - self.nm = models.create_nm_model(metadata) - self.md = models.create_md_model(metadata) - self.mde = models.create_mde_model(metadata) - self.mdd = models.create_mdd_model(metadata) - self.sa = models.create_sa_model(metadata) - self.sad = models.create_sad_model(metadata) - self.sadd = models.create_sadd_model(metadata) - a = self.a - aa = self.aa - ad = self.ad - am = self.am - nm = self.nm - md = self.md - sa = self.sa - mdd = self.mdd - mde = self.mde - sad = self.sad - sadd = self.sadd - - a_s = a.alias('a') - ad_s = ad.alias('ad') - self.ad_s = ad_s - am_s = am.alias('am') - nm_s = nm.alias('nm') - md_s = md.alias('md') - sa_s = sa.alias('sa') - mdd_s = mdd.alias('mdd') - mde_s = mde.alias('mde') - sad_s = sad.alias('sad') - sadd_s = sadd.alias('sadd') - - aaa_aa = aa.alias('aaa_aa') - aaa = (select(aaa_aa.c.alarm_definition_id, - models.group_concat([aaa_aa.c.action_id]).label('alarm_actions')) - .select_from(aaa_aa) - .where(aaa_aa.c.alarm_state == text("'ALARM'")) - .group_by(aaa_aa.c.alarm_definition_id) - .alias('aaa')) - - aao_aa = aa.alias('aao_aa') - aao = (select(aao_aa.c.alarm_definition_id, - models.group_concat([aao_aa.c.action_id]).label('ok_actions')) - .select_from(aao_aa) - .where(aao_aa.c.alarm_state == text("'OK'")) - .group_by(aao_aa.c.alarm_definition_id) - .alias('aao')) - - aau_aa = aa.alias('aau_aa') - aau = (select(aau_aa.c.alarm_definition_id, - models.group_concat([aau_aa.c.action_id]).label('undetermined_actions')) - .select_from(aau_aa) - .where(aau_aa.c.alarm_state == text("'UNDETERMINED'")) - .group_by(aau_aa.c.alarm_definition_id) - .alias('aau')) - - self.base_query_from = (ad_s.outerjoin(aaa, aaa.c.alarm_definition_id == ad_s.c.id) - .outerjoin(aao, aao.c.alarm_definition_id == ad_s.c.id) - .outerjoin(aau, aau.c.alarm_definition_id == ad_s.c.id)) - - self.base_query = (select(ad_s.c.id, - ad_s.c.name, - ad_s.c.description, - ad_s.c.expression, - ad_s.c.match_by, - ad_s.c.severity, - ad_s.c.actions_enabled, - aaa.c.alarm_actions, - aao.c.ok_actions, - aau.c.undetermined_actions)) - - self.get_sub_alarms_query = ( - select(sa_s.c.id.label('sub_alarm_id'), - sa_s.c.alarm_id, - sa_s.c.expression) .select_from( - sa_s.join( - a_s, - a_s.c.id == sa_s.c.alarm_id) .join( - ad_s, - ad_s.c.id == a_s.c.alarm_definition_id)) .where( - ad_s.c.tenant_id == bindparam('b_tenant_id')) .where( - ad_s.c.id == bindparam('b_id')) .distinct()) - - mdg = (select(md_s.c.dimension_set_id, - models.group_concat( - [md_s.c.name + text("'='") + md_s.c.value]).label('dimensions')) - .select_from(md_s) - 
.group_by(md_s.c.dimension_set_id) - .alias('mdg')) - - self.get_alarm_metrics_query = ( - select( - a_s.c.id.label('alarm_id'), - mde_s.c.name, - mdg.c.dimensions) .select_from( - a_s.join( - ad_s, - ad_s.c.id == a_s.c.alarm_definition_id) .join( - am_s, - am_s.c.alarm_id == a_s.c.id) .join( - mdd_s, - mdd_s.c.id == am_s.c.metric_definition_dimensions_id) .join( - mde_s, - mde_s.c.id == mdd_s.c.metric_definition_id) .outerjoin( - mdg, - mdg.c.dimension_set_id == mdd_s.c.metric_dimension_set_id)) .where( - ad_s.c.tenant_id == bindparam('b_tenant_id')) .where( - ad_s.c.id == bindparam('b_id')) .order_by( - a_s.c.id) .distinct()) - - self.soft_delete_ad_query = (update(ad) - .where(ad.c.tenant_id == bindparam('b_tenant_id')) - .where(ad.c.id == bindparam('b_id')) - .where(ad.c.deleted_at == null()) - .values(deleted_at=datetime.datetime.utcnow())) - - self.delete_a_query = (delete(a) - .where(a.c.alarm_definition_id == bindparam('b_id'))) - - columns_gc = [sadd_s.c.dimension_name + text("'='") + sadd_s.c.value] - saddg = (select(sadd_s.c.sub_alarm_definition_id, - models.group_concat(columns_gc).label('dimensions')) - .select_from(sadd_s) - .group_by(sadd_s.c.sub_alarm_definition_id) - .alias('saddg')) - - self.get_sub_alarm_definitions_query = ( - select(sad_s, - saddg.c.dimensions) .select_from( - sad_s.outerjoin( - saddg, - saddg.c.sub_alarm_definition_id == sad_s.c.id)) .where( - sad_s.c.alarm_definition_id == bindparam('b_alarm_definition_id'))) - - self.create_alarm_definition_insert_ad_query = ( - insert(ad) .values( - id=bindparam('b_id'), - tenant_id=bindparam('b_tenant_id'), - name=bindparam('b_name'), - description=bindparam('b_description'), - expression=bindparam('b_expression'), - severity=bindparam('b_severity'), - match_by=bindparam('b_match_by'), - actions_enabled=bindparam('b_actions_enabled'), - created_at=bindparam('b_created_at'), - updated_at=bindparam('b_updated_at'))) - - self.create_alarm_definition_insert_sad_query = ( - insert(sad) .values( - id=bindparam('b_id'), - alarm_definition_id=bindparam('b_alarm_definition_id'), - function=bindparam('b_function'), - metric_name=bindparam('b_metric_name'), - operator=bindparam('b_operator'), - threshold=bindparam('b_threshold'), - period=bindparam('b_period'), - periods=bindparam('b_periods'), - is_deterministic=bindparam('b_is_deterministic'), - created_at=bindparam('b_created_at'), - updated_at=bindparam('b_updated_at'))) - - b_sad_id = bindparam('b_sub_alarm_definition_id') - self.create_alarm_definition_insert_sadd_query = ( - insert(sadd) .values( - sub_alarm_definition_id=b_sad_id, - dimension_name=bindparam('b_dimension_name'), - value=bindparam('b_value'))) - - self.update_or_patch_alarm_definition_update_ad_query = ( - update(ad) .where( - ad.c.tenant_id == bindparam('b_tenant_id')) .where( - ad.c.id == bindparam('b_id'))) - - self.update_or_patch_alarm_definition_delete_sad_query = ( - delete(sad) .where(sad.c.id == bindparam('b_id'))) - - self.update_or_patch_alarm_definition_update_sad_query = ( - update(sad) .where( - sad.c.id == bindparam('b_id')) .values( - operator=bindparam('b_operator'), - threshold=bindparam('b_threshold'), - is_deterministic=bindparam('b_is_deterministic'), - updated_at=bindparam('b_updated_at'))) - - b_ad_id = bindparam('b_alarm_definition_id') - self.update_or_patch_alarm_definition_insert_sad_query = ( - insert(sad) .values( - id=bindparam('b_id'), - alarm_definition_id=b_ad_id, - function=bindparam('b_function'), - metric_name=bindparam('b_metric_name'), -
operator=bindparam('b_operator'), - threshold=bindparam('b_threshold'), - period=bindparam('b_period'), - periods=bindparam('b_periods'), - is_deterministic=bindparam('b_is_deterministic'), - created_at=bindparam('b_created_at'), - updated_at=bindparam('b_updated_at'))) - - self.update_or_patch_alarm_definition_insert_sadd_query = ( - insert(sadd) .values( - sub_alarm_definition_id=b_sad_id, - dimension_name=bindparam('b_dimension_name'), - value=bindparam('b_value'))) - - self.delete_aa_query = (delete(aa) - .where(aa.c.alarm_definition_id == - bindparam('b_alarm_definition_id'))) - - self.delete_aa_state_query = ( - delete(aa) .where( - aa.c.alarm_definition_id == bindparam('b_alarm_definition_id')) .where( - aa.c.alarm_state == bindparam('b_alarm_state'))) - - self.select_nm_query = (select(nm_s.c.id) - .select_from(nm_s) - .where(nm_s.c.id == bindparam('b_id'))) - - self.insert_aa_query = (insert(aa) - .values( - alarm_definition_id=bindparam('b_alarm_definition_id'), - alarm_state=bindparam('b_alarm_state'), - action_id=bindparam('b_action_id'))) - - @sql_repository.sql_try_catch_block - def get_alarm_definition(self, tenant_id, _id): - with self._db_engine.connect() as conn: - return self._get_alarm_definition(conn, tenant_id, _id) - - def _get_alarm_definition(self, conn, tenant_id, _id): - ad = self.ad_s - query = (self.base_query - .select_from(self.base_query_from) - .where(ad.c.tenant_id == bindparam('b_tenant_id')) - .where(ad.c.id == bindparam('b_id')) - .where(ad.c.deleted_at == null())) - - row = conn.execute( - query, - parameters={ - 'b_tenant_id': tenant_id, - 'b_id': _id - } - ).fetchone() - - if row is not None: - return row._mapping - else: - raise exceptions.DoesNotExistException - - @sql_repository.sql_try_catch_block - def get_alarm_definitions(self, tenant_id, name=None, dimensions=None, severity=None, - sort_by=None, offset=None, limit=1000): - - with self._db_engine.connect() as conn: - ad = self.ad_s - sad = self.sad.alias('sad') - sadd = self.sadd.alias('sadd') - query_from = self.base_query_from - - parms = {'b_tenant_id': tenant_id} - - if dimensions: - sadi = sad.c.alarm_definition_id - query_from = query_from.join(sad, sadi == ad.c.id) - - i = 0 - for n, v in dimensions.items(): - bind_dimension_name = 'b_sadd_dimension_name_{}'.format(i) - bind_value = 'b_sadd_value_{}'.format(i) - sadd_ = (select(sadd.c.sub_alarm_definition_id) - .select_from(sadd) - .where(sadd.c.dimension_name == bindparam(bind_dimension_name)) - .where(sadd.c.value == bindparam(bind_value)) - .distinct().alias('saad_{}'.format(i))) - - sadd_id = sadd_.c.sub_alarm_definition_id - query_from = query_from.join(sadd_, sadd_id == sad.c.id) - parms[bind_dimension_name] = n.encode('utf8') if six.PY2 else n - parms[bind_value] = v.encode('utf8') if six.PY2 else v - - i += 1 - - query = (self.base_query - .select_from(query_from) - .where(ad.c.tenant_id == bindparam('b_tenant_id')) - .where(ad.c.deleted_at == null())) - - if name: - query = query.where(ad.c.name == bindparam('b_name')) - parms['b_name'] = name.encode('utf-8') if six.PY2 else name - - if severity: - severities = severity.split('|') - query = query.where(or_(ad.c.severity == bindparam( - 'b_severity' + str(i)) for i in range(len(severities)))) - for i, s in enumerate(severities): - parms['b_severity' + str(i)] = s.encode('utf-8') if six.PY2 else s - - order_columns = [] - if sort_by is not None: - order_columns = [literal_column('ad.' 
+ col) for col in sort_by] - if 'id' not in sort_by: - order_columns.append(ad.c.id) - else: - order_columns = [ad.c.id] - - if offset: - query = query.offset(bindparam('b_offset')) - parms['b_offset'] = offset - - query = query.order_by(*order_columns) - - query = query.limit(bindparam('b_limit')) - - parms['b_limit'] = limit + 1 - - return [row._mapping for row in conn.execute(query, parms).fetchall()] - - @sql_repository.sql_try_catch_block - def get_sub_alarms(self, tenant_id, alarm_definition_id): - - with self._db_engine.connect() as conn: - return [row._mapping for row in conn.execute(self.get_sub_alarms_query, - parameters={ - 'b_tenant_id': tenant_id, - 'b_id': alarm_definition_id - }).fetchall()] - - @sql_repository.sql_try_catch_block - def get_alarm_metrics(self, tenant_id, alarm_definition_id): - with self._db_engine.connect() as conn: - return [row._mapping for row in conn.execute(self.get_alarm_metrics_query, - parameters={ - 'b_tenant_id': tenant_id, - 'b_id': alarm_definition_id - }).fetchall()] - - @sql_repository.sql_try_catch_block - def delete_alarm_definition(self, tenant_id, alarm_definition_id): - """Soft delete the alarm definition. - - Soft delete the alarm definition and hard delete any associated - alarms. - - :param tenant_id: - :param alarm_definition_id: - :returns True: -- if alarm definition exists and was deleted. - :returns False: -- if the alarm definition does not exist. - :raises RepositoryException: - """ - - with self._db_engine.begin() as conn: - cursor = conn.execute( - self.soft_delete_ad_query, - parameters={ - 'b_tenant_id': tenant_id, - 'b_id': alarm_definition_id - } - ) - - if cursor.rowcount < 1: - return False - - conn.execute( - self.delete_a_query, - parameters={ - 'b_tenant_id': tenant_id, - 'b_id': alarm_definition_id - } - ) - - return True - - @sql_repository.sql_try_catch_block - def get_sub_alarm_definitions(self, alarm_definition_id): - with self._db_engine.connect() as conn: - return self._get_sub_alarm_definitions(conn, alarm_definition_id) - - def _get_sub_alarm_definitions(self, conn, alarm_definition_id): - return [ - row._mapping for row in conn.execute( - self.get_sub_alarm_definitions_query, - parameters={ - 'b_alarm_definition_id': alarm_definition_id - } - ).fetchall() - ] - - @sql_repository.sql_try_catch_block - def create_alarm_definition(self, tenant_id, name, expression, - sub_expr_list, description, severity, match_by, - alarm_actions, undetermined_actions, - ok_actions): - with self._db_engine.begin() as conn: - - now = datetime.datetime.utcnow() - alarm_definition_id = uuidutils.generate_uuid() - - conn.execute( - self.create_alarm_definition_insert_ad_query, - parameters={ - 'b_id': alarm_definition_id, - 'b_tenant_id': tenant_id, - 'b_name': name.encode('utf8'), - 'b_description': description.encode('utf8'), - 'b_expression': expression.encode('utf8'), - 'b_severity': severity.upper().encode('utf8'), - 'b_match_by': ",".join(match_by).encode('utf8'), - 'b_actions_enabled': True, - 'b_created_at': now, - 'b_updated_at': now - } - ) - - for sub_expr in sub_expr_list: - sub_alarm_definition_id = uuidutils.generate_uuid() - sub_expr.id = sub_alarm_definition_id - metric_name = sub_expr.metric_name.encode("utf8") - operator = sub_expr.normalized_operator.encode('utf8') - conn.execute( - self.create_alarm_definition_insert_sad_query, - parameters={ - 'b_id': sub_alarm_definition_id, - 'b_alarm_definition_id': alarm_definition_id, - 'b_function': sub_expr.normalized_func.encode('utf8'), - 'b_metric_name': metric_name,
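The soft-delete contract documented in delete_alarm_definition above can be shown in isolation: the definition row is stamped rather than removed, while dependent alarms are deleted outright. A rough sketch under the same assumptions (SQLAlchemy 1.4+, toy tables; the real tables carry more columns):

    import datetime

    from sqlalchemy import (MetaData, Table, Column, String, DateTime,
                            bindparam, create_engine, delete, null, update)

    engine = create_engine('sqlite://')
    metadata = MetaData()
    ad = Table('alarm_definition', metadata,
               Column('id', String(36)),
               Column('deleted_at', DateTime))
    a = Table('alarm', metadata,
              Column('id', String(36)),
              Column('alarm_definition_id', String(36)))
    metadata.create_all(engine)

    # Soft delete: stamp the definition instead of removing the row...
    soft_delete_ad = (update(ad)
                      .where(ad.c.id == bindparam('b_id'))
                      .where(ad.c.deleted_at == null())  # skip already-deleted rows
                      .values(deleted_at=datetime.datetime.utcnow()))
    # ...but hard delete the dependent alarms.
    hard_delete_a = delete(a).where(a.c.alarm_definition_id == bindparam('b_id'))

    with engine.begin() as conn:
        if conn.execute(soft_delete_ad, {'b_id': '42'}).rowcount > 0:
            conn.execute(hard_delete_a, {'b_id': '42'})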
- 'b_operator': operator, - 'b_threshold': sub_expr.threshold, - 'b_period': sub_expr.period, - 'b_periods': sub_expr.periods, - 'b_is_deterministic': sub_expr.deterministic, - 'b_created_at': now, - 'b_updated_at': now - } - ) - - for dimension in sub_expr.dimensions_as_list: - parsed_dimension = dimension.split('=') - query = self.create_alarm_definition_insert_sadd_query - sadi = sub_alarm_definition_id - dimension_name = parsed_dimension[0].encode('utf8') - conn.execute( - query, - parameters={ - 'b_sub_alarm_definition_id': sadi, - 'b_dimension_name': dimension_name, - 'b_value': parsed_dimension[1].encode('utf8') - } - ) - - self._insert_into_alarm_action(conn, alarm_definition_id, - alarm_actions, u"ALARM") - self._insert_into_alarm_action(conn, alarm_definition_id, - undetermined_actions, - u"UNDETERMINED") - self._insert_into_alarm_action(conn, alarm_definition_id, - ok_actions, u"OK") - - return alarm_definition_id - - @sql_repository.sql_try_catch_block - def update_or_patch_alarm_definition(self, tenant_id, alarm_definition_id, - name, expression, - sub_expr_list, actions_enabled, - description, alarm_actions, - ok_actions, undetermined_actions, - match_by, severity, patch=False): - - with self._db_engine.begin() as conn: - original_row = self._get_alarm_definition(conn, - tenant_id, - alarm_definition_id) - rows = self._get_sub_alarm_definitions(conn, alarm_definition_id) - - old_sub_alarm_defs_by_id = {} - - for row in rows: - sad = sub_alarm_definition.SubAlarmDefinition(row=row) - old_sub_alarm_defs_by_id[sad.id] = sad - - if expression: - ( - changed_sub_alarm_defs_by_id, - new_sub_alarm_defs_by_id, - old_sub_alarm_defs_by_id, - unchanged_sub_alarm_defs_by_id - ) = self._determine_sub_expr_changes( - alarm_definition_id, old_sub_alarm_defs_by_id, - sub_expr_list) - - if old_sub_alarm_defs_by_id or new_sub_alarm_defs_by_id: - new_count = (len(new_sub_alarm_defs_by_id) + - len(changed_sub_alarm_defs_by_id) + - len(unchanged_sub_alarm_defs_by_id)) - old_count = len(old_sub_alarm_defs_by_id) - if new_count != old_count: - msg = 'number of subexpressions must not change' - else: - msg = 'metrics in subexpression must not change' - raise exceptions.InvalidUpdateException(msg.encode('utf8')) - else: - unchanged_sub_alarm_defs_by_id = old_sub_alarm_defs_by_id - changed_sub_alarm_defs_by_id = {} - new_sub_alarm_defs_by_id = {} - old_sub_alarm_defs_by_id = {} - - # Get a common update time - now = datetime.datetime.utcnow() - - if name is None: - new_name = original_row['name'] - else: - new_name = name.encode('utf-8') if six.PY2 else name - - if description is None: - if patch: - new_description = original_row['description'] - else: - new_description = '' - else: - new_description = description.encode('utf-8') if six.PY2 else description - - if expression is None: - new_expression = original_row['expression'] - else: - new_expression = expression.encode('utf8') if six.PY2 else expression - - if severity is None: - if patch: - new_severity = original_row['severity'] - else: - new_severity = 'LOW' - else: - new_severity = severity.encode('utf8') if six.PY2 else severity - - if match_by is None: - if patch: - new_match_by = original_row['match_by'] - else: - new_match_by = None - else: - match = ",".join(match_by) - new_match_by = match.encode('utf8') if six.PY2 else match - - if new_match_by != original_row['match_by']: - msg = u"match_by must not change" - raise exceptions.InvalidUpdateException(msg) - - if actions_enabled is None: - new_actions_enabled = 
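The field handling in update_or_patch_alarm_definition follows one rule: a PATCH keeps the stored value for any omitted field, while a full update falls back to a fixed default ('' for description, 'LOW' for severity, and so on). The rule reduced to a sketch (the helper name is illustrative, not from the original code):

    def effective_value(supplied, original, default, patch):
        # PATCH: omitted fields keep their stored values.
        # Full update: omitted fields are reset to defaults.
        if supplied is not None:
            return supplied
        return original if patch else default

    print(effective_value(None, 'HIGH', 'LOW', patch=True))   # HIGH
    print(effective_value(None, 'HIGH', 'LOW', patch=False))  # LOW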
original_row['actions_enabled'] - else: - new_actions_enabled = actions_enabled - - conn.execute(self.update_or_patch_alarm_definition_update_ad_query - .values( - name=bindparam('b_name'), - description=bindparam('b_description'), - expression=bindparam('b_expression'), - match_by=bindparam('b_match_by'), - severity=bindparam('b_severity'), - actions_enabled=bindparam('b_actions_enabled'), - updated_at=bindparam('b_updated_at') - ), - parameters={ - 'b_name': new_name, - 'b_description': new_description, - 'b_expression': new_expression, - 'b_match_by': new_match_by, - 'b_severity': new_severity, - 'b_actions_enabled': bool(new_actions_enabled), - 'b_updated_at': now, - 'b_tenant_id': tenant_id, - 'b_id': alarm_definition_id - }) - parms = [] - for sub_alarm_definition_id, sub_alarm_def in ( - changed_sub_alarm_defs_by_id.items()): - parms.append({'b_operator': sub_alarm_def.operator, - 'b_threshold': sub_alarm_def.threshold, - 'b_is_deterministic': sub_alarm_def.deterministic, - 'b_updated_at': now, - 'b_id': sub_alarm_definition_id}) - if len(parms) > 0: - query = self.update_or_patch_alarm_definition_update_sad_query - conn.execute(query, parms) - - # Delete old alarm actions - if patch: - if alarm_actions is not None: - self._delete_alarm_actions(conn, alarm_definition_id, - 'ALARM') - if ok_actions is not None: - self._delete_alarm_actions(conn, alarm_definition_id, - 'OK') - if undetermined_actions is not None: - self._delete_alarm_actions(conn, alarm_definition_id, - 'UNDETERMINED') - else: - conn.execute( - self.delete_aa_query, - parameters={ - 'b_alarm_definition_id': alarm_definition_id - } - ) - - # Insert new alarm actions - self._insert_into_alarm_action(conn, alarm_definition_id, - alarm_actions, - u"ALARM") - - self._insert_into_alarm_action(conn, alarm_definition_id, - undetermined_actions, - u"UNDETERMINED") - - self._insert_into_alarm_action(conn, alarm_definition_id, - ok_actions, - u"OK") - - ad = self.ad_s - query = (self.base_query - .select_from(self.base_query_from) - .where(ad.c.tenant_id == bindparam('b_tenant_id')) - .where(ad.c.id == bindparam('b_id')) - .where(ad.c.deleted_at == null())) - - updated_row = conn.execute( - query, - parameters={ - 'b_id': alarm_definition_id, - 'b_tenant_id': tenant_id - } - ).fetchone() - - # TODO(thuvh) need return not found exception - if updated_row is None: - raise Exception("Failed to find current alarm definition") - else: - updated_row = updated_row._mapping - - sub_alarm_defs_dict = {'old': old_sub_alarm_defs_by_id, - 'changed': changed_sub_alarm_defs_by_id, - 'new': new_sub_alarm_defs_by_id, - 'unchanged': unchanged_sub_alarm_defs_by_id} - - # Return the alarm def and the sub alarm defs - return updated_row, sub_alarm_defs_dict - - def _determine_sub_expr_changes(self, alarm_definition_id, - old_sub_alarm_defs_by_id, - sub_expr_list): - - old_sub_alarm_defs_set = set( - old_sub_alarm_defs_by_id.values()) - - new_sub_alarm_defs_set = set() - for sub_expr in sub_expr_list: - sad = sub_alarm_definition.SubAlarmDefinition( - sub_expr=sub_expr) - # Inject the alarm definition id. - sad.alarm_definition_id = alarm_definition_id.decode('utf8') if six.PY2 \ - else alarm_definition_id - new_sub_alarm_defs_set.add(sad) - - # Identify old or changed expressions - old_or_changed_sub_alarm_defs_set = ( - old_sub_alarm_defs_set - new_sub_alarm_defs_set) - # Identify new or changed expressions - new_or_changed_sub_alarm_defs_set = ( - new_sub_alarm_defs_set - old_sub_alarm_defs_set) - # Find changed expressions. O(n^2) == bad! 
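The set arithmetic in _determine_sub_expr_changes partitions sub-expressions before the pairwise scan. A standalone sketch with strings standing in for sub-alarm definitions (the real objects hash on their key fields):

    old = {'A', 'B', 'C'}    # stored sub-expressions
    new = {'A', 'B2', 'D'}   # incoming ones; B2 is B with only a new threshold
    old_or_changed = old - new   # {'B', 'C'}
    new_or_changed = new - old   # {'B2', 'D'}
    # A same_key_fields()-style scan then pairs 'B' with 'B2' as changed,
    # leaving 'C' genuinely removed and 'D' genuinely new.
    print(sorted(old_or_changed), sorted(new_or_changed))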
- # This algo may not work if sub expressions are duplicated. - changed_sub_alarm_defs_by_id = {} - old_or_changed_sub_alarm_defs_set_to_remove = set() - new_or_changed_sub_alarm_defs_set_to_remove = set() - for old_or_changed in old_or_changed_sub_alarm_defs_set: - for new_or_changed in new_or_changed_sub_alarm_defs_set: - if old_or_changed.same_key_fields(new_or_changed): - old_or_changed_sub_alarm_defs_set_to_remove.add( - old_or_changed - ) - new_or_changed_sub_alarm_defs_set_to_remove.add( - new_or_changed - ) - changed_sub_alarm_defs_by_id[ - old_or_changed.id] = ( - new_or_changed) - # patch id: - changed_sub_alarm_defs_by_id[ - old_or_changed.id].id = old_or_changed.id - old_or_changed_sub_alarm_defs_set = ( - old_or_changed_sub_alarm_defs_set - - old_or_changed_sub_alarm_defs_set_to_remove - ) - new_or_changed_sub_alarm_defs_set = ( - new_or_changed_sub_alarm_defs_set - - new_or_changed_sub_alarm_defs_set_to_remove - ) - # Create the list of unchanged expressions - unchanged_sub_alarm_defs_by_id = ( - old_sub_alarm_defs_by_id.copy()) - for old_sub_alarm_def in old_or_changed_sub_alarm_defs_set: - del unchanged_sub_alarm_defs_by_id[old_sub_alarm_def.id] - for sub_alarm_definition_id in ( - changed_sub_alarm_defs_by_id.keys()): - del unchanged_sub_alarm_defs_by_id[ - sub_alarm_definition_id] - - # Remove old sub expressions - temp = {} - for old_sub_alarm_def in old_or_changed_sub_alarm_defs_set: - temp[old_sub_alarm_def.id] = old_sub_alarm_def - old_sub_alarm_defs_by_id = temp - # Create IDs for new expressions - new_sub_alarm_defs_by_id = {} - for new_sub_alarm_def in new_or_changed_sub_alarm_defs_set: - sub_alarm_definition_id = uuidutils.generate_uuid() - new_sub_alarm_def.id = sub_alarm_definition_id - new_sub_alarm_defs_by_id[sub_alarm_definition_id] = ( - new_sub_alarm_def) - - return (changed_sub_alarm_defs_by_id, - new_sub_alarm_defs_by_id, - old_sub_alarm_defs_by_id, - unchanged_sub_alarm_defs_by_id) - - def _delete_alarm_actions(self, conn, _id, alarm_action_name): - conn.execute( - self.delete_aa_state_query, - parameters={ - 'b_alarm_definition_id': _id, - 'b_alarm_state': alarm_action_name - } - ) - - def _insert_into_alarm_action(self, conn, alarm_definition_id, actions, - alarm_state): - - if actions is None: - return - - for action in actions: - b_id = action.encode('utf8') if six.PY2 else action - row = conn.execute( - self.select_nm_query, - parameters={ - 'b_id': b_id - } - ).fetchone() - if row is None: - raise exceptions.InvalidUpdateException( - "Non-existent notification id {} submitted for {} " - "notification action".format(encodeutils.to_utf8(action), - encodeutils.to_utf8(alarm_state))) - conn.execute( - self.insert_aa_query, - parameters={ - 'b_alarm_definition_id': alarm_definition_id, - 'b_alarm_state': alarm_state.encode('utf8') if six.PY2 else alarm_state, - 'b_action_id': action.encode('utf8') if six.PY2 else action - } - ) diff --git a/monasca_api/common/repositories/sqla/alarms_repository.py b/monasca_api/common/repositories/sqla/alarms_repository.py deleted file mode 100644 index ac3a631c0..000000000 --- a/monasca_api/common/repositories/sqla/alarms_repository.py +++ /dev/null @@ -1,631 +0,0 @@ -# Copyright 2016 FUJITSU LIMITED -# (C) Copyright 2016-2017 Hewlett Packard Enterprise Development LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from datetime import datetime -from time import time - -import six - -from monasca_api.common.repositories import alarms_repository -from monasca_api.common.repositories import exceptions -from monasca_api.common.repositories.sqla import models -from monasca_api.common.repositories.sqla import sql_repository -from sqlalchemy import (MetaData, update, delete, select, text, column, - bindparam, func, literal_column, asc, desc) -from sqlalchemy import or_ - - -class AlarmsRepository(sql_repository.SQLRepository, - alarms_repository.AlarmsRepository): - - def __init__(self): - super(AlarmsRepository, self).__init__() - - metadata = MetaData() - self.a_du = models.create_a_model(metadata) - self.aa = models.create_aa_model(metadata).alias('aa') - self.sa = models.create_sa_model(metadata).alias('sa') - self.ad = models.create_ad_model(metadata).alias('ad') - self.am = models.create_am_model(metadata).alias('am') - self.md = models.create_md_model(metadata).alias('md') - self.mdd = models.create_mdd_model(metadata).alias('mdd') - self.mde = models.create_mde_model(metadata).alias('mde') - self.sad = models.create_sad_model(metadata).alias('sad') - self.sadd = models.create_sadd_model(metadata).alias('sadd') - a = self.a_du - self.a = a.alias('a') - a_s = self.a - sa = self.sa - ad = self.ad - am = self.am - md = self.md - mdd = self.mdd - mde = self.mde - - gc_columns = [md.c.name + text("'='") + md.c.value] - - mdg = (select(md.c.dimension_set_id, - models.group_concat(gc_columns).label('dimensions')) - .select_from(md) - .group_by(md.c.dimension_set_id).alias('mdg')) - - self.base_query_from = ( - a_s.join( - ad, - ad.c.id == a_s.c.alarm_definition_id) .join( - am, - am.c.alarm_id == a_s.c.id) .join( - mdd, - mdd.c.id == am.c.metric_definition_dimensions_id) .join( - mde, - mde.c.id == mdd.c.metric_definition_id) .outerjoin( - mdg, - mdg.c.dimension_set_id == mdd.c.metric_dimension_set_id)) - - self.base_query = select(a_s.c.id.label('alarm_id'), - a_s.c.state, - a_s.c.state_updated_at. 
- label('state_updated_timestamp'), - a_s.c.updated_at.label('updated_timestamp'), - a_s.c.created_at.label('created_timestamp'), - a_s.c.lifecycle_state, - a_s.c.link, - ad.c.id.label('alarm_definition_id'), - ad.c.name.label('alarm_definition_name'), - ad.c.severity, - mde.c.name.label('metric_name'), - mdg.c.dimensions.label('metric_dimensions')) - - self.base_subquery_list = (select(a_s.c.id) - .select_from(a_s.join(ad, a_s.c.alarm_definition_id == ad.c.id))) - - self.get_ad_query = (select(ad) - .select_from(ad.join(a, ad.c.id == a.c.alarm_definition_id)) - .where(ad.c.tenant_id == bindparam('b_tenant_id')) - .where(a.c.id == bindparam('b_id'))) - - self.get_am_query = (select(a_s.c.id.label('alarm_id'), - mde.c.name, - mdg.c.dimensions) - .select_from(a_s.join(am, am.c.alarm_id == a_s.c.id) - .join(mdd, - mdd.c.id == - am.c.metric_definition_dimensions_id) - .join(mde, mde.c.id == mdd.c.metric_definition_id) - .outerjoin(mdg, - mdg.c.dimension_set_id == - mdd.c.metric_dimension_set_id)) - .where(a_s.c.id == bindparam('b_id')) - .order_by(a_s.c.id) - .distinct()) - - self.get_sa_query = (select(sa.c.id.label('sub_alarm_id'), - sa.c.alarm_id, - sa.c.expression, - ad.c.id.label('alarm_definition_id')) - .select_from(sa.join(a_s, - a_s.c.id == sa.c.alarm_id) - .join(ad, - ad.c.id == a_s.c.alarm_definition_id)) - .where(ad.c.tenant_id == bindparam('b_tenant_id')) - .where(a_s.c.id == bindparam('b_id')) - .distinct()) - - self.get_a_query = (select(a_s.c.state, a_s.c.link, a_s.c.lifecycle_state) - .select_from(a_s.join(ad, - ad.c.id == a_s.c.alarm_definition_id)) - .where(ad.c.tenant_id == bindparam('b_tenant_id')) - .where(a_s.c.id == bindparam('b_id'))) - - self.get_a_ad_query = (select(a_s.c.id) - .select_from(a_s.join(ad, - ad.c.id == - a_s.c.alarm_definition_id)) - .where(ad.c.tenant_id == bindparam('b_tenant_id')) - .where(a_s.c.id == bindparam('b_id')) - .alias('a_ad')) - - select_tmp = (select(literal_column('id')) - .select_from(self.get_a_ad_query) - .distinct() - .alias('temporarytable')) - - self.delete_alarm_query = (delete(a) - .where(a.c.id.in_(select_tmp))) - - md_ = (select(mde.c.id) - .where(mde.c.name == bindparam('b_md_name')).alias('md_')) - - self.get_a_am_query = (select(a_s.c.id) - .select_from(a_s.join(am, - am.c.alarm_id == - a_s.c.id) - .join(mdd, - mdd.c.id == - am.c.metric_definition_dimensions_id) - .join(md_, - md_.c.id == - mdd.c.metric_definition_id))) - - @sql_repository.sql_try_catch_block - def get_alarm_definition(self, tenant_id, alarm_id): - with self._db_engine.connect() as conn: - row = conn.execute( - self.get_ad_query, - parameters={ - 'b_tenant_id': tenant_id, - 'b_id': alarm_id - }).fetchone() - - if row is not None: - return row._mapping - else: - raise exceptions.DoesNotExistException - - @sql_repository.sql_try_catch_block - def get_alarm_metrics(self, alarm_id): - - with self._db_engine.connect() as conn: - rows = conn.execute( - self.get_am_query, - parameters={'b_id': alarm_id} - ).fetchall() - return [row._mapping for row in rows] - - @sql_repository.sql_try_catch_block - def get_sub_alarms(self, tenant_id, alarm_id): - - with self._db_engine.connect() as conn: - rows = conn.execute( - self.get_sa_query, - parameters={ - 'b_tenant_id': tenant_id, - 'b_id': alarm_id - } - ).fetchall() - return [row._mapping for row in rows] - - @sql_repository.sql_try_catch_block - def update_alarm(self, tenant_id, _id, state, lifecycle_state, link): - - time_ms = int(round(time() * 1000.0)) - with self._db_engine.connect() as conn: - self.get_a_query.bind 
= self._db_engine - prev_alarm = conn.execute( - self.get_a_query, - parameters={ - 'b_tenant_id': tenant_id, - 'b_id': _id - } - ).fetchone() - - if prev_alarm is None: - raise exceptions.DoesNotExistException - else: - prev_alarm = prev_alarm._mapping - - parms = {'b_lifecycle_state': lifecycle_state, - 'b_link': link} - set_values = {'lifecycle_state': - bindparam('b_lifecycle_state'), - 'link': bindparam('b_link'), - 'updated_at': func.now()} - # TODO(thuvh) find better solution to get state from row - prev_state = prev_alarm['state'] - if state != prev_state: - parms['b_state'] = state - set_values['state'] = bindparam('b_state') - set_values['state_updated_at'] = func.now() - - parms['b_tenant_id'] = tenant_id - parms['b_id'] = _id - - select_tmp = (select(literal_column('id')) - .select_from(self.get_a_ad_query) - .distinct() - .alias('temporarytable')) - - a = self.a_du - update_query = (update(a) - .values(set_values) - .where(a.c.id.in_(select_tmp))) - - conn.execute(update_query, parms) - - # TODO(thuvh) find a better solution - conn.commit() - - return prev_alarm, time_ms - - @sql_repository.sql_try_catch_block - def delete_alarm(self, tenant_id, _id): - - with self._db_engine.connect() as conn: - cursor = conn.execute( - self.delete_alarm_query, - parameters={ - 'b_tenant_id': tenant_id, - 'b_id': _id - }) - # TODO(thuvh) find a better solution - conn.commit() - - if cursor.rowcount < 1: - raise exceptions.DoesNotExistException - - @sql_repository.sql_try_catch_block - def get_alarm(self, tenant_id, _id): - - with self._db_engine.connect() as conn: - ad = self.ad - a = self.a - query = (self.base_query - .select_from(self.base_query_from) - .where(ad.c.tenant_id == bindparam('b_tenant_id')) - .where(a.c.id == bindparam('b_id')) - .distinct()) - - rows = conn.execute( - query, - parameters={ - 'b_tenant_id': tenant_id, - 'b_id': _id - } - ).fetchall() - - if rows is None or len(rows) == 0: - raise exceptions.DoesNotExistException - - return [row._mapping for row in rows] - - @sql_repository.sql_try_catch_block - def get_alarms(self, tenant_id, query_parms=None, offset=None, limit=None): - if not query_parms: - query_parms = {} - - with self._db_engine.connect() as conn: - parms = {} - ad = self.ad - am = self.am - mdd = self.mdd - md = self.md - a = self.a - - query = (self.base_subquery_list - .where(ad.c.tenant_id == bindparam('b_tenant_id'))) - - parms['b_tenant_id'] = tenant_id - - if 'alarm_definition_id' in query_parms: - query = query.where(ad.c.id == - bindparam('b_alarm_definition_id')) - parms['b_alarm_definition_id'] = query_parms['alarm_definition_id'] - - if 'metric_name' in query_parms: - query = query.where(a.c.id.in_(self.get_a_am_query)) - parms['b_md_name'] = query_parms['metric_name'].encode('utf8') if six.PY2 else \ - query_parms['metric_name'] - - if 'severity' in query_parms: - severities = query_parms['severity'].split('|') - query = query.where(or_(ad.c.severity == bindparam( - 'b_severity' + str(i)) for i in range(len(severities)))) - for i, s in enumerate(severities): - parms['b_severity' + str(i)] = s if six.PY3 else s.encode('utf-8') - - if 'state' in query_parms: - query = query.where(a.c.state == bindparam('b_state')) - parms['b_state'] = query_parms['state'] - - if 'lifecycle_state' in query_parms: - query = (query - .where(a.c.lifecycle_state == - bindparam('b_lifecycle_state'))) - parms['b_lifecycle_state'] = query_parms['lifecycle_state'] \ - if six.PY3 else query_parms['lifecycle_state'].encode('utf8') - - if 'link' in query_parms: - query = 
query.where(a.c.link == bindparam('b_link')) - parms['b_link'] = query_parms['link'] if six.PY3 \ - else query_parms['link'].encode('utf8') - - if 'state_updated_start_time' in query_parms: - query = (query - .where(a.c.state_updated_at >= - bindparam('b_state_updated_at'))) - - date_str = query_parms['state_updated_start_time'] if six.PY3 \ - else query_parms['state_updated_start_time'].encode('utf8') - date_param = datetime.strptime(date_str, - '%Y-%m-%dT%H:%M:%S.%fZ') - parms['b_state_updated_at'] = date_param - - if 'metric_dimensions' in query_parms: - sub_query = select(a.c.id) - sub_query_from = (a.join(am, am.c.alarm_id == a.c.id) - .join(mdd, - mdd.c.id == - am.c.metric_definition_dimensions_id)) - - sub_query_md_base = select(md.c.dimension_set_id).select_from(md) - - for i, metric_dimension in enumerate(query_parms['metric_dimensions'].items()): - - md_name = "b_md_name_{}".format(i) - - values_cond = None - values_cond_flag = False - - if metric_dimension and metric_dimension[1]: - if '|' in metric_dimension[1]: - values = metric_dimension[1].encode('utf8').split('|') if six.PY2 else \ - metric_dimension[1].split('|') - sub_values_cond = [] - for j, value in enumerate(values): - sub_md_value = "b_md_value_{}_{}".format(i, j) - sub_values_cond.append(md.c.value == bindparam(sub_md_value)) - parms[sub_md_value] = value - values_cond = or_(*sub_values_cond) - values_cond_flag = True - else: - md_value = "b_md_value_{}".format(i) - values_cond = (md.c.value == bindparam(md_value)) - values_cond_flag = True - parms[md_value] = metric_dimension[1] - - sub_query_md = (sub_query_md_base - .where(md.c.name == bindparam(md_name))) - if values_cond_flag: - sub_query_md = (sub_query_md - .where(values_cond)) - - sub_query_md = (sub_query_md - .distinct() - .alias('md_{}'.format(i))) - - sub_query_from = (sub_query_from - .join(sub_query_md, - sub_query_md.c.dimension_set_id == - mdd.c.metric_dimension_set_id)) - - parms[md_name] = metric_dimension[0].encode('utf8') if six.PY2 else \ - metric_dimension[0] - - sub_query = (sub_query - .select_from(sub_query_from) - .distinct()) - query = query.where(a.c.id.in_(sub_query)) - order_columns = [] - if 'sort_by' in query_parms: - columns_mapper = \ - {'alarm_id': a.c.id, - 'alarm_definition_id': ad.c.id, - 'alarm_definition_name': ad.c.name, - 'state_updated_timestamp': a.c.state_updated_at, - 'updated_timestamp': a.c.updated_at, - 'created_timestamp': a.c.created_at, - 'severity': models.field_sort(ad.c.severity, list(map(text, ["'LOW'", - "'MEDIUM'", - "'HIGH'", - "'CRITICAL'"]))), - 'state': models.field_sort(a.c.state, list(map(text, ["'OK'", - "'UNDETERMINED'", - "'ALARM'"])))} - - order_columns, received_cols = self._remap_columns( - query_parms['sort_by'], columns_mapper) - - if not received_cols.get('alarm_id', False): - order_columns.append(a.c.id) - else: - order_columns = [a.c.id] - - if limit: - query = query.limit(bindparam('b_limit')) - parms['b_limit'] = limit + 1 - - if offset: - query = query.offset(bindparam('b_offset')) - parms['b_offset'] = offset - - query = (query - .order_by(*order_columns) - .alias('alarm_id_list')) - - main_query = (self.base_query - .select_from(self.base_query_from - .join(query, query.c.id == a.c.id)) - .distinct()) - - main_query = main_query.order_by(*order_columns) - - return [row._mapping for row in conn.execute(main_query, parms).fetchall()] - - def _remap_columns(self, columns, columns_mapper): - received_cols = {} - order_columns = [] - for col in columns: - col_values = col.split() - col_name = 
col_values[0] - order_column = columns_mapper.get(col_name, literal_column(col_name)) - if len(col_values) > 1: - mode = col_values[1] - if mode == 'asc': - order_column = asc(order_column) - elif mode == 'desc': - order_column = desc(order_column) - order_columns.append(order_column) - received_cols[col_name] = True - return order_columns, received_cols - - @sql_repository.sql_try_catch_block - def get_alarms_count(self, tenant_id, query_parms=None, offset=None, limit=None): - if not query_parms: - query_parms = {} - - with self._db_engine.connect() as conn: - parms = {} - ad = self.ad - am = self.am - mdd = self.mdd - mde = self.mde - md = self.md - a = self.a - - query_from = a.join(ad, ad.c.id == a.c.alarm_definition_id) - - parms['b_tenant_id'] = tenant_id - - group_by_columns = [] - - if 'group_by' in query_parms: - group_by_columns = query_parms['group_by'] - sub_group_by_columns = [] - metric_group_by = {'metric_name', - 'dimension_name', - 'dimension_value'}.intersection(set(query_parms['group_by'])) - if metric_group_by: - sub_query_columns = [am.c.alarm_id] - if 'metric_name' in metric_group_by: - sub_group_by_columns.append(mde.c.name.label('metric_name')) - if 'dimension_name' in metric_group_by: - sub_group_by_columns.append(md.c.name.label('dimension_name')) - if 'dimension_value' in metric_group_by: - sub_group_by_columns.append(md.c.value.label('dimension_value')) - - sub_query_columns.extend(sub_group_by_columns) - - sub_query_from = ( - mde.join( - mdd, mde.c.id == mdd.c.metric_definition_id) .join( - md, mdd.c.metric_dimension_set_id == md.c.dimension_set_id) .join( - am, am.c.metric_definition_dimensions_id == mdd.c.id)) - - sub_query = (select(*sub_query_columns) - .select_from(sub_query_from) - .distinct() - .alias('metrics')) - - query_from = query_from.join(sub_query, sub_query.c.alarm_id == a.c.id) - - query_columns = [func.count().label('count')] - query_columns.extend([column(col) for col in group_by_columns]) - - query = (select(*query_columns) - .select_from(query_from) - .where(ad.c.tenant_id == bindparam('b_tenant_id'))) - - parms['b_tenant_id'] = tenant_id - - if 'alarm_definition_id' in query_parms: - parms['b_alarm_definition_id'] = query_parms['alarm_definition_id'] - query = query.where(ad.c.id == bindparam('b_alarm_definition_id')) - - if 'state' in query_parms: - parms['b_state'] = query_parms['state'] if six.PY3 else \ - query_parms['state'].encode('utf8') - query = query.where(a.c.state == bindparam('b_state')) - - if 'severity' in query_parms: - severities = query_parms['severity'].split('|') - query = query.where(or_(ad.c.severity == bindparam( - 'b_severity' + str(i)) for i in range(len(severities)))) - for i, s in enumerate(severities): - parms['b_severity' + str(i)] = s if six.PY3 else s.encode('utf8') - - if 'lifecycle_state' in query_parms: - parms['b_lifecycle_state'] = query_parms['lifecycle_state'] if six.PY3 else \ - query_parms['lifecycle_state'].encode('utf8') - query = query.where(a.c.lifecycle_state == bindparam('b_lifecycle_state')) - - if 'link' in query_parms: - parms['b_link'] = query_parms['link'] if six.PY3 else \ - query_parms['link'].encode('utf8') - query = query.where(a.c.link == bindparam('b_link')) - - if 'state_updated_start_time' in query_parms: - date_str = query_parms['state_updated_start_time'] if six.PY3 \ - else query_parms['state_updated_start_time'].encode('utf8') - date_param = datetime.strptime(date_str, '%Y-%m-%dT%H:%M:%S.%fZ') - parms['b_state_updated_at'] = date_param - query = 
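_remap_columns above turns entries such as 'severity desc' into ordered column expressions; the parsing step alone, assuming SQLAlchemy 1.4+ (the empty mapper is illustrative):

    from sqlalchemy import asc, desc, literal_column

    def remap(col, columns_mapper):
        col_values = col.split()
        order_column = columns_mapper.get(col_values[0],
                                          literal_column(col_values[0]))
        if len(col_values) > 1:
            if col_values[1] == 'asc':
                order_column = asc(order_column)
            elif col_values[1] == 'desc':
                order_column = desc(order_column)
        return order_column

    print(remap('severity desc', {}))  # severity DESC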
query.where(a.c.state_updated_at >= bindparam('b_state_updated_at')) - - if 'metric_name' in query_parms: - query = query.where(a.c.id.in_(self.get_a_am_query)) - parms['b_md_name'] = query_parms['metric_name'] if six.PY3 else \ - query_parms['metric_name'].encode('utf8') - - if 'metric_dimensions' in query_parms: - sub_query = select(a.c.id) - sub_query_from = (a.join(am, am.c.alarm_id == a.c.id) - .join(mdd, - mdd.c.id == - am.c.metric_definition_dimensions_id)) - - sub_query_md_base = select(md.c.dimension_set_id).select_from(md) - - for i, metric_dimension in enumerate(query_parms['metric_dimensions'].items()): - dimension_value = metric_dimension[1] if six.PY3 else \ - metric_dimension[1].encode('utf8') - - if '|' in dimension_value: - dimension_value = tuple(dimension_value.split('|')) - - md_name = "b_md_name_{}".format(i) - md_value = "b_md_value_{}".format(i) - - sub_query_md = (sub_query_md_base - .where(md.c.name == bindparam(md_name))) - - if isinstance(dimension_value, tuple): - sub_query_md = (sub_query_md - .where(md.c.value.op('IN')(bindparam(md_value)))) - else: - sub_query_md = (sub_query_md - .where(md.c.value == bindparam(md_value))) - - sub_query_md = (sub_query_md - .distinct() - .alias('md_{}'.format(i))) - - sub_query_from = (sub_query_from - .join(sub_query_md, - sub_query_md.c.dimension_set_id == - mdd.c.metric_dimension_set_id)) - - parms[md_name] = metric_dimension[0] if six.PY3 else \ - metric_dimension[0].encode('utf8') - parms[md_value] = dimension_value - - sub_query = (sub_query - .select_from(sub_query_from) - .distinct()) - query = query.where(a.c.id.in_(sub_query)) - - if group_by_columns: - query = (query - .order_by(*group_by_columns) - .group_by(*group_by_columns)) - - if limit: - query = query.limit(bindparam('b_limit')) - parms['b_limit'] = limit + 1 - - if offset: - query = query.offset(bindparam('b_offset')) - parms['b_offset'] = offset - - query = query.distinct() - return [row._mapping for row in conn.execute(query, parms).fetchall()] diff --git a/monasca_api/common/repositories/sqla/models.py b/monasca_api/common/repositories/sqla/models.py deleted file mode 100644 index 0e52b0e46..000000000 --- a/monasca_api/common/repositories/sqla/models.py +++ /dev/null @@ -1,265 +0,0 @@ -# Copyright 2015 Robin Hood -# Copyright 2016 FUJITSU LIMITED -# (C) Copyright 2016 Hewlett Packard Enterprise Development LP -# Copyright 2018 SUSE Linux GmbH -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from sqlalchemy import Column -from sqlalchemy.ext import compiler -from sqlalchemy.sql import expression -from sqlalchemy import String, DateTime, Boolean, Integer, LargeBinary, Float -from sqlalchemy import MetaData -from sqlalchemy import Table - - -def get_all_metadata(): - """Return metadata for full data model - - This metadata is used for autogenerating a complete (works on an empty - database) schema migration. To ensure this mechanism keeps working please - invoke any new model creation methods you add from this function as well. 
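For example (illustrative, assuming an empty SQLite database), the full schema can be materialised with:

    from sqlalchemy import create_engine
    get_all_metadata().create_all(create_engine('sqlite://'))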
- - """ - metadata = MetaData() - create_a_model(metadata) - create_aa_model(metadata) - create_ad_model(metadata) - create_am_model(metadata) - create_nm_model(metadata) - create_md_model(metadata) - create_mde_model(metadata) - create_mdd_model(metadata) - create_sa_model(metadata) - create_sad_model(metadata) - create_sadd_model(metadata) - create_nmt_model(metadata) - - return metadata - - -def create_a_model(metadata=None): - return Table('alarm', metadata, - Column('id', String(36)), - Column('alarm_definition_id', String(36)), - Column('state', String(20)), - Column('lifecycle_state', String(50)), - Column('link', String(512)), - Column('created_at', DateTime), - Column('state_updated_at', DateTime), - Column('updated_at', DateTime)) - - -def create_sadd_model(metadata=None): - return Table('sub_alarm_definition_dimension', metadata, - Column('sub_alarm_definition_id', String(36)), - Column('dimension_name', String(255)), - Column('value', String(255))) - - -def create_aa_model(metadata=None): - return Table('alarm_action', metadata, - Column('alarm_definition_id', String(36)), - Column('alarm_state', String(20)), - Column('action_id', String(36))) - - -def create_md_model(metadata=None): - return Table('metric_dimension', metadata, - Column('dimension_set_id', LargeBinary), - Column('name', String(255)), - Column('value', String(255))) - - -def create_mde_model(metadata=None): - return Table('metric_definition', metadata, - Column('id', LargeBinary), - Column('name', String(255)), - Column('tenant_id', String(255)), - Column('region', String(255))) - - -def create_nm_model(metadata=None): - return Table('notification_method', metadata, - Column('id', String(36)), - Column('tenant_id', String(36)), - Column('name', String(250)), - Column('type', String(20)), - Column('address', String(512)), - Column('period', Integer), - Column('created_at', DateTime), - Column('updated_at', DateTime)) - - -def create_nmt_model(metadata=None): - return Table('notification_method_type', metadata, - Column('name', String(20), primary_key=True)) - - -def create_mdd_model(metadata=None): - return Table('metric_definition_dimensions', metadata, - Column('id', LargeBinary), - Column('metric_definition_id', LargeBinary), - Column('metric_dimension_set_id', LargeBinary)) - - -def create_am_model(metadata=None): - return Table('alarm_metric', metadata, - Column('alarm_id', String(36)), - Column('metric_definition_dimensions_id', LargeBinary)) - - -def create_ad_model(metadata=None): - return Table('alarm_definition', metadata, - Column('id', String(36)), - Column('tenant_id', String(36)), - Column('name', String(255)), - Column('description', String(255)), - Column('expression', String), - Column('severity', String(20)), - Column('match_by', String(255)), - Column('actions_enabled', Boolean), - Column('created_at', DateTime), - Column('updated_at', DateTime), - Column('deleted_at', DateTime)) - - -def create_sa_model(metadata=None): - return Table('sub_alarm', metadata, - Column('id', String(36)), - Column('alarm_id', String(36)), - Column('sub_expression_id', String(36)), - Column('expression', String), - Column('created_at', DateTime), - Column('updated_at', DateTime)) - - -def create_sad_model(metadata=None): - return Table('sub_alarm_definition', metadata, - Column('id', String(36)), - Column('alarm_definition_id', String(36)), - Column('function', String(10)), - Column('metric_name', String(100)), - Column('operator', String(5)), - Column('threshold', Float), - Column('period', Integer), - 
Column('periods', Integer), - Column('is_deterministic', Boolean), - Column('created_at', DateTime), - Column('updated_at', DateTime)) - - -class group_concat(expression.ColumnElement): - name = "group_concat" - order_by = None - separator = ',' - columns = [] - - def __init__(self, columns, separator=',', order_by=None): - self.order_by = order_by - self.separator = separator - self.columns = columns - - -@compiler.compiles(group_concat, 'oracle') -def _group_concat_oracle(element, compiler_, **kw): - str_order_by = '' - if element.order_by is not None and len(element.order_by) > 0: - str_order_by = "ORDER BY {0}".format( - ", ".join([compiler_.process(x) for x in element.order_by])) - else: - str_order_by = "ORDER BY {0}".format( - ", ".join([compiler_.process(x) for x in element.columns])) - return "LISTAGG({0}, '{2}') WITHIN GROUP ({1})".format( - ", ".join([compiler_.process(x) for x in element.columns]), - str_order_by, - element.separator, - ) - - -@compiler.compiles(group_concat, 'postgresql') -def _group_concat_postgresql(element, compiler_, **kw): - str_order_by = '' - if element.order_by is not None and len(element.order_by) > 0: - str_order_by = "ORDER BY {0}".format( - ", ".join([compiler_.process(x) for x in element.order_by])) - - return "STRING_AGG({0}, '{2}' {1})".format( - ", ".join([compiler_.process(x) for x in element.columns]), - str_order_by, - element.separator, - ) - - -@compiler.compiles(group_concat, 'sybase') -def _group_concat_sybase(element, compiler_, **kw): - return "LIST({0}, '{1}')".format( - ", ".join([compiler_.process(x) for x in element.columns]), - element.separator, - ) - - -@compiler.compiles(group_concat, 'mysql') -def _group_concat_mysql(element, compiler_, **kw): - str_order_by = '' - if element.order_by is not None and len(element.order_by) > 0: - str_order_by = "ORDER BY {0}".format( - ",".join([compiler_.process(x) for x in element.order_by])) - return "GROUP_CONCAT({0} {1} SEPARATOR '{2}')".format( - ", ".join([compiler_.process(x) for x in element.columns]), - str_order_by, - element.separator, - ) - - -@compiler.compiles(group_concat) -def _group_concat_default(element, compiler_, **kw): - return "GROUP_CONCAT({0}, '{1}')".format( - ", ".join([compiler_.process(x) for x in element.columns]), - element.separator, - ) - - -class field_sort(expression.ColumnElement): - name = "field_sort" - column = None - fields = [] - - def __init__(self, column, fields): - self.column = column - self.fields = fields - - -@compiler.compiles(field_sort, "mysql") -def _field_sort_mysql(element, compiler_, **kw): - if element.fields: - return "FIELD({0}, {1})".format(compiler_.process(element.column), - ", ".join(map(compiler_.process, - element.fields))) - else: - return str(compiler_.process(element.column)) - - -@compiler.compiles(field_sort) -def _field_sort_general(element, compiler_, **kw): - fields_list = [] - fields = list(element.fields) - if fields: - fields_list.append("CASE") - for idx, field in enumerate(fields): - fields_list.append("WHEN {0}={1} THEN {2}".format(compiler_.process(element.column), - compiler_.process(field), - idx)) - fields_list.append("ELSE {0}".format(len(fields))) - fields_list.append("END") - return " ".join(fields_list) diff --git a/monasca_api/common/repositories/sqla/notification_method_type_repository.py b/monasca_api/common/repositories/sqla/notification_method_type_repository.py deleted file mode 100644 index 7268c4db0..000000000 --- a/monasca_api/common/repositories/sqla/notification_method_type_repository.py +++ 
/dev/null @@ -1,41 +0,0 @@ -# (C) Copyright 2016 Hewlett Packard Enterprise Development LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -from monasca_api.common.repositories import notification_method_type_repository as nr -from monasca_api.common.repositories.sqla import models -from monasca_api.common.repositories.sqla import sql_repository -from sqlalchemy import MetaData -from sqlalchemy import select - - -class NotificationMethodTypeRepository(sql_repository.SQLRepository, - nr.NotificationMethodTypeRepository): - def __init__(self): - - super(NotificationMethodTypeRepository, self).__init__() - - metadata = MetaData() - self.nmt = models.create_nmt_model(metadata) - - nmt = self.nmt - self._nmt_query = select(nmt.c.name) - - @sql_repository.sql_try_catch_block - def list_notification_method_types(self): - - with self._db_engine.connect() as conn: - notification_method_types = conn.execute(self._nmt_query).fetchall() - - return [row[0] for row in notification_method_types] diff --git a/monasca_api/common/repositories/sqla/notifications_repository.py b/monasca_api/common/repositories/sqla/notifications_repository.py deleted file mode 100644 index f47ba4cef..000000000 --- a/monasca_api/common/repositories/sqla/notifications_repository.py +++ /dev/null @@ -1,252 +0,0 @@ -# (C) Copyright 2014,2016 Hewlett Packard Enterprise Development Company LP -# Copyright 2016 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
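list_notification_method_types above unpacks the first column of each row; the same shape works against any single-column select. A self-contained sketch assuming SQLAlchemy 1.4+ and SQLite (values are illustrative):

    from sqlalchemy import (MetaData, Table, Column, String, create_engine,
                            insert, select)

    engine = create_engine('sqlite://')
    metadata = MetaData()
    nmt = Table('notification_method_type', metadata,
                Column('name', String(20), primary_key=True))
    metadata.create_all(engine)

    with engine.begin() as conn:
        conn.execute(insert(nmt), [{'name': 'EMAIL'}, {'name': 'WEBHOOK'}])
        rows = conn.execute(select(nmt.c.name)).fetchall()
        print([row[0] for row in rows])  # e.g. ['EMAIL', 'WEBHOOK']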
- -import datetime - -from oslo_utils import uuidutils -import six - -from monasca_api.common.repositories import exceptions -from monasca_api.common.repositories import notifications_repository as nr -from monasca_api.common.repositories.sqla import models -from monasca_api.common.repositories.sqla import sql_repository -from sqlalchemy import MetaData, update, insert, delete -from sqlalchemy import select, bindparam, func, and_, literal_column - - -class NotificationsRepository(sql_repository.SQLRepository, - nr.NotificationsRepository): - def __init__(self): - - super(NotificationsRepository, self).__init__() - - metadata = MetaData() - self.nm = models.create_nm_model(metadata) - - nm = self.nm - - self._select_nm_count_name_query = (select(func.count()) - .select_from(nm) - .where( - and_(nm.c.tenant_id == bindparam('b_tenant_id'), - nm.c.name == bindparam('b_name')))) - - self._select_nm_count_id_query = (select(func.count()) - .select_from(nm) - .where( - and_(nm.c.tenant_id == bindparam('b_tenant_id'), - nm.c.id == bindparam('b_id')))) - - self._insert_nm_query = (insert(nm) - .values( - id=bindparam('b_id'), - tenant_id=bindparam('b_tenant_id'), - name=bindparam('b_name'), - type=bindparam('b_type'), - address=bindparam('b_address'), - period=bindparam('b_period'), - created_at=bindparam('b_created_at'), - updated_at=bindparam('b_updated_at'))) - - self._delete_nm_query = (delete(nm) - .where(nm.c.tenant_id == bindparam('b_tenant_id')) - .where(nm.c.id == bindparam('b_id'))) - - self._update_nm_query = (update(nm) - .where(nm.c.tenant_id == bindparam('b_tenant_id')) - .where(nm.c.id == bindparam('b_id')) - .values( - name=bindparam('b_name'), - type=bindparam('b_type'), - address=bindparam('b_address'), - period=bindparam('b_period'), - updated_at=bindparam('b_updated_at'))) - - self._select_nm_id_query = (select(nm) - .where( - and_(nm.c.tenant_id == bindparam('b_tenant_id'), - nm.c.id == bindparam('b_id')))) - - self._select_nm_name_query = (select(nm) - .where( - and_(nm.c.tenant_id == bindparam('b_tenant_id'), - nm.c.name == bindparam('b_name')))) - - def create_notification(self, tenant_id, name, - notification_type, address, period): - - with self._db_engine.connect() as conn: - row = conn.execute( - self._select_nm_count_name_query, - parameters={ - 'b_tenant_id': tenant_id, - 'b_name': name.encode('utf8') - } - ).fetchone() - - if int(row[0]) > 0: - raise exceptions.AlreadyExistsException('Notification already ' - 'exists') - - now = datetime.datetime.utcnow() - notification_id = uuidutils.generate_uuid() - - conn.execute( - self._insert_nm_query, - parameters={ - 'b_id': notification_id, - 'b_tenant_id': tenant_id, - 'b_name': name.encode('utf8'), - 'b_type': notification_type.encode('utf8'), - 'b_address': address.encode('utf8'), - 'b_period': period, - 'b_created_at': now, - 'b_updated_at': now - } - ) - - # TODO(thuvh) need check better solution - conn.commit() - - return notification_id - - @sql_repository.sql_try_catch_block - def list_notifications(self, tenant_id, sort_by, offset, limit): - - rows = [] - - with self._db_engine.connect() as conn: - nm = self.nm - - select_nm_query = (select(nm) - .where(nm.c.tenant_id == bindparam('b_tenant_id'))) - - parms = {'b_tenant_id': tenant_id} - - if sort_by is not None: - order_columns = [literal_column(col) for col in sort_by] - if 'id' not in sort_by: - order_columns.append(nm.c.id) - else: - order_columns = [nm.c.id] - - select_nm_query = select_nm_query.order_by(*order_columns) - - select_nm_query = (select_nm_query - 
.order_by(nm.c.id) - .limit(bindparam('b_limit'))) - - parms['b_limit'] = limit + 1 - - if offset: - select_nm_query = select_nm_query.offset(bindparam('b_offset')) - parms['b_offset'] = offset - - rows = conn.execute(select_nm_query, parms).fetchall() - - return [row._mapping for row in rows] - - @sql_repository.sql_try_catch_block - def delete_notification(self, tenant_id, _id): - - with self._db_engine.connect() as conn: - - row = conn.execute( - self._select_nm_count_id_query, - parameters={ - 'b_tenant_id': tenant_id, - 'b_id': _id - } - ).fetchone() - - if int(row[0]) < 1: - raise exceptions.DoesNotExistException - - conn.execute( - self._delete_nm_query, - parameters={ - 'b_tenant_id': tenant_id, - 'b_id': _id - } - ) - - # TODO(thuvh) need check better solution - conn.commit() - - @sql_repository.sql_try_catch_block - def list_notification(self, tenant_id, notification_id): - - with self._db_engine.connect() as conn: - - row = conn.execute( - self._select_nm_id_query, - parameters={ - 'b_tenant_id': tenant_id, - 'b_id': notification_id - } - ).fetchone() - - if row is not None: - return row._mapping - else: - raise exceptions.DoesNotExistException - - @sql_repository.sql_try_catch_block - def find_notification_by_name(self, tenant_id, name): - name = name if six.PY3 else name.encode('utf8') - with self._db_engine.connect() as conn: - row = conn.execute( - self._select_nm_name_query, - parameters={ - 'b_tenant_id': tenant_id, - 'b_name': name - } - ).fetchone() - - if row is not None: - return row._mapping - else: - raise exceptions.DoesNotExistException('Not Found') - - @sql_repository.sql_try_catch_block - def update_notification( - self, - notification_id, - tenant_id, - name, - notification_type, - address, - period): - with self._db_engine.connect() as conn: - now = datetime.datetime.utcnow() - - cursor = conn.execute( - self._update_nm_query, - parameters={ - 'b_id': notification_id, - 'b_tenant_id': tenant_id, - 'b_name': name.encode('utf8'), - 'b_type': notification_type.encode('utf8'), - 'b_address': address.encode('utf8'), - 'b_period': period, - 'b_updated_at': now - } - ) - - # TODO(thuvh) need check better solution - conn.commit() - - if cursor.rowcount < 1: - raise exceptions.DoesNotExistException('Not Found') diff --git a/monasca_api/common/repositories/sqla/sql_repository.py b/monasca_api/common/repositories/sqla/sql_repository.py deleted file mode 100644 index 4bedbbe94..000000000 --- a/monasca_api/common/repositories/sqla/sql_repository.py +++ /dev/null @@ -1,105 +0,0 @@ -# Copyright 2014 Hewlett-Packard -# Copyright 2016 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
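list_notifications above, like the alarm listings earlier, binds limit + 1 so the caller can tell whether a further page exists without a second COUNT query. The convention in isolation (SQLAlchemy 1.4+, toy table, illustrative values):

    from sqlalchemy import (MetaData, Table, Column, Integer, bindparam,
                            create_engine, insert, select)

    engine = create_engine('sqlite://')
    metadata = MetaData()
    nm = Table('notification_method', metadata, Column('id', Integer))
    metadata.create_all(engine)

    with engine.begin() as conn:
        conn.execute(insert(nm), [{'id': i} for i in range(5)])
        limit = 2
        query = select(nm).order_by(nm.c.id).limit(bindparam('b_limit'))
        rows = conn.execute(query, {'b_limit': limit + 1}).fetchall()
        # The extra row, if present, signals a next page and is dropped.
        page, has_next = rows[:limit], len(rows) > limit
        print([r.id for r in page], has_next)  # [0, 1] True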
- -from oslo_config import cfg -from oslo_db.sqlalchemy import enginefacade -from oslo_log import log - -import sqlalchemy - -from monasca_api.common.repositories import exceptions - -LOG = log.getLogger(__name__) -CONF = cfg.CONF - - -def _get_db_conf(conf_group, connection=None): - return dict( - connection=connection or conf_group.connection, - slave_connection=conf_group.slave_connection, - sqlite_fk=False, - __autocommit=True, - expire_on_commit=False, - mysql_sql_mode=conf_group.mysql_sql_mode, - connection_recycle_time=conf_group.connection_recycle_time, - connection_debug=conf_group.connection_debug, - max_pool_size=conf_group.max_pool_size, - max_overflow=conf_group.max_overflow, - pool_timeout=conf_group.pool_timeout, - sqlite_synchronous=conf_group.sqlite_synchronous, - connection_trace=conf_group.connection_trace, - max_retries=conf_group.max_retries, - retry_interval=conf_group.retry_interval - ) - - -def create_context_manager(connection=None): - """Create a database context manager object. - - :param connection: The database connection string - """ - ctxt_mgr = enginefacade.transaction_context() - ctxt_mgr.configure(**_get_db_conf(CONF.database, connection=connection)) - return ctxt_mgr - - -def get_engine(use_slave=False, connection=None): - """Get a database engine object. - - :param use_slave: Whether to use the slave connection - :param connection: The database connection string - """ - ctxt_mgr = create_context_manager(connection=connection) - if use_slave: - return ctxt_mgr.reader.get_engine() - return ctxt_mgr.writer.get_engine() - - -class SQLRepository(object): - - def __init__(self): - - try: - super(SQLRepository, self).__init__() - self.conf = CONF - if not hasattr(self.conf, 'sql_engine'): - self.conf.sql_engine = get_engine() - self._db_engine = self.conf.sql_engine - self.metadata = sqlalchemy.MetaData() - - except Exception as ex: - LOG.exception(ex) - raise exceptions.RepositoryException(ex) - - -def sql_try_catch_block(fun): - def try_it(*args, **kwargs): - - try: - - return fun(*args, **kwargs) - - except exceptions.DoesNotExistException: - raise - except exceptions.InvalidUpdateException: - raise - except exceptions.AlreadyExistsException: - raise - except Exception as ex: - LOG.exception(ex) - raise - # exceptions.RepositoryException(ex) - - return try_it diff --git a/monasca_api/common/rest/__init__.py b/monasca_api/common/rest/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/monasca_api/common/rest/exceptions.py b/monasca_api/common/rest/exceptions.py deleted file mode 100644 index 7054ff979..000000000 --- a/monasca_api/common/rest/exceptions.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2015 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
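sql_try_catch_block above lets domain exceptions pass through untouched and logs anything unexpected before re-raising. Its shape, reduced to a self-contained sketch (exception and function names shortened for illustration):

    import functools
    import logging

    LOG = logging.getLogger(__name__)

    class DoesNotExistException(Exception):
        pass

    def sql_try_catch_block(fun):
        @functools.wraps(fun)
        def try_it(*args, **kwargs):
            try:
                return fun(*args, **kwargs)
            except DoesNotExistException:
                raise  # domain errors are translated by the API layer
            except Exception as ex:
                LOG.exception(ex)  # unexpected errors are logged, then re-raised
                raise
        return try_it

    @sql_try_catch_block
    def fetch():
        raise DoesNotExistException()

    try:
        fetch()
    except DoesNotExistException:
        print('caller maps this to HTTP 404')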
- - class UnsupportedContentTypeException(Exception): - """Exception thrown if content type is not supported.""" - pass - - class UnreadableContentError(IOError): - """Exception thrown if reading data fails - - :py:class:`.UnreadableContentError` may be thrown - if data was impossible to read from input - - """ - pass - - class DataConversionException(Exception): - """Exception thrown if data transformation fails - - :py:class:`.DataConversionException` may be thrown - if data was impossible to transform into target - representation according to content_type classifier. - - """ - pass diff --git a/monasca_api/common/rest/utils.py b/monasca_api/common/rest/utils.py deleted file mode 100644 index f6c33b35a..000000000 --- a/monasca_api/common/rest/utils.py +++ /dev/null @@ -1,115 +0,0 @@ -# Copyright 2015 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import simplejson as json -import six - -from monasca_api.common.rest import exceptions - -ENCODING = 'utf8' - -TEXT_CONTENT_TYPE = 'text/plain' -JSON_CONTENT_TYPE = 'application/json' - - -def _try_catch(fun): - - @six.wraps(fun) - def wrapper(*args, **kwargs): - try: - return fun(*args, **kwargs) - except Exception as ex: - raise exceptions.DataConversionException(str(ex)) - - return wrapper - - -@_try_catch -def as_json(data, **kwargs): - """Writes data as json. - - :param dict data: data to convert to json - :param kwargs kwargs: kwargs for json dumps - :return: json string - :rtype: str - """ - - if 'sort_keys' not in kwargs: - kwargs['sort_keys'] = False - if 'ensure_ascii' not in kwargs: - kwargs['ensure_ascii'] = False - - data = json.dumps(data, **kwargs) - - return data - - -@_try_catch -def from_json(data, **kwargs): - """Reads data from json str. - - :param str data: data to read - :param kwargs kwargs: kwargs for json loads - :return: read data - :rtype: dict - """ - return json.loads(data, **kwargs) - - -_READABLE_CONTENT_TYPES = { - TEXT_CONTENT_TYPE: lambda content: content, - JSON_CONTENT_TYPE: from_json -} - - -def read_body(payload, content_type=JSON_CONTENT_TYPE): - """Reads HTTP payload according to given content_type. - - Function is capable of reading from payload stream. - Read data is then processed according to content_type. - - Note: - Content-Type is validated. It means that if read_body - body is not capable of reading data in requested type, - it will throw an exception. - - If the read data is empty, the method returns None - to indicate that. - - Note: - There is no transformation if content type is equal to - 'text/plain'. What has been read is returned.
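A short usage sketch (illustrative; relies on the JSON default):

    import io
    read_body(io.BytesIO(b'{"name": "cpu"}'))  # -> {'name': 'cpu'}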
- - :param stream payload: payload to read, payload should have read method - :param str content_type: payload content type, default to application/json - :return: read data, returned type depends on content_type, or None - if empty - - :exception: :py:class:`.UnreadableContentError` - in case of any failure when - reading data - - """ - if content_type not in _READABLE_CONTENT_TYPES: - msg = ('Cannot read %s, not in %s' % - (content_type, _READABLE_CONTENT_TYPES)) - raise exceptions.UnsupportedContentTypeException(msg) - - try: - content = payload.read() - if not content: - return None - except Exception as ex: - raise exceptions.UnreadableContentError(str(ex)) - - return _READABLE_CONTENT_TYPES[content_type](content) diff --git a/monasca_api/conf/__init__.py b/monasca_api/conf/__init__.py deleted file mode 100644 index 1cac2a273..000000000 --- a/monasca_api/conf/__init__.py +++ /dev/null @@ -1,82 +0,0 @@ -# Copyright 2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import os -import pkgutil - -from oslo_config import cfg -from oslo_db import options as oslo_db_opts -from oslo_log import log -from oslo_utils import importutils - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - - -def load_conf_modules(): - """Loads all modules that contain configuration. - - Method iterates over modules of :py:mod:`monasca_api.conf` - and imports only those that contain the following methods: - - - list_opts (required by oslo_config.genconfig) - - register_opts (required by this module) - - """ - for modname in _list_module_names(): - mod = importutils.import_module('monasca_api.conf.' + modname) - required_funcs = ['register_opts', 'list_opts'] - if all(hasattr(mod, func) for func in required_funcs): - yield mod - - -def _list_module_names(): - package_path = os.path.dirname(os.path.abspath(__file__)) - for _, modname, ispkg in pkgutil.iter_modules(path=[package_path]): - if not (modname == "opts" and ispkg): - yield modname - - -def register_opts(): - """Registers all conf modules opts. - - This method allows different modules to register - opts according to their needs. - - """ - _register_api_opts() - _register_db_opts() - - -def _register_api_opts(): - for mod in load_conf_modules(): - mod.register_opts(CONF) - - -def _register_db_opts(): - oslo_db_opts.set_defaults(CONF, connection='sqlite://', - max_pool_size=10, max_overflow=20, - pool_timeout=10) - - -def list_opts(): - """Lists all conf modules opts. - - Goes through all conf modules and yields their opts - - """ - for mod in load_conf_modules(): - mod_opts = mod.list_opts() - yield mod_opts[0], mod_opts[1] diff --git a/monasca_api/conf/cassandra.py b/monasca_api/conf/cassandra.py deleted file mode 100644 index 275463cc9..000000000 --- a/monasca_api/conf/cassandra.py +++ /dev/null @@ -1,63 +0,0 @@ -# Copyright 2014 IBM Corp.
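Taken together, the rest helpers above let callers serialize a payload and read it back symmetrically. A small sketch, assuming an in-memory stream as a stand-in for the HTTP payload:

    # Hypothetical round-trip through monasca_api.common.rest.utils.
    import io

    from monasca_api.common.rest import utils

    body = utils.as_json({'name': 'cpu.idle_perc', 'value': 97.5})
    payload = io.BytesIO(body.encode(utils.ENCODING))  # stand-in stream
    parsed = utils.read_body(payload, content_type=utils.JSON_CONTENT_TYPE)
    assert parsed['name'] == 'cpu.idle_perc'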
-# Copyright 2016-2017 FUJITSU LIMITED -# (C) Copyright 2016-2017 Hewlett Packard Enterprise Development LP -# (C) Copyright 2017-2018 SUSE LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_config import cfg -from oslo_config import types - -cassandra_opts = [ - cfg.ListOpt('contact_points', - default=['127.0.0.1'], - item_type=types.HostAddress(), - help=''' -Comma separated list of Cassandra node IP addresses -'''), - cfg.PortOpt('port', default=9042, - help=''' -Cassandra port number -'''), - cfg.StrOpt('keyspace', default='monasca', - help=''' -Keyspace where metrics are stored -'''), - cfg.StrOpt('user', default='', - help=''' -Cassandra user for monasca-api service -'''), - cfg.StrOpt('password', default='', secret=True, - help=''' -Cassandra user password for monasca-api service -'''), - cfg.IntOpt('connection_timeout', default=5, - help=''' -Cassandra timeout in seconds when creating a new connection -'''), - cfg.StrOpt('local_data_center', default='', - help=''' -Cassandra local data center name -''') -] - -cassandra_group = cfg.OptGroup(name='cassandra') - - -def register_opts(conf): - conf.register_group(cassandra_group) - conf.register_opts(cassandra_opts, cassandra_group) - - -def list_opts(): - return cassandra_group, cassandra_opts diff --git a/monasca_api/conf/dispatcher.py b/monasca_api/conf/dispatcher.py deleted file mode 100644 index 7593b6cf6..000000000 --- a/monasca_api/conf/dispatcher.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright 2014 IBM Corp -# (C) Copyright 2015,2016 Hewlett Packard Enterprise Development LP -# Copyright 2017 Fujitsu LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_config import cfg - -dispatcher_opts = [ - cfg.StrOpt('versions', - default='monasca_api.v2.reference.versions:Versions', - help='Versions controller'), - cfg.StrOpt('version_2_0', - default='monasca_api.v2.reference.version_2_0:Version2', - help='Version 2.0 controller'), - cfg.StrOpt('metrics', - default='monasca_api.v2.reference.metrics:Metrics', - help='Metrics controller'), - cfg.StrOpt('metrics_measurements', - default='monasca_api.v2.reference.metrics:MetricsMeasurements', - help='Metrics measurements controller'), - cfg.StrOpt('metrics_statistics', - default='monasca_api.v2.reference.metrics:MetricsStatistics', - help='Metrics statistics controller'), - cfg.StrOpt('metrics_names', - default='monasca_api.v2.reference.metrics:MetricsNames', - help='Metrics names controller'), - cfg.StrOpt('alarm_definitions', - default='monasca_api.v2.reference.' 
- 'alarm_definitions:AlarmDefinitions', - help='Alarm definitions controller'), - cfg.StrOpt('alarms', - default='monasca_api.v2.reference.alarms:Alarms', - help='Alarms controller'), - cfg.StrOpt('alarms_count', - default='monasca_api.v2.reference.alarms:AlarmsCount', - help='Alarms Count controller'), - cfg.StrOpt('alarms_state_history', - default='monasca_api.v2.reference.alarms:AlarmsStateHistory', - help='Alarms state history controller'), - cfg.StrOpt('notification_methods', - default='monasca_api.v2.reference.notifications:Notifications', - help='Notification Methods controller'), - cfg.StrOpt('dimension_values', - default='monasca_api.v2.reference.metrics:DimensionValues', - help='Dimension Values controller'), - cfg.StrOpt('dimension_names', - default='monasca_api.v2.reference.metrics:DimensionNames', - help='Dimension Names controller'), - cfg.StrOpt('notification_method_types', - default='monasca_api.v2.reference.' - 'notificationstype:NotificationsType', - help='Notifications Type Methods controller'), - cfg.StrOpt('logs', - default='monasca_api.v2.reference.logs:Logs', - help='Logs controller'), - cfg.StrOpt('healthchecks', - default='monasca_api.healthchecks:HealthChecks', - help='Health checks endpoint controller') -] - -dispatcher_group = cfg.OptGroup(name='dispatcher', title='dispatcher') - - -def register_opts(conf): - conf.register_group(dispatcher_group) - conf.register_opts(dispatcher_opts, dispatcher_group) - - -def list_opts(): - return dispatcher_group, dispatcher_opts diff --git a/monasca_api/conf/global.py b/monasca_api/conf/global.py deleted file mode 100644 index 73385b3ee..000000000 --- a/monasca_api/conf/global.py +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright 2014 IBM Corp. -# Copyright 2016-2017 FUJITSU LIMITED -# (C) Copyright 2016-2017 Hewlett Packard Enterprise Development LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_config import cfg - -_DEFAULT_NOTIF_PERIODS = [0, 60] - -global_opts = [ - cfg.StrOpt('region', sample_default='RegionOne', - help=''' -Region that API is running in -'''), - cfg.ListOpt('valid_notification_periods', default=_DEFAULT_NOTIF_PERIODS, - item_type=int, - help=''' -Valid periods for notification methods -'''), - cfg.BoolOpt('enable_metrics_api', default='true', - help=''' -Enable Metrics api endpoints'''), - cfg.BoolOpt('enable_logs_api', default='false', - help=''' -Enable Logs api endpoints''') -] - - -def register_opts(conf): - conf.register_opts(global_opts) - - -def list_opts(): - return 'DEFAULT', global_opts diff --git a/monasca_api/conf/influxdb.py b/monasca_api/conf/influxdb.py deleted file mode 100644 index a55ae1df1..000000000 --- a/monasca_api/conf/influxdb.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright 2014 IBM Corp. -# Copyright 2016-2017 FUJITSU LIMITED -# (C) Copyright 2016-2017 Hewlett Packard Enterprise Development LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
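The dispatcher options above name each controller as a 'module:Class' path. A hedged sketch of resolving such a string; this resolver is illustrative and not necessarily how monasca-api's router actually loads controllers:

    # Illustrative resolver for 'module:Class' option values.
    from oslo_utils import importutils


    def resolve_controller(path):
        module_name, _, class_name = path.partition(':')
        return getattr(importutils.import_module(module_name), class_name)

    # e.g. resolve_controller('monasca_api.v2.reference.metrics:Metrics')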
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from oslo_config import cfg - -influxdb_opts = [ - cfg.StrOpt('database_name', default='mon', - help=''' -Database name where metrics are stored -'''), - cfg.BoolOpt('db_per_tenant', default=False, - help=''' -Whether to use a separate database per tenant -'''), - cfg.HostAddressOpt('ip_address', default='127.0.0.1', - help=''' -IP address to Influxdb server -'''), - cfg.PortOpt('port', default=8086, - help='Port to Influxdb server'), - cfg.StrOpt('user', required=True, - sample_default='monasca-api', help=''' -Influxdb user -'''), - cfg.StrOpt('password', secret=True, sample_default='password', - help=''' -Influxdb password -''') -] - -influxdb_group = cfg.OptGroup(name='influxdb', title='influxdb') - - -def register_opts(conf): - conf.register_group(influxdb_group) - conf.register_opts(influxdb_opts, influxdb_group) - - -def list_opts(): - return influxdb_group, influxdb_opts diff --git a/monasca_api/conf/kafka.py b/monasca_api/conf/kafka.py deleted file mode 100644 index 7d39f3eaf..000000000 --- a/monasca_api/conf/kafka.py +++ /dev/null @@ -1,83 +0,0 @@ -# Copyright 2014 IBM Corp. -# Copyright 2016-2017 FUJITSU LIMITED -# (C) Copyright 2016-2017 Hewlett Packard Enterprise Development LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from oslo_config import cfg - -from monasca_api.conf import types - -kafka_opts = [ - cfg.ListOpt('uri', - default=['127.0.0.1:9092'], - item_type=types.HostAddressPortType(), - help='Comma separated list of Kafka broker host:port'), - cfg.StrOpt('metrics_topic', default='metrics', - help='The topic that metrics will be published to'), - cfg.StrOpt('events_topic', default='events', - help='The topic that events will be published to'), - cfg.MultiStrOpt('logs_topics', default=['log'], - help='The topic that logs will be published to'), - cfg.StrOpt('alarm_state_transitions_topic', - default='alarm-state-transitions', - help='The topic that alarm state will be published to'), - cfg.StrOpt('group', default='api', - help='The group name that this service belongs to'), - cfg.IntOpt('wait_time', default=1, - advanced=True, min=1, - help='The wait time when no messages on kafka queue (NOT USED)'), - cfg.IntOpt('ack_time', default=20, - help='The ack time back to kafka. 
(NOT USED)'), - cfg.IntOpt('max_retry', default=3, - help='Number of retries in case of connection error (NOT USED)'), - cfg.BoolOpt('auto_commit', default=False, - advanced=True, - help='Whether the message is automatically committed ' - '(NOT USED)'), - cfg.BoolOpt('is_async', default=True, - help='Whether posting is asynchronous or not (NOT USED)'), - cfg.BoolOpt('compact', default=True, - help='Specify if the message received should be parsed. If ' - 'True, message will not be parsed, otherwise messages ' - 'will be parsed (NOT USED)'), - cfg.ListOpt('partitions', item_type=int, - default=[0], - help='The partitions this connection should listen for ' - 'messages on. (NOT USED)'), - cfg.BoolOpt('drop_data', default=False, - help='Specify if received data should be simply dropped. ' - 'This parameter is only for testing purposes. (NOT USED)'), - cfg.BoolOpt(name='legacy_kafka_client_enabled', default=False, - required=True, advanced=True, - help='Enable legacy Kafka client. When set old version of ' - 'kafka-python library is used. Message format version ' - 'for the brokers should be set to 0.9.0.0 to avoid ' - 'performance issues until all consumers are upgraded.'), - cfg.IntOpt('queue_buffering_max_messages', default=1000, - help='The maximum number of metrics per payload sent to ' - 'Kafka. Posts to the Monasca API which exceed this will ' - 'be chunked into batches not exceeding this number.') -] - -kafka_group = cfg.OptGroup(name='kafka', title='kafka') - - -def register_opts(conf): - conf.register_group(kafka_group) - conf.register_opts(kafka_opts, kafka_group) - - -def list_opts(): - return kafka_group, kafka_opts diff --git a/monasca_api/conf/log_publisher.py b/monasca_api/conf/log_publisher.py deleted file mode 100644 index d1e2af1ff..000000000 --- a/monasca_api/conf/log_publisher.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright 2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_config import cfg - -_MAX_MESSAGE_SIZE = 1048576 -_DEFAULT_MAX_LOG_SIZE = 1024 * 1024 - -log_publisher_opts = [ - cfg.IntOpt('max_message_size', - default=_MAX_MESSAGE_SIZE, - required=True, - help=''' -Message max size that can be sent to kafka, default to %d bytes -''' % _MAX_MESSAGE_SIZE), - cfg.StrOpt('region', - default='Region;', - help=''' -Region -'''), - cfg.IntOpt('max_log_size', - default=_DEFAULT_MAX_LOG_SIZE, - help=''' -Refers to payload/envelope size. If either is exceeded API will throw an error -''') -] - -log_publisher_group = cfg.OptGroup(name='log_publisher', title='log_publisher') - - -def register_opts(conf): - conf.register_group(log_publisher_group) - conf.register_opts(log_publisher_opts, log_publisher_group) - - -def list_opts(): - return log_publisher_group, log_publisher_opts diff --git a/monasca_api/conf/messaging.py b/monasca_api/conf/messaging.py deleted file mode 100644 index c6b95f94b..000000000 --- a/monasca_api/conf/messaging.py +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright 2014 IBM Corp. 
-# Copyright 2016-2017 FUJITSU LIMITED -# (C) Copyright 2016-2017 Hewlett Packard Enterprise Development LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_config import cfg - -messaging_opts = [ - cfg.StrOpt('driver', help=''' -The message queue driver to use -'''), - cfg.StrOpt('metrics_message_format', default='reference', - deprecated_for_removal=True, - deprecated_since="2.1.0", - deprecated_reason=''' -Option is not used anywhere in the codebase -''', - help=''' -The type of metrics message format to publish to the message queue -'''), - cfg.StrOpt('events_message_format', default='reference', - deprecated_for_removal=True, - deprecated_since='2.1.0', - deprecated_reason=''' -Option is not used anywhere in the codebase -''', - help=''' -The type of events message format to publish to the message queue -''') -] - -messaging_group = cfg.OptGroup(name='messaging', title='messaging') - - -def register_opts(conf): - conf.register_group(messaging_group) - conf.register_opts(messaging_opts, messaging_group) - - -def list_opts(): - return messaging_group, messaging_opts diff --git a/monasca_api/conf/repositories.py b/monasca_api/conf/repositories.py deleted file mode 100644 index 653d0d130..000000000 --- a/monasca_api/conf/repositories.py +++ /dev/null @@ -1,72 +0,0 @@ -# Copyright 2014 IBM Corp. -# Copyright 2016-2017 FUJITSU LIMITED -# (C) Copyright 2016-2017 Hewlett Packard Enterprise Development LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from oslo_config import cfg - -BASE_SQL_PATH = 'monasca_api.common.repositories.sqla.' 
- -repositories_opts = [ - cfg.StrOpt( - 'metrics_driver', - default='monasca_api.common.repositories.influxdb.metrics_repository:MetricsRepository', - advanced=True, - help=''' -The repository driver to use for metrics -'''), - cfg.StrOpt( - 'alarm_definitions_driver', - default=BASE_SQL_PATH + - 'alarm_definitions_repository:AlarmDefinitionsRepository', - advanced=True, - help=''' -The repository driver to use for alarm definitions -'''), - cfg.StrOpt( - 'alarms_driver', - default=BASE_SQL_PATH + - 'alarms_repository:AlarmsRepository', - advanced=True, - help=''' -The repository driver to use for alarms -'''), - cfg.StrOpt( - 'notifications_driver', - default=BASE_SQL_PATH + - 'notifications_repository:NotificationsRepository', - advanced=True, - help=''' -The repository driver to use for notifications -'''), - cfg.StrOpt( - 'notification_method_type_driver', - default=BASE_SQL_PATH + - 'notification_method_type_repository:NotificationMethodTypeRepository', - advanced=True, - help=''' -The repository driver to use for notifications -''')] - -repositories_group = cfg.OptGroup(name='repositories', title='repositories') - - -def register_opts(conf): - conf.register_group(repositories_group) - conf.register_opts(repositories_opts, repositories_group) - - -def list_opts(): - return repositories_group, repositories_opts diff --git a/monasca_api/conf/security.py b/monasca_api/conf/security.py deleted file mode 100644 index c0c6130a0..000000000 --- a/monasca_api/conf/security.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright 2014 IBM Corp. -# Copyright 2016-2017 FUJITSU LIMITED -# (C) Copyright 2016-2017 Hewlett Packard Enterprise Development LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
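The repository drivers above follow the same 'module:Class' convention as the dispatcher options. A sketch of instantiating the configured metrics driver, assuming the options have been registered and the driver's backend library is installed; this loading helper is an assumption, not the API's actual bootstrap code:

    # Illustrative: build the configured metrics repository.
    from oslo_config import cfg
    from oslo_utils import importutils

    from monasca_api import conf

    conf.register_opts()
    module_name, _, class_name = (
        cfg.CONF.repositories.metrics_driver.partition(':'))
    repo_cls = getattr(importutils.import_module(module_name), class_name)
    metrics_repo = repo_cls()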
- -from oslo_config import cfg - -security_opts = [ - cfg.ListOpt('healthcheck_roles', default=['@'], - help=''' -Roles that are allowed to check the health -'''), - cfg.ListOpt('versions_roles', default=['@'], - help=''' -Roles that are allowed to check the versions -'''), - cfg.ListOpt('default_authorized_roles', default=['monasca-user'], - help=''' -Roles that are allowed full access to the API -'''), - cfg.ListOpt('agent_authorized_roles', default=['monasca-agent'], - help=''' -Roles that are only allowed to POST to the API -'''), - cfg.ListOpt('read_only_authorized_roles', - default=['monasca-read-only-user'], - help=''' -Roles that are only allowed to GET from the API -'''), - cfg.ListOpt('delegate_authorized_roles', default=['admin'], - help=''' -Roles that are allowed to POST metrics on behalf of another tenant -''') -] - -security_group = cfg.OptGroup(name='security', title='security') - - -def register_opts(conf): - conf.register_group(security_group) - conf.register_opts(security_opts, security_group) - - -def list_opts(): - return security_group, security_opts diff --git a/monasca_api/conf/types.py b/monasca_api/conf/types.py deleted file mode 100644 index f710cf199..000000000 --- a/monasca_api/conf/types.py +++ /dev/null @@ -1,63 +0,0 @@ -# Copyright 2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_config import cfg -from oslo_config import types -from oslo_utils import netutils - - -class HostAddressPortOpt(cfg.Opt): - """Option for HostAddressPortType. - - Accept hostname or ip address with TCP/IP port number. - """ - def __init__(self, name, **kwargs): - ip_port_type = HostAddressPortType() - super(HostAddressPortOpt, self).__init__(name, - type=ip_port_type, - **kwargs) - - -class HostAddressPortType(types.HostAddress): - """HostAddress with additional port.""" - - def __init__(self, version=None): - type_name = 'ip and port value' - super(HostAddressPortType, self).__init__(version, type_name=type_name) - - def __call__(self, value): - addr, port = netutils.parse_host_port(value) - # NOTE(gmann): parse_host_port() return port as None if no port is - # supplied in value so setting port as string for correct - # parsing and error otherwise it will not be parsed for NoneType. 
- port = 'None' if port is None else port - addr = self.validate_addr(addr) - port = self._validate_port(port) - if not addr and not port: - raise ValueError('%s is not a valid ip with optional port' % value) - return '%s:%d' % (addr, port) - - @staticmethod - def _validate_port(port): - return types.Port()(port) - - def validate_addr(self, addr): - try: - addr = self.ip_address(addr) - except ValueError: - try: - addr = self.hostname(addr) - except ValueError: - raise ValueError("%s is not a valid host address" % addr) - return addr diff --git a/monasca_api/config.py b/monasca_api/config.py deleted file mode 100644 index 3b108a7c1..000000000 --- a/monasca_api/config.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright 2017 FUJITSU LIMITED -# Copyright 2018 OP5 AB -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import sys - -from oslo_config import cfg -from oslo_log import log -from oslo_policy import opts as policy_opts - -from monasca_api import conf -from monasca_api import version - -CONF = conf.CONF -LOG = log.getLogger(__name__) - -_CONF_LOADED = False -_GUNICORN_MARKER = 'gunicorn' - - -def parse_args(argv=None): - """Loads application configuration. - - Loads entire application configuration just once. - - """ - global _CONF_LOADED - if _CONF_LOADED: - LOG.debug('Configuration has already been loaded') - return - - log.set_defaults() - log.register_options(CONF) - - argv = (argv if argv is not None else sys.argv[1:]) - args = ([] if _is_running_under_gunicorn() else argv or []) - - CONF(args=args, - prog=sys.argv[1:], - project='monasca', - version=version.version_str, - default_config_files=get_config_files(), - description='RESTful API for alarming in the cloud') - - log.setup(CONF, - product_name='monasca-api', - version=version.version_str) - conf.register_opts() - policy_opts.set_defaults(CONF) - - _CONF_LOADED = True - - -def get_config_files(): - """Get the possible configuration files accepted by oslo.config - - This also includes the deprecated ones. - """ - # default files - conf_files = cfg.find_config_files(project='monasca', prog='monasca-api') - # deprecated config files (only used if standard config files are not there) - if len(conf_files) == 0: - for prog_name in ['api', 'api-config']: - old_conf_files = cfg.find_config_files(project='monasca', prog=prog_name) - if len(old_conf_files) > 0: - LOG.warning('Found deprecated old location "{}" ' - 'of main configuration file'.format(old_conf_files)) - conf_files += old_conf_files - return conf_files - - -def _is_running_under_gunicorn(): - """Evaluates if api runs under gunicorn.""" - content = filter(lambda x: x != sys.executable and _GUNICORN_MARKER in x, - sys.argv or []) - return len(list(content) if not isinstance(content, list) else content) > 0 diff --git a/monasca_api/db/README b/monasca_api/db/README deleted file mode 100644 index db73088a4..000000000 --- a/monasca_api/db/README +++ /dev/null @@ -1,12 +0,0 @@ -This directory contains the plumbing for the Alembic migrations that modify the -Monasca database.
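A short sketch of the HostAddressPortType defined above: values must carry an explicit port, since a missing port is deliberately coerced to the string 'None' so that validation fails with ValueError. The sample values are illustrative:

    # Illustrative use of the deleted option type.
    from monasca_api.conf import types

    ip_port = types.HostAddressPortType()
    assert ip_port('127.0.0.1:9092') == '127.0.0.1:9092'
    # ip_port('kafka-1') would raise ValueError (no port supplied).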
- -If you need to add a new migration, run - - alembic revision -m '<description>' - -in this directory, where <description> is a short description of what -your migration does such as 'Add volume field to alarm'. Alembic will then -create a revision script in the alembic/versions/ directory. You will need to -edit this script to add upwards and downwards migrations for the change you -want to make. diff --git a/monasca_api/db/__init__.py b/monasca_api/db/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/monasca_api/db/alembic.ini b/monasca_api/db/alembic.ini deleted file mode 100644 index ca5abe4eb..000000000 --- a/monasca_api/db/alembic.ini +++ /dev/null @@ -1,74 +0,0 @@ -# A generic, single database configuration. - -[alembic] -# path to migration scripts -script_location = %(here)s/alembic - -# template used to generate migration files -# file_template = %%(rev)s_%%(slug)s - -# timezone to use when rendering the date -# within the migration file as well as the filename. -# string value is passed to dateutil.tz.gettz() -# leave blank for localtime -# timezone = - -# max length of characters to apply to the -# "slug" field -#truncate_slug_length = 40 - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - -# set to 'true' to allow .pyc and .pyo files without -# a source .py file to be detected as revisions in the -# versions/ directory -# sourceless = false - -# version location specification; this defaults -# to alembic/versions. When using multiple version -# directories, initial revisions must be specified with --version-path -# version_locations = %(here)s/bar %(here)s/bat alembic/versions - -# the output encoding used when revision files -# are written from script.py.mako -# output_encoding = utf-8 - -# sqlalchemy.url = driver://user:pass@localhost/dbname - - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S diff --git a/monasca_api/db/alembic/__init__.py b/monasca_api/db/alembic/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/monasca_api/db/alembic/env.py b/monasca_api/db/alembic/env.py deleted file mode 100644 index 917868722..000000000 --- a/monasca_api/db/alembic/env.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright 2018 SUSE Linux GmbH -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License.
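The README and alembic.ini above describe the usual CLI workflow; the same migrations can also be driven programmatically. A hedged sketch, assuming monasca_api is installed and its database options resolve to a reachable database:

    # Illustrative: upgrade the Monasca schema to the newest revision.
    import os

    from alembic import command
    from alembic import config as alembic_config

    import monasca_api.config
    import monasca_api.db

    monasca_api.config.parse_args(argv=[])  # load [database] options
    ini_path = os.path.join(
        os.path.dirname(monasca_api.db.__file__), 'alembic.ini')
    command.upgrade(alembic_config.Config(ini_path), 'head')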
- -import os -import sys - -from alembic import config as alembic_config -from alembic import context -from logging.config import fileConfig - -from monasca_api.common.repositories.sqla import models -from monasca_api.common.repositories.sqla import sql_repository -import monasca_api.config - -ini_file_path = os.path.join(os.path.dirname(__file__), '..', 'alembic.ini') - -# This indicates whether we are running with a viable Alembic -# context (necessary to skip run_migrations_online() below -# if sphinx imports this file without a viable Alembic -# context) -have_context = True - -try: - config = context.config - # Only load Monasca configuration if imported by alembic CLI tool (the - # monasca_db command will handle this on its own). - if os.path.basename(sys.argv[0]) == 'alembic': - monasca_api.config.parse_args(argv=[]) -except AttributeError: - config = alembic_config.Config(ini_file_path) - have_context = False - - -# Interpret the config file for Python logging. -# This line sets up loggers basically. -fileConfig(config.config_file_name) - -# Model metadata. This is needed for 'autogenerate' support. If you add new -# tables, you will need to add them to the get_all_metadata() method as well. -target_metadata = models.get_all_metadata() - -nc = {"ix": "ix_%(column_0_label)s", - "uq": "uq_%(table_name)s_%(column_0_name)s", - "fk": "fk_%(table_name)s_%(column_0_name)s", - "pk": "pk_%(table_name)s"} - -target_metadata.naming_convention = nc - - -def run_migrations_online(): - """Run migrations in 'online' mode. - - In this scenario we need to create an Engine - and associate a connection with the context. - - """ - engine = sql_repository.get_engine() - - with engine.connect() as connection: - context.configure( - connection=connection, - target_metadata=target_metadata - ) - - with context.begin_transaction(): - context.run_migrations() - - -if have_context: - run_migrations_online() diff --git a/monasca_api/db/alembic/script.py.mako b/monasca_api/db/alembic/script.py.mako deleted file mode 100644 index 2c0156303..000000000 --- a/monasca_api/db/alembic/script.py.mako +++ /dev/null @@ -1,24 +0,0 @@ -"""${message} - -Revision ID: ${up_revision} -Revises: ${down_revision | comma,n} -Create Date: ${create_date} - -""" -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} - -# revision identifiers, used by Alembic. -revision = ${repr(up_revision)} -down_revision = ${repr(down_revision)} -branch_labels = ${repr(branch_labels)} -depends_on = ${repr(depends_on)} - - -def upgrade(): - ${upgrades if upgrades else "pass"} - - -def downgrade(): - ${downgrades if downgrades else "pass"} diff --git a/monasca_api/db/alembic/versions/00597b5c8325_initial.py b/monasca_api/db/alembic/versions/00597b5c8325_initial.py deleted file mode 100644 index b04486363..000000000 --- a/monasca_api/db/alembic/versions/00597b5c8325_initial.py +++ /dev/null @@ -1,587 +0,0 @@ -# Copyright 2018 SUSE Linux GmbH -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Initial migration for full schema (Git revision 00597b5c8325664c2c534625525f59232d243d66). - -Revision ID: 00597b5c8325 -Revises: N/A -Create Date: 2018-04-12 09:09:48.212206 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '00597b5c8325' -down_revision = None -branch_labels = None -depends_on = None - - -def upgrade(): - # Enum tables (will be prepopulated with values through bulk_insert) - - alarm_states = op.create_table('alarm_state', - sa.Column('name', - sa.String(length=20), - nullable=False), - sa.PrimaryKeyConstraint('name'), - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_unicode_ci' - ) - - op.bulk_insert(alarm_states, - [{'name': 'UNDETERMINED'}, - {'name': 'OK'}, - {'name': 'ALARM'}]) - - ad_severities = op.create_table( - 'alarm_definition_severity', - sa.Column('name', - sa.String(length=20), - nullable=False), - sa.PrimaryKeyConstraint('name'), - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_unicode_ci') - - op.bulk_insert(ad_severities, - [{'name': 'LOW'}, - {'name': 'MEDIUM'}, - {'name': 'HIGH'}, - {'name': 'CRITICAL'}]) - - nm_types = op.create_table( - 'notification_method_type', - sa.Column('name', - sa.String(length=20), - nullable=False), - sa.PrimaryKeyConstraint('name'), - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_unicode_ci') - - op.bulk_insert(nm_types, - [{'name': 'EMAIL'}, - {'name': 'WEBHOOK'}, - {'name': 'PAGERDUTY'}]) - - stream_action_types = op.create_table( - 'stream_actions_action_type', - sa.Column('name', - sa.String(length=20), - nullable=False), - sa.PrimaryKeyConstraint('name'), - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_unicode_ci') - - op.bulk_insert(stream_action_types, - [{'name': 'FIRE'}, - {'name': 'EXPIRE'}]) - - op.create_table( - 'alarm_definition', - sa.Column('id', - sa.String(length=36), - nullable=False), - sa.Column('tenant_id', - sa.String(length=36), - nullable=False), - sa.Column('name', - sa.String(length=255), - nullable=False, - server_default=''), - sa.Column('description', - sa.String(length=255), - nullable=True, - server_default=None), - sa.Column('expression', - sa.dialects.mysql.LONGTEXT(), - nullable=False), - sa.Column('severity', - sa.String(length=20), - nullable=False), - sa.Column('match_by', - sa.String(length=255), - nullable=True, - server_default=''), - sa.Column('actions_enabled', - sa.Boolean(), - nullable=False, - server_default='1'), - sa.Column('created_at', - sa.DateTime(), - nullable=False), - sa.Column('updated_at', - sa.DateTime(), - nullable=False), - sa.Column('deleted_at', - sa.DateTime(), - nullable=True, - server_default=None), - sa.PrimaryKeyConstraint('id'), - sa.Index('tenant_id', 'tenant_id'), - sa.Index('deleted_at', 'deleted_at'), - sa.Index('fk_alarm_definition_severity', 'severity'), - sa.ForeignKeyConstraint(['severity'], - ['alarm_definition_severity.name']), - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_unicode_ci') - - op.create_table( - 'alarm', - sa.Column('id', - sa.String(length=36), - nullable=False), - sa.Column('alarm_definition_id', - sa.String(length=36), - nullable=False, - server_default=''), - sa.Column('state', - sa.String(length=20), - nullable=False), - sa.Column('lifecycle_state', - sa.String(length=50, collation=False), - nullable=True, - server_default=None), - sa.Column('link', - sa.String(length=512, collation=False), - nullable=True, - server_default=None), - sa.Column('created_at', - sa.DateTime(), - nullable=False), - sa.Column('state_updated_at', - sa.DateTime(), - 
nullable=True), - sa.Column('updated_at', - sa.DateTime(), - nullable=False), - sa.PrimaryKeyConstraint('id'), - sa.Index('alarm_definition_id', 'alarm_definition_id'), - sa.Index('fk_alarm_alarm_state', 'state'), - sa.ForeignKeyConstraint(['alarm_definition_id'], - ['alarm_definition.id'], - name='fk_alarm_definition_id', - ondelete='CASCADE'), - sa.ForeignKeyConstraint(['state'], - ['alarm_state.name'], - name='fk_alarm_alarm_state'), - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_unicode_ci') - - op.create_table( - 'notification_method', - sa.Column('id', - sa.String(length=36), - nullable=False), - sa.Column('tenant_id', - sa.String(length=36), - nullable=False), - sa.Column('name', - sa.String(length=250), - nullable=True, - server_default=None), - sa.Column('type', - sa.String(length=20), - # Note: the typo below is deliberate since we need to match - # the constraint name from the SQL script where it is - # misspelled as well. - sa.ForeignKey('notification_method_type.name', - name='fk_alarm_noticication_method_type'), - nullable=False), - sa.Column('address', - sa.String(length=512), - nullable=True, - server_default=None), - sa.Column('created_at', - sa.DateTime(), - nullable=False), - sa.Column('updated_at', - sa.DateTime(), - nullable=False), - sa.PrimaryKeyConstraint('id'), - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_unicode_ci') - - op.create_table( - 'alarm_action', - sa.Column('alarm_definition_id', - sa.String(length=36), - nullable=False,), - sa.Column('alarm_state', - sa.String(length=20), - nullable=False), - sa.Column('action_id', - sa.String(length=36), - nullable=False), - sa.PrimaryKeyConstraint('alarm_definition_id', 'alarm_state', - 'action_id'), - sa.ForeignKeyConstraint(['action_id'], - ['notification_method.id'], - name='fk_alarm_action_notification_method_id', - ondelete='CASCADE'), - sa.ForeignKeyConstraint(['alarm_state'], - ['alarm_state.name']), - sa.ForeignKeyConstraint(['alarm_definition_id'], - ['alarm_definition.id'], - ondelete='CASCADE'), - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_unicode_ci') - - op.create_table( - 'alarm_metric', - sa.Column('alarm_id', - sa.String(length=36), - nullable=False), - sa.Column('metric_definition_dimensions_id', - sa.BINARY(20), - nullable=False, - server_default='\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0'), - sa.PrimaryKeyConstraint('alarm_id', 'metric_definition_dimensions_id'), - sa.Index('alarm_id', 'alarm_id'), - sa.Index('metric_definition_dimensions_id', 'metric_definition_dimensions_id'), - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_unicode_ci') - - # For some mysterious alembic/sqlalchemy reason this foreign key constraint - # ends up missing when specified upon table creation. 
Hence we need to add - # it through an ALTER TABLE operation: - op.create_foreign_key('fk_alarm_id', - 'alarm_metric', - 'alarm', - ['alarm_id'], - ['id'], ondelete='CASCADE') - - op.create_table( - 'metric_definition', - sa.Column('id', - sa.BINARY(20), - nullable=False, - server_default='\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0'), - sa.Column('name', - sa.String(length=255), - nullable=False), - sa.Column('tenant_id', - sa.String(length=36), - nullable=False), - sa.Column('region', - sa.String(length=255), - nullable=False, - server_default=''), - sa.PrimaryKeyConstraint('id'), - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_unicode_ci') - - op.create_table( - 'metric_definition_dimensions', - sa.Column('id', - sa.BINARY(20), - nullable=False, - server_default='\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0'), - sa.Column('metric_definition_id', - sa.BINARY(20), - nullable=False, - server_default='\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0'), - sa.Column('metric_dimension_set_id', - sa.BINARY(20), - nullable=False, - server_default='\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0'), - sa.PrimaryKeyConstraint('id'), - sa.Index('metric_definition_id', 'metric_definition_id'), - sa.Index('metric_dimension_set_id', 'metric_dimension_set_id'), - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_unicode_ci') - - # mysql limits the size of a unique key to 767 bytes. The utf8mb4 charset - # requires 4 bytes to be allocated for each character while the utf8 - # charset requires 3 bytes. The utf8 charset should be sufficient for any - # reasonable characters, see the definition of supplementary characters for - # what it doesn't support. Even with utf8, the unique key length would be - # 785 bytes so only a subset of the name is used. Potentially the size of - # the name should be limited to 250 characters which would resolve this - # issue. - # - # The unique key is required to allow high performance inserts without - # doing a select by using the "insert into metric_dimension ... 
on - # duplicate key update dimension_set_id=dimension_set_id syntax - - op.create_table( - 'metric_dimension', - sa.Column('dimension_set_id', - sa.BINARY(20), - nullable=False, - server_default='\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0'), - sa.Column('name', - sa.String(length=255), - nullable=False, - server_default=''), - sa.Column('value', - sa.String(length=255), - nullable=False, - server_default=''), - sa.Index('metric_dimension_key', - 'dimension_set_id', 'name', - unique=True, - mysql_length={'name': 252}), - sa.Index('dimension_set_id', 'dimension_set_id'), - mysql_charset='utf8', - mysql_collate='utf8_unicode_ci', - mysql_comment='PRIMARY KEY (`id`)') - - op.create_table( - 'sub_alarm_definition', - sa.Column('id', - sa.String(length=36), - nullable=False), - sa.Column('alarm_definition_id', - sa.String(length=36), - sa.ForeignKey('alarm_definition.id', ondelete='CASCADE', - name='fk_sub_alarm_definition'), - nullable=False, - server_default=''), - sa.Column('function', - sa.String(length=10), - nullable=False), - sa.Column('metric_name', - sa.String(length=100), - nullable=True, - server_default=None), - sa.Column('operator', - sa.String(length=5), - nullable=False), - sa.Column('threshold', - sa.dialects.mysql.DOUBLE(), - nullable=False), - sa.Column('period', - sa.Integer(), - nullable=False), - sa.Column('periods', - sa.Integer(), - nullable=False), - sa.Column('created_at', - sa.DateTime(), - nullable=False), - sa.Column('updated_at', - sa.DateTime(), - nullable=False), - sa.PrimaryKeyConstraint('id'), - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_unicode_ci') - - op.create_table( - 'sub_alarm_definition_dimension', - sa.Column('sub_alarm_definition_id', - sa.String(length=36), - sa.ForeignKey('sub_alarm_definition.id', ondelete='CASCADE', - name='fk_sub_alarm_definition_dimension'), - nullable=False, - server_default=''), - sa.Column('dimension_name', - sa.String(length=255), - nullable=False, - server_default=''), - sa.Column('value', - sa.String(length=255), - nullable=True, - server_default=None), - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_unicode_ci') - - op.create_table( - 'sub_alarm', - sa.Column('id', - sa.String(length=36), - nullable=False), - sa.Column('alarm_id', - sa.String(length=36), - sa.ForeignKey('alarm.id', ondelete='CASCADE', - name='fk_sub_alarm'), - nullable=False, - server_default=''), - sa.Column('sub_expression_id', - sa.String(length=36), - sa.ForeignKey('sub_alarm_definition.id', - name='fk_sub_alarm_expr'), - nullable=False, - server_default=''), - sa.Column('expression', - sa.dialects.mysql.LONGTEXT(), - nullable=False), - sa.Column('created_at', - sa.DateTime(), - nullable=False), - sa.Column('updated_at', - sa.DateTime(), - nullable=False), - sa.PrimaryKeyConstraint('id'), - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_unicode_ci') - - op.create_table( - 'schema_migrations', - sa.Column('version', - sa.String(length=255), - nullable=False), - sa.UniqueConstraint('version', name='unique_schema_migrations'), - mysql_charset='latin1') - - op.create_table( - 'stream_definition', - sa.Column('id', - sa.String(length=36), - nullable=False), - sa.Column('tenant_id', - sa.String(length=36), - nullable=False), - sa.Column('name', - sa.String(length=190), - nullable=False, - server_default=''), - sa.Column('description', - sa.String(length=255), - nullable=True, - server_default=None), - sa.Column('select_by', - sa.dialects.mysql.LONGTEXT(), - nullable=True, - server_default=None), - sa.Column('group_by', - 
sa.dialects.mysql.LONGTEXT(length=20), - nullable=True, - server_default=None), - sa.Column('fire_criteria', - sa.dialects.mysql.LONGTEXT(length=20), - nullable=True, - server_default=None), - sa.Column('expiration', - sa.dialects.mysql.INTEGER(display_width=10, - unsigned=True), - nullable=True, - server_default='0'), - sa.Column('actions_enabled', - sa.Boolean(), - nullable=False, - server_default='1'), - sa.Column('created_at', - sa.DateTime(), - nullable=False), - sa.Column('updated_at', - sa.DateTime(), - nullable=False), - sa.Column('deleted_at', - sa.DateTime(), - nullable=True, - server_default=None), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('tenant_id', 'name', name='tenant_name'), - sa.Index('name', 'name'), - sa.Index('tenant_id', 'tenant_id'), - sa.Index('deleted_at', 'deleted_at'), - sa.Index('created_at', 'created_at'), - sa.Index('updated_at', 'updated_at'), - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_unicode_ci') - - op.create_table( - 'stream_actions', - sa.Column('stream_definition_id', - sa.String(length=36), - sa.ForeignKey - ('stream_definition.id', - name='fk_stream_action_stream_definition_id', - ondelete='CASCADE'), - nullable=False), - sa.Column('action_id', - sa.String(length=36), - sa.ForeignKey('notification_method.id', - name='fk_stream_action_notification_method_id', - ondelete='CASCADE'), - nullable=False), - sa.Column('action_type', - sa.String(length=20), - sa.ForeignKey('stream_actions_action_type.name'), - nullable=False), - sa.PrimaryKeyConstraint('stream_definition_id', 'action_id', - 'action_type'), - sa.Index('stream_definition_id', 'stream_definition_id'), - sa.Index('action_type', 'action_type'), - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_unicode_ci') - - op.create_table( - 'event_transform', - sa.Column('id', - sa.dialects.mysql.VARCHAR(length=36, charset='utf8mb4', - collation='utf8mb4_unicode_ci'), - nullable=False), - sa.Column('tenant_id', - sa.dialects.mysql.VARCHAR(length=36, charset='utf8mb4', - collation='utf8mb4_unicode_ci'), - nullable=False), - sa.Column('name', - sa.dialects.mysql.VARCHAR(length=64, charset='utf8mb4', - collation='utf8mb4_unicode_ci'), - nullable=False), - sa.Column('description', - sa.dialects.mysql.VARCHAR(length=250, charset='utf8mb4', - collation='utf8mb4_unicode_ci'), - nullable=False), - sa.Column('specification', - sa.dialects.mysql.LONGTEXT(charset='utf8mb4', - collation='utf8mb4_unicode_ci'), - nullable=False), - sa.Column('enabled', - sa.Boolean(), - nullable=True, - server_default=None), - sa.Column('created_at', - sa.DateTime(), - nullable=False), - sa.Column('updated_at', - sa.DateTime(), - nullable=False), - sa.Column('deleted_at', - sa.DateTime(), - nullable=True, - server_default=None), - sa.PrimaryKeyConstraint('id'), - sa.Index('name', 'name'), - sa.Index('tenant_id', 'tenant_id'), - sa.Index('deleted_at', 'deleted_at'), - sa.Index('created_at', 'created_at'), - sa.Index('updated_at', 'updated_at'), - sa.UniqueConstraint('tenant_id', 'name', name='tenant_name'), - mysql_charset='utf8mb4') - - -def downgrade(): - op.drop_table('alarm_state') - op.drop_table('alarm_definition_severity') - op.drop_table('notification_method_type') - op.drop_table('stream_actions_action_type') - op.drop_table('alarm_definition') - op.drop_table('alarm') - op.drop_table('notification_method') - op.drop_table('alarm_action') - op.drop_table('alarm_metric') - op.drop_table('metric_definition') - op.drop_table('metric_definition_dimensions') - op.drop_table('metric_dimension') - 
op.drop_table('sub_alarm_definition') - op.drop_table('sub_alarm_definition_dimension') - op.drop_table('sub_alarm') - op.drop_table('schema_migrations') - op.drop_table('stream_definition') - op.drop_table('stream_actions') - op.drop_table('event_transform') diff --git a/monasca_api/db/alembic/versions/0cce983d957a_deterministic_alarms.py b/monasca_api/db/alembic/versions/0cce983d957a_deterministic_alarms.py deleted file mode 100644 index baa71273a..000000000 --- a/monasca_api/db/alembic/versions/0cce983d957a_deterministic_alarms.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright 2018 SUSE Linux GmbH -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" Add flag for deterministic alarms (Git revision 0cce983d957a3d780b6d206ad25df1271a812b4a). - -Revision ID: 0cce983d957a -Revises: 00597b5c8325 -Create Date: 2018-04-23 13:57:32.951669 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '0cce983d957a' -down_revision = '00597b5c8325' -branch_labels = None -depends_on = None - - -def upgrade(): - op.add_column('sub_alarm_definition', - sa.Column('is_deterministic', - sa.Boolean(), - nullable=False, - server_default='0')) - - -def downgrade(): - op.drop_column('sub_alarm_definition', 'is_deterministic') diff --git a/monasca_api/db/alembic/versions/26083b298bb7_remove_builtin_notification_types.py b/monasca_api/db/alembic/versions/26083b298bb7_remove_builtin_notification_types.py deleted file mode 100644 index 46c203db2..000000000 --- a/monasca_api/db/alembic/versions/26083b298bb7_remove_builtin_notification_types.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright 2018 StackHPC Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -"""Remove builtin notification types - -Revision ID: 26083b298bb7 -Revises: f69cb3152a76 -Create Date: 2018-09-18 13:52:02.170226 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.sql import table - -_NM_BUILT_IN_TYPES = set(['EMAIL', 'WEBHOOK', 'PAGERDUTY']) - -# revision identifiers, used by Alembic. -revision = '26083b298bb7' -down_revision = 'f69cb3152a76' -branch_labels = None -depends_on = None - -_nm_types = table( - 'notification_method_type', - sa.Column('name', - sa.String(length=20), - nullable=False)) - -_nm = table( - 'notification_method', - sa.Column('type', - sa.String(length=20), - nullable=False)) - - -def upgrade(): - # Built-in notification types have been removed. 
Here, we - # remove them (where not in use) and rely on Monasca Notification - # to re-populate the table according to what is set in its config file. - - # Start by creating a set of all notification method types currently - # configured in the Monasca DB - connection = op.get_bind() - nm_types_configured = connection.execute(_nm.select()).fetchall() - nm_types_configured = set([nm_type[0] for nm_type in nm_types_configured]) - - # Remove all built in notification types which are currently *not* - # configured. - nm_types_to_remove = _NM_BUILT_IN_TYPES.difference(nm_types_configured) - op.execute(_nm_types.delete().where( - _nm_types.c.name.in_(nm_types_to_remove))) - - -def downgrade(): - # Some or all of these might be present if they have been explicitly - # enabled in monasca-notification. - op.execute(_nm_types.insert().prefix_with("IGNORE").values( - [{'name': 'EMAIL'}, - {'name': 'WEBHOOK'}, - {'name': 'PAGERDUTY'}])) diff --git a/monasca_api/db/alembic/versions/30181b42434b_remove_event_and_migration_tables.py b/monasca_api/db/alembic/versions/30181b42434b_remove_event_and_migration_tables.py deleted file mode 100644 index ab8f22abb..000000000 --- a/monasca_api/db/alembic/versions/30181b42434b_remove_event_and_migration_tables.py +++ /dev/null @@ -1,176 +0,0 @@ -# Copyright 2018 SUSE Linux GmbH -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Remove event related tables and schema_migrations table (git revision -30181b42434bdde0c40abd086e903600b24e9684) - -Revision ID: 30181b42434b -Revises: c2f85438d6f3 -Create Date: 2018-04-24 09:54:50.024470 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision = '30181b42434b' -down_revision = 'c2f85438d6f3' -branch_labels = None -depends_on = None - - -def upgrade(): - op.drop_table('event_transform') - op.drop_table('schema_migrations') - op.drop_table('stream_actions') - op.drop_table('stream_definition') - - -def downgrade(): - op.create_table( - 'event_transform', - sa.Column('id', - sa.dialects.mysql.VARCHAR(length=36, charset='utf8mb4', - collation='utf8mb4_unicode_ci'), - nullable=False), - sa.Column('tenant_id', - sa.dialects.mysql.VARCHAR(length=36, charset='utf8mb4', - collation='utf8mb4_unicode_ci'), - nullable=False), - sa.Column('name', - sa.dialects.mysql.VARCHAR(length=64, charset='utf8mb4', - collation='utf8mb4_unicode_ci'), - nullable=False), - sa.Column('description', - sa.dialects.mysql.VARCHAR(length=250, charset='utf8mb4', - collation='utf8mb4_unicode_ci'), - nullable=False), - sa.Column('specification', - sa.dialects.mysql.LONGTEXT(charset='utf8mb4', - collation='utf8mb4_unicode_ci'), - nullable=False), - sa.Column('enabled', - sa.Boolean(), - nullable=True, - server_default=None), - sa.Column('created_at', - sa.DateTime(), - nullable=False), - sa.Column('updated_at', - sa.DateTime(), - nullable=False), - sa.Column('deleted_at', - sa.DateTime(), - nullable=True, - server_default=None), - sa.PrimaryKeyConstraint('id'), - sa.Index('name', 'name'), - sa.Index('tenant_id', 'tenant_id'), - sa.Index('deleted_at', 'deleted_at'), - sa.Index('created_at', 'created_at'), - sa.Index('updated_at', 'updated_at'), - sa.UniqueConstraint('tenant_id', 'name', name='tenant_name'), - mysql_charset='utf8mb4') - - op.create_table( - 'schema_migrations', - sa.Column('version', - sa.String(length=255), - nullable=False), - sa.UniqueConstraint('version', name='unique_schema_migrations'), - mysql_charset='latin1') - - op.create_table( - 'stream_actions', - sa.Column('stream_definition_id', - sa.String(length=36), - sa.ForeignKey - ('stream_definition.id', - name='fk_stream_action_stream_definition_id', - ondelete='CASCADE'), - nullable=False), - sa.Column('action_id', - sa.String(length=36), - sa.ForeignKey('notification_method.id', - name='fk_stream_action_notification_method_id', - ondelete='CASCADE'), - nullable=False), - sa.Column('action_type', - sa.String(length=20), - sa.ForeignKey('stream_actions_action_type.name'), - nullable=False), - sa.PrimaryKeyConstraint('stream_definition_id', 'action_id', - 'action_type'), - sa.Index('stream_definition_id', 'stream_definition_id'), - sa.Index('action_type', 'action_type'), - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_unicode_ci') - - op.create_table( - 'stream_definition', - sa.Column('id', - sa.String(length=36), - nullable=False), - sa.Column('tenant_id', - sa.String(length=36), - nullable=False), - sa.Column('name', - sa.String(length=190), - nullable=False, - server_default=''), - sa.Column('description', - sa.String(length=255), - nullable=True, - server_default=None), - sa.Column('select_by', - sa.dialects.mysql.LONGTEXT(), - nullable=True, - server_default=None), - sa.Column('group_by', - sa.dialects.mysql.LONGTEXT(length=20), - nullable=True, - server_default=None), - sa.Column('fire_criteria', - sa.dialects.mysql.LONGTEXT(length=20), - nullable=True, - server_default=None), - sa.Column('expiration', - sa.dialects.mysql.INTEGER(display_width=10, - unsigned=True), - nullable=True, - server_default='0'), - sa.Column('actions_enabled', - sa.Boolean(), - nullable=False, - server_default='1'), - sa.Column('created_at', - sa.DateTime(), - nullable=False), - 
sa.Column('updated_at', - sa.DateTime(), - nullable=False), - sa.Column('deleted_at', - sa.DateTime(), - nullable=True, - server_default=None), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('tenant_id', 'name', name='tenant_name'), - sa.Index('name', 'name'), - sa.Index('tenant_id', 'tenant_id'), - sa.Index('deleted_at', 'deleted_at'), - sa.Index('created_at', 'created_at'), - sa.Index('updated_at', 'updated_at'), - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_unicode_ci') diff --git a/monasca_api/db/alembic/versions/6b2b88f3cab4_add_sub_alarm_state.py b/monasca_api/db/alembic/versions/6b2b88f3cab4_add_sub_alarm_state.py deleted file mode 100644 index 75c29096f..000000000 --- a/monasca_api/db/alembic/versions/6b2b88f3cab4_add_sub_alarm_state.py +++ /dev/null @@ -1,44 +0,0 @@ -# Copyright 2018 SUSE Linux GmbH -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Add sub alarm state (Git revision 6b2b88f3cab46cd442369b22da3624611b871169) - -Revision ID: 6b2b88f3cab4 -Revises: 30181b42434b -Create Date: 2018-04-24 12:16:15.812274 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '6b2b88f3cab4' -down_revision = '30181b42434b' -branch_labels = None -depends_on = None - - -def upgrade(): - op.add_column( - 'sub_alarm', - sa.Column('state', - sa.String(length=20), - sa.ForeignKey('alarm_state.name'), - nullable=False, - server_default='OK')) - - -def downgrade(): - op.drop_column('sub_alarm', 'state') diff --git a/monasca_api/db/alembic/versions/8781a256f0c1_add_inhibited_and_silenced_to_alarms.py b/monasca_api/db/alembic/versions/8781a256f0c1_add_inhibited_and_silenced_to_alarms.py deleted file mode 100644 index 53f60041c..000000000 --- a/monasca_api/db/alembic/versions/8781a256f0c1_add_inhibited_and_silenced_to_alarms.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright 2018 SUSE Linux GmbH -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Add inhibited and silenced to alarms (Git revision 8781a256f0c19662b81f04b014e2b769e625bd6b) - -Revision ID: 8781a256f0c1 -Revises: d8b801498850 -Create Date: 2018-04-24 13:16:04.157977 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
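
The 6b2b88f3cab4 revision above adds a NOT NULL column to a populated table; the server_default is what backfills existing rows so the constraint can be applied in one DDL step. On backends with weak ALTER support (e.g. SQLite), the same change is usually written with Alembic's batch mode. A hedged sketch, not part of the original file:

    import sqlalchemy as sa
    from alembic import op


    def upgrade():
        # batch_alter_table recreates the table where the backend cannot
        # ALTER in place; on MySQL it degrades to a plain ALTER TABLE.
        with op.batch_alter_table('sub_alarm') as batch_op:
            batch_op.add_column(
                sa.Column('state', sa.String(length=20),
                          sa.ForeignKey('alarm_state.name'),
                          nullable=False, server_default='OK'))


    def downgrade():
        with op.batch_alter_table('sub_alarm') as batch_op:
            batch_op.drop_column('state')
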
-revision = '8781a256f0c1' -down_revision = 'd8b801498850' -branch_labels = None -depends_on = None - - -def upgrade(): - op.add_column('alarm', - sa.Column('inhibited', - sa.Boolean(), - nullable=False, - server_default='0')) - op.add_column('alarm', - sa.Column('silenced', - sa.Boolean(), - nullable=False, - server_default='0')) - - -def downgrade(): - op.drop_column('alarm', 'inhibited') - op.drop_column('alarm', 'silenced') diff --git a/monasca_api/db/alembic/versions/c2f85438d6f3_period_notifications.py b/monasca_api/db/alembic/versions/c2f85438d6f3_period_notifications.py deleted file mode 100644 index 8c22b0780..000000000 --- a/monasca_api/db/alembic/versions/c2f85438d6f3_period_notifications.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright 2018 SUSE Linux GmbH -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Add period to notifications (Git revision c2f85438d6f3b0fd2e1f86d84eee6e9967025eb6) - -Revision ID: c2f85438d6f3 -Revises: 0cce983d957a -Create Date: 2018-04-23 14:47:49.413502 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = 'c2f85438d6f3' -down_revision = '0cce983d957a' -branch_labels = None -depends_on = None - - -def upgrade(): - op.add_column('notification_method', - sa.Column('period', - sa.Integer(), - nullable=False, - server_default='0')) - - -def downgrade(): - op.drop_column('notification_method', 'period') diff --git a/monasca_api/db/alembic/versions/d8b801498850_remove_stream_action_types.py b/monasca_api/db/alembic/versions/d8b801498850_remove_stream_action_types.py deleted file mode 100644 index 243c46cdc..000000000 --- a/monasca_api/db/alembic/versions/d8b801498850_remove_stream_action_types.py +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright 2018 SUSE Linux GmbH -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Remove stream action types (Git revision d8b80149885016ede0ee403cf9bb07f9b7253297) - -Revision ID: d8b801498850 -Revises: 6b2b88f3cab4 -Create Date: 2018-04-24 12:53:02.342849 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision = 'd8b801498850' -down_revision = '6b2b88f3cab4' -branch_labels = None -depends_on = None - - -def upgrade(): - op.drop_table('stream_actions_action_type') - - -def downgrade(): - stream_action_types = op.create_table( - 'stream_actions_action_type', - sa.Column('name', - sa.String(length=20), - nullable=False), - sa.PrimaryKeyConstraint('name'), - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_unicode_ci') - - op.bulk_insert(stream_action_types, - [{'name': 'FIRE'}, - {'name': 'EXPIRE'}]) diff --git a/monasca_api/db/alembic/versions/f69cb3152a76_remove_inhibited_silenced_from_alarms.py b/monasca_api/db/alembic/versions/f69cb3152a76_remove_inhibited_silenced_from_alarms.py deleted file mode 100644 index 75ef11147..000000000 --- a/monasca_api/db/alembic/versions/f69cb3152a76_remove_inhibited_silenced_from_alarms.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright 2018 SUSE Linux GmbH -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Remove inhibited and silenced from alarms (Git revision f69cb3152a76e7c586dcc9a03600d1d4ed32c4e6) - -Revision ID: f69cb3152a76 -Revises: 8781a256f0c1 -Create Date: 2018-04-24 13:16:04.157977 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = 'f69cb3152a76' -down_revision = '8781a256f0c1' -branch_labels = None -depends_on = None - - -def upgrade(): - op.drop_column('alarm', 'inhibited') - op.drop_column('alarm', 'silenced') - - -def downgrade(): - op.add_column('alarm', - sa.Column('inhibited', - sa.Boolean(), - nullable=False, - server_default='0')) - op.add_column('alarm', - sa.Column('silenced', - sa.Boolean(), - nullable=False, - server_default='0')) diff --git a/monasca_api/db/fingerprint.py b/monasca_api/db/fingerprint.py deleted file mode 100644 index 61a7fb435..000000000 --- a/monasca_api/db/fingerprint.py +++ /dev/null @@ -1,113 +0,0 @@ -# Copyright 2018 SUSE Linux GmbH -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import hashlib - -from oslo_log import log -from oslo_utils import encodeutils -from sqlalchemy import MetaData -from sqlalchemy.orm import sessionmaker - -LOG = log.getLogger(__name__) - -# Map of SHA256 fingerprints to alembic revisions. Note that this is -# used in the pre-alembic case and does not need to be updated if a -# new revision is introduced. 
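
The fingerprint.py module deleted below maps a SHA-256 of the reflected schema to the matching alembic revision. A self-contained sketch of that mechanism, deliberately simplified (the normalization differs from the real class, so these hashes would not match the _REVS table that follows):

    import hashlib

    import sqlalchemy as sa


    def schema_sha256(engine):
        metadata = sa.MetaData()
        metadata.reflect(bind=engine)
        lines = []
        for table in sorted(metadata.tables.values(), key=lambda t: t.name):
            # Skip alembic's bookkeeping table so a migrated schema and a
            # freshly created one produce the same fingerprint.
            if table.name == 'alembic_version':
                continue
            lines.append(table.name)
            lines.extend('  %s %s' % (col.name, col.type)
                         for col in table.columns)
        return hashlib.sha256('\n'.join(lines).encode('utf-8')).hexdigest()
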
-_REVS = {"43e5913b0272077321ab6f25ffbcda7149b6284b": "00597b5c8325", - "c4e5c870c705421faa4041405b5a895970faa434": "0cce983d957a", - "f7a79c4eea9c9d130277a64eb6d2d16587088dbb": "30181b42434b", - "529f266f7ed42929d5405616810546e4615153e8": "6b2b88f3cab4", - "857904f960af77c0554c4c38d73ed47df7c949b4": "8781a256f0c1", - "773489fb7bfa84bf2db0e1ff1ab96bce7fb4ecd7": "c2f85438d6f3", - "f29f18a30519a1bae9dcee85a604eb72886e34d3": "d8b801498850", - "dd47cb01f11cb5cd7fec6bda6a190bc10b4659a6": "f69cb3152a76", - - # Database created with UTF8 default charset - "5dda7af1fd708095e6c9298976abb1242bbd1848": "8781a256f0c1", - "7fb1ce4a60f0065505096843bfd21f4ef4c5d1e0": "f69cb3152a76"} - - -class Fingerprint(object): - - def __init__(self, engine): - metadata = self._get_metadata(engine) - self.schema_raw = self._get_schema_raw(metadata) - self.sha256 = self._get_schema_sha256(self.schema_raw) - self.revision = self._get_revision(metadata, engine, self.sha256) - - @staticmethod - def _get_metadata(engine): - metadata_obj = MetaData() - metadata_obj.create_all(engine) - metadata_obj.reflect(engine) - return metadata_obj - - @staticmethod - def _get_schema_raw(metadata): - schema_strings = [] - - for table in metadata.sorted_tables: - # Omit this table to maintain a consistent fingerprint when - # fingerprint a migrated schema is fingerprinted. - if table.name == "alembic_version": - continue - table.metadata = None - columns = [] - for column in table.columns: - column.server_default = None - columns.append(repr(column)) - table.columns = [] - schema_strings.append(repr(table)) - - for column in columns: - schema_strings.append(" " + repr(column)) - - schema_strings.append("") - - return "\n".join(schema_strings) - - @staticmethod - def _get_schema_sha256(schema_raw): - return hashlib.sha256(encodeutils.to_utf8(schema_raw)).hexdigest() - - @staticmethod - def _get_revision(metadata, engine, sha256): - # Alembic stores the current version in the DB so check that first - # and fall back to the lookup table for the pre-alembic case. - versions_table = metadata.tables.get('alembic_version') - if versions_table is not None: - return Fingerprint._lookup_version_from_db(versions_table, engine) - elif sha256: - return Fingerprint._lookup_version_from_table(sha256) - - @staticmethod - def _get_db_session(engine): - Session = sessionmaker(bind=engine) - return Session() - - @staticmethod - def _lookup_version_from_db(versions_table, engine): - session = Fingerprint._get_db_session(engine) - # This will throw an exception for the unexpected case when there is - # more than one row. The query returns a tuple which is stripped off - # before returning. - return session.query(versions_table).one()[0] - - @staticmethod - def _lookup_version_from_table(sha256): - revision = _REVS.get(sha256) - if not revision: - LOG.warning("Fingerprint: {} does not match any revisions." 
- .format(sha256)) - return revision diff --git a/monasca_api/expression_parser/__init__.py b/monasca_api/expression_parser/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/monasca_api/expression_parser/alarm_expr_parser.py b/monasca_api/expression_parser/alarm_expr_parser.py deleted file mode 100644 index 63a931f8e..000000000 --- a/monasca_api/expression_parser/alarm_expr_parser.py +++ /dev/null @@ -1,380 +0,0 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- -# (C) Copyright 2015-2017 Hewlett Packard Enterprise LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -import sys - -import pyparsing -import six - -_DETERMINISTIC_ASSIGNMENT_LEN = 3 -_DETERMINISTIC_ASSIGNMENT_SHORT_LEN = 1 -_DETERMINISTIC_ASSIGNMENT_VALUE_INDEX = 2 -_DEFAULT_PERIOD = 60 -_DEFAULT_PERIODS = 1 - - -class SubExpr(object): - - def __init__(self, tokens): - - if not tokens.func: - if tokens.relational_op.lower() in ['gte', 'gt', '>=', '>']: - self._func = "max" - else: - self._func = "min" - else: - self._func = tokens.func - self._metric_name = tokens.metric_name - self._dimensions = tokens.dimensions_list - self._operator = tokens.relational_op - self._threshold = float(tokens.threshold) - if tokens.period: - self._period = int(tokens.period) - else: - self._period = _DEFAULT_PERIOD - if tokens.periods: - self._periods = int(tokens.periods) - else: - self._periods = _DEFAULT_PERIODS - self._deterministic = tokens.deterministic - self._id = None - - @property - def fmtd_sub_expr_str(self): - """Get the entire sub expressions as a string with spaces.""" - result = u"{}({}".format(self.normalized_func, - self._metric_name) - - if self._dimensions is not None: - result += "{" + self.dimensions_str + "}" - - if self._period != _DEFAULT_PERIOD: - result += ", {}".format(self._period) - - result += ")" - - result += " {} {}".format(self._operator, - self._threshold) - - if self._periods != _DEFAULT_PERIODS: - result += " times {}".format(self._periods) - - return result - - @property - def dimensions_str(self): - """Get all the dimensions as a single comma delimited string.""" - return u",".join(self._dimensions) - - @property - def operands_list(self): - """Get this sub expression as a list.""" - return [self] - - @property - def func(self): - """Get the function as it appears in the orig expression.""" - return self._func - - @property - def normalized_func(self): - """Get the function upper-cased.""" - return self._func.upper() - - @property - def metric_name(self): - """Get the metric name as it appears in the orig expression.""" - return self._metric_name - - @property - def normalized_metric_name(self): - """Get the metric name lower-cased.""" - return self._metric_name.lower() - - @property - def dimensions(self): - """Get the dimensions.""" - return u",".join(self._dimensions) - - @property - def dimensions_as_list(self): - """Get the dimensions as a list.""" - if self._dimensions: - return self._dimensions - else: - return [] - - @property - def operator(self): - """Get the operator.""" - 
return self._operator - - @property - def threshold(self): - """Get the threshold value.""" - return self._threshold - - @property - def period(self): - """Get the period. Default is 60 seconds.""" - if self._period: - return self._period - else: - return u'60' - - @property - def periods(self): - """Get the periods. Default is 1.""" - if self._periods: - return self._periods - else: - return u'1' - - @property - def deterministic(self): - return True if self._deterministic else False - - @property - def normalized_operator(self): - """Get the operator as one of LT, GT, LTE, or GTE.""" - if self._operator.lower() == "lt" or self._operator == "<": - return u"LT" - elif self._operator.lower() == "gt" or self._operator == ">": - return u"GT" - elif self._operator.lower() == "lte" or self._operator == "<=": - return u"LTE" - elif self._operator.lower() == "gte" or self._operator == ">=": - return u"GTE" - - @property - def id(self): - """Get the id used to identify this sub expression in the repo.""" - return self._id - - @id.setter - def id(self, id): - """Set the d used to identify this sub expression in the repo.""" - self._id = id - - -class BinaryOp(object): - def __init__(self, tokens): - self.op = tokens[0][1] - self.operands = tokens[0][0::2] - - @property - def operands_list(self): - return ([sub_operand for operand in self.operands for sub_operand in - operand.operands_list]) - - -class AndSubExpr(BinaryOp): - """Expand later as needed.""" - pass - - -class OrSubExpr(BinaryOp): - """Expand later as needed.""" - pass - - -COMMA = pyparsing.Suppress(pyparsing.Literal(",")) -LPAREN = pyparsing.Suppress(pyparsing.Literal("(")) -RPAREN = pyparsing.Suppress(pyparsing.Literal(")")) -EQUAL = pyparsing.Literal("=") -LBRACE = pyparsing.Suppress(pyparsing.Literal("{")) -RBRACE = pyparsing.Suppress(pyparsing.Literal("}")) - - -def periodValidation(instr, loc, tokens): - period = int(tokens[0]) - if period == 0: - raise pyparsing.ParseFatalException(instr, loc, - "Period must not be 0") - - if (period % 60) != 0: - raise pyparsing.ParseFatalException(instr, loc, - "Period {} must be a multiple of 60" - .format(period)) - # Must return the string - return tokens[0] - - -def periodsValidation(instr, loc, tokens): - periods = int(tokens[0]) - if periods < 1: - raise pyparsing.ParseFatalException(instr, loc, - "Periods {} must be 1 or greater" - .format(periods)) - # Must return the string - return tokens[0] - - -# Initialize non-ascii unicode code points in the Basic Multilingual Plane. -unicode_printables = u''.join( - six.unichr(c) for c in range(128, 65536) if not six.unichr(c).isspace()) - -# Does not like comma. No Literals from above allowed. -valid_identifier_chars = ( - (unicode_printables + pyparsing.alphanums + ".-_#!$%&'*+/:;?@[\\]^`|~")) - -metric_name = ( - pyparsing.Word(valid_identifier_chars, min=1, max=255)("metric_name")) -dimension_name = pyparsing.Word(valid_identifier_chars + ' ', min=1, max=255) -dimension_value = pyparsing.Word(valid_identifier_chars + ' ', min=1, max=255) - -MINUS = pyparsing.Literal('-') -integer_number = pyparsing.Word(pyparsing.nums) -decimal_number = (pyparsing.Optional(MINUS) + integer_number + - pyparsing.Optional("." 
+ integer_number)) -decimal_number.setParseAction(lambda tokens: "".join(tokens)) - -max = pyparsing.CaselessLiteral("max") -min = pyparsing.CaselessLiteral("min") -avg = pyparsing.CaselessLiteral("avg") -count = pyparsing.CaselessLiteral("count") -sum = pyparsing.CaselessLiteral("sum") -last = pyparsing.CaselessLiteral("last") -func = (max | min | avg | count | sum | last)("func") - -less_than_op = ( - (pyparsing.CaselessLiteral("<") | pyparsing.CaselessLiteral("lt"))) -less_than_eq_op = ( - (pyparsing.CaselessLiteral("<=") | pyparsing.CaselessLiteral("lte"))) -greater_than_op = ( - (pyparsing.CaselessLiteral(">") | pyparsing.CaselessLiteral("gt"))) -greater_than_eq_op = ( - (pyparsing.CaselessLiteral(">=") | pyparsing.CaselessLiteral("gte"))) - -# Order is important. Put longer prefix first. -relational_op = ( - less_than_eq_op | less_than_op | greater_than_eq_op | greater_than_op)( - "relational_op") - -AND = pyparsing.CaselessLiteral("and") | pyparsing.CaselessLiteral("&&") -OR = pyparsing.CaselessLiteral("or") | pyparsing.CaselessLiteral("||") -logical_op = (AND | OR)("logical_op") - -times = pyparsing.CaselessLiteral("times") - -dimension = dimension_name + EQUAL + dimension_value -dimension.setParseAction(lambda tokens: "".join(tokens)) - -dimension_list = pyparsing.Group((LBRACE + pyparsing.Optional( - pyparsing.delimitedList(dimension)) + - RBRACE))("dimensions_list") - -metric = metric_name + pyparsing.Optional(dimension_list) -period = integer_number.copy().addParseAction(periodValidation)("period") -threshold = decimal_number("threshold") -periods = integer_number.copy().addParseAction(periodsValidation)("periods") - -deterministic = ( - pyparsing.CaselessLiteral('deterministic') -)('deterministic') - -function_and_metric = ( - func + LPAREN + metric + - pyparsing.Optional(COMMA + deterministic) + - pyparsing.Optional(COMMA + period) + - RPAREN -) - -expression = pyparsing.Forward() - -sub_expression = ((function_and_metric | metric) + relational_op + threshold + - pyparsing.Optional(times + periods) | - LPAREN + expression + RPAREN) -sub_expression.setParseAction(SubExpr) - -expression = ( - pyparsing.infixNotation(sub_expression, - [(AND, 2, pyparsing.opAssoc.LEFT, AndSubExpr), - (OR, 2, pyparsing.opAssoc.LEFT, OrSubExpr)])) - - -class AlarmExprParser(object): - def __init__(self, expr): - self._expr = expr - - @property - def sub_expr_list(self): - # Remove all spaces before parsing. Simple, quick fix for whitespace - # issue with dimension list not allowing whitespace after comma. 
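
A minimal, standalone demonstration of the infixNotation building block the grammar above is assembled from; it shows how pyparsing groups operands by precedence (the operator listed first binds tighter):

    import pyparsing as pp

    term = pp.Word(pp.alphas)
    AND = pp.CaselessLiteral('and')
    OR = pp.CaselessLiteral('or')

    expr = pp.infixNotation(term, [
        (AND, 2, pp.opAssoc.LEFT),   # listed first, so it binds tighter
        (OR, 2, pp.opAssoc.LEFT),
    ])

    print(expr.parseString('a and b or c and d'))
    # [[['a', 'and', 'b'], 'or', ['c', 'and', 'd']]]
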
- parse_result = (expression + pyparsing.stringEnd).parseString( - self._expr) - sub_expr_list = parse_result[0].operands_list - return sub_expr_list - - -def main(): - """Used for development and testing.""" - - expr_list = [ - "max(-_.千幸福的笑脸{घोड़ा=馬, " - "dn2=dv2,千幸福的笑脸घ=千幸福的笑脸घ}) gte 100 " - "times 3 && " - "(min(ເຮືອນ{dn3=dv3,家=дом}) < 10 or sum(biz{dn5=dv5}) >99 and " - "count(fizzle) lt 0or count(baz) > 1)".decode('utf8'), - - "max(foo{hostname=mini-mon,千=千}, 120) > 100 and (max(bar)>100 " - " or max(biz)>100)".decode('utf8'), - - "max(foo)>=100", - - "test_metric{this=that, that = this} < 1", - - "max ( 3test_metric5 { this = that }) lt 5 times 3", - - "3test_metric5 lt 3", - - "ntp.offset > 1 or ntp.offset < -5", - - "max(3test_metric5{it's this=that's it}) lt 5 times 3", - - "count(log.error{test=1}, deterministic) > 1.0", - - "count(log.error{test=1}, deterministic, 120) > 1.0", - - "last(test_metric{hold=here}) < 13", - - "count(log.error{test=1}, deterministic, 130) > 1.0", - - "count(log.error{test=1}, deterministic) > 1.0 times 0", - ] - - for expr in expr_list: - print('orig expr: {}'.format(expr.encode('utf8'))) - sub_exprs = [] - try: - alarm_expr_parser = AlarmExprParser(expr) - sub_exprs = alarm_expr_parser.sub_expr_list - except Exception as ex: - print("Parse failed: {}".format(ex)) - for sub_expr in sub_exprs: - print('sub expr: {}'.format( - sub_expr.fmtd_sub_expr_str.encode('utf8'))) - print('sub_expr dimensions: {}'.format( - sub_expr.dimensions_str.encode('utf8'))) - print('sub_expr deterministic: {}'.format( - sub_expr.deterministic)) - print('sub_expr period: {}'.format( - sub_expr.period)) - print("") - print("") - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/monasca_api/hacking/__init__.py b/monasca_api/hacking/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/monasca_api/hacking/checks.py b/monasca_api/hacking/checks.py deleted file mode 100644 index c4fa3fcee..000000000 --- a/monasca_api/hacking/checks.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright 2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import re - -from hacking import core - -assert_no_xrange_re = re.compile(r"\s*xrange\s*\(") - - -@core.flake8ext -def no_xrange(logical_line): - """Do not use 'xrange' - B319 - """ - if assert_no_xrange_re.match(logical_line): - yield (0, "B319: Do not use xrange().") diff --git a/monasca_api/healthcheck/__init__.py b/monasca_api/healthcheck/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/monasca_api/healthcheck/alarms_db_check.py b/monasca_api/healthcheck/alarms_db_check.py deleted file mode 100644 index 6d32eaa8f..000000000 --- a/monasca_api/healthcheck/alarms_db_check.py +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright 2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_log import log -from sqlalchemy import text - -from monasca_api.common.repositories.sqla import sql_repository -from monasca_api.healthcheck import base - -LOG = log.getLogger(__name__) - - -class AlarmsDbHealthCheck(base.BaseHealthCheck, - sql_repository.SQLRepository): - """Evaluates alarm db health - - Healthcheck verifies if: - * database is up and running, it is possible to establish connection - * sample sql query can be executed - - If following conditions are met health check return healthy status. - Otherwise unhealthy status is returned with explanation. - """ - - def health_check(self): - status = self.check_db_status() - return base.CheckResult(healthy=status[0], - message=status[1]) - - def check_db_status(self): - try: - with self._db_engine.connect() as con: - query = text('SELECT 1') - con.execute(query) - except Exception as ex: - LOG.exception(str(ex)) - return False, str(ex) - return True, 'OK' diff --git a/monasca_api/healthcheck/base.py b/monasca_api/healthcheck/base.py deleted file mode 100644 index 27e7ffa19..000000000 --- a/monasca_api/healthcheck/base.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright 2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import abc -import collections - -import six - - -class CheckResult(collections.namedtuple('CheckResult', ['healthy', 'message'])): - """Result for the health check - - healthy - boolean - message - string - """ - - -@six.add_metaclass(abc.ABCMeta) -class BaseHealthCheck(object): - """Abstract class implemented by the monasca-api healthcheck classes""" - - @abc.abstractmethod - def health_check(self): - """Evaluate health of given service""" - raise NotImplementedError # pragma: no cover diff --git a/monasca_api/healthcheck/kafka_check.py b/monasca_api/healthcheck/kafka_check.py deleted file mode 100644 index 74b8fb369..000000000 --- a/monasca_api/healthcheck/kafka_check.py +++ /dev/null @@ -1,79 +0,0 @@ -# Copyright 2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
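
Outside the repository plumbing, the liveness probe above boils down to a few lines of SQLAlchemy. A sketch, with the database URL as a placeholder:

    import sqlalchemy as sa
    from sqlalchemy import text


    def db_is_healthy(url):
        """Return (healthy, message); `url` is any SQLAlchemy database URL."""
        try:
            engine = sa.create_engine(url)
            with engine.connect() as conn:
                # A trivial round-trip proves the server accepts connections
                # and executes statements.
                conn.execute(text('SELECT 1'))
        except Exception as exc:
            return False, str(exc)
        return True, 'OK'
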
- -from oslo_config import cfg -from oslo_log import log -from six import PY3 - -from monasca_api.healthcheck import base -from monasca_common.kafka_lib import client - -LOG = log.getLogger(__name__) -CONF = cfg.CONF - - -class KafkaHealthCheck(base.BaseHealthCheck): - """Evaluates kafka health - - Healthcheck verifies if: - - * kafka server is up and running - * there is a configured topic in kafka - - If following conditions are met health check returns healthy status. - Otherwise unhealthy status is returned with message. - - Note: - Healthcheck checks 3 type of topics given in configuration: - metrics_topic, events_topic and alarm_state_transition_topic. - """ - - def health_check(self): - url = CONF.kafka.uri - - try: - kafka_client = client.KafkaClient(hosts=url) - except client.KafkaUnavailableError as ex: - LOG.error(repr(ex)) - error_str = 'Could not connect to Kafka at {0}'.format(url) - return base.CheckResult(healthy=False, message=error_str) - - status = self._verify_topics(kafka_client) - self._disconnect_gracefully(kafka_client) - - return base.CheckResult(healthy=status[0], - message=status[1]) - - @staticmethod - def _verify_topics(kafka_client): - topics = (CONF.kafka.metrics_topic, - CONF.kafka.events_topic, - CONF.kafka.alarm_state_transitions_topic) - if PY3: - topics = tuple(topic.encode('utf-8') for topic in topics) - - for topic in topics: - topic_exists = topic in kafka_client.topics - if not topic_exists: - error_str = 'Kafka: Topic {0} not found'.format(topic) - LOG.error(error_str) - return False, str(error_str) - return True, 'OK' - - @staticmethod - def _disconnect_gracefully(kafka_client): - try: - kafka_client.close() - except Exception: - LOG.exception('Closing Kafka Connection') diff --git a/monasca_api/healthcheck/keystone_protocol.py b/monasca_api/healthcheck/keystone_protocol.py deleted file mode 100644 index 60b665b56..000000000 --- a/monasca_api/healthcheck/keystone_protocol.py +++ /dev/null @@ -1,65 +0,0 @@ -# Copyright 2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystonemiddleware import auth_token -from oslo_log import log - -LOG = log.getLogger(__name__) - -_SKIP_PATH = '/version', '/healthcheck' -"""Tuple of non-application endpoints""" - - -class SkippingAuthProtocol(auth_token.AuthProtocol): - """SkippingAuthProtocol to reach healthcheck endpoint - - Because healthcheck endpoints exists as endpoint, it - is hidden behind keystone filter thus a request - needs to authenticated before it is reached. 
- - Note: - SkippingAuthProtocol is lean customization - of :py:class:`keystonemiddleware.auth_token.AuthProtocol` - that disables keystone communication if request - is meant to reach healthcheck - - """ - - def process_request(self, request): - path = request.path - for p in _SKIP_PATH: - if path.endswith(p): - LOG.debug( - ('Request path is %s and it does not require keystone ' - 'communication'), path) - return None # return NONE to reach actual logic - - return super(SkippingAuthProtocol, self).process_request(request) - - -def filter_factory(global_conf, **local_conf): # pragma: no cover - """Return factory function for :py:class:`.SkippingAuthProtocol` - - :param global_conf: global configuration - :param local_conf: local configuration - :return: factory function - :rtype: function - """ - conf = global_conf.copy() - conf.update(local_conf) - - def auth_filter(app): - return SkippingAuthProtocol(app, conf) - - return auth_filter diff --git a/monasca_api/healthcheck/metrics_db_check.py b/monasca_api/healthcheck/metrics_db_check.py deleted file mode 100644 index 9c368abd6..000000000 --- a/monasca_api/healthcheck/metrics_db_check.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright 2017 FUJITSU LIMITED -# (C) Copyright 2017 Hewlett Packard Enterprise Development LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from monasca_common.simport import simport -from oslo_config import cfg -from oslo_log import log - -from monasca_api.healthcheck import base - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - - -class MetricsDbCheck(base.BaseHealthCheck): - """Evaluates metrics db health - - Healthcheck what type of database is used (InfluxDB, Cassandra) - and provide health according to the given db. - - If following conditions are met health check return healthy status. - Otherwise unhealthy status is returned with explanation. - """ - - def __init__(self): - try: - self._metrics_repo = simport.load( - CONF.repositories.metrics_driver) - - except Exception as ex: - LOG.exception(ex) - raise - - def health_check(self): - status = self._metrics_repo.check_status() - return base.CheckResult(healthy=status[0], - message=status[1]) diff --git a/monasca_api/healthchecks.py b/monasca_api/healthchecks.py deleted file mode 100644 index 53c06c2de..000000000 --- a/monasca_api/healthchecks.py +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright 2017 FUJITSU LIMITED -# Copyright 2018 OP5 AB -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
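
SkippingAuthProtocol above is specific to keystonemiddleware, but the underlying pattern is plain WSGI routing. A generic sketch with hypothetical class names:

    _SKIP_PATHS = ('/version', '/healthcheck')


    class SkippingAuthMiddleware(object):
        """Route allow-listed paths around the auth stack."""

        def __init__(self, app, auth_wrapped_app):
            self._app = app                    # application without auth
            self._auth_app = auth_wrapped_app  # application behind auth

        def __call__(self, environ, start_response):
            path = environ.get('PATH_INFO', '')
            if path.endswith(_SKIP_PATHS):  # str.endswith accepts a tuple
                return self._app(environ, start_response)
            return self._auth_app(environ, start_response)
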
- -import falcon - -from monasca_api.api import healthcheck_api -from monasca_api.healthcheck import alarms_db_check -from monasca_api.healthcheck import kafka_check -from monasca_api.healthcheck import metrics_db_check -from monasca_api.v2.reference import helpers - - -class HealthChecks(healthcheck_api.HealthCheckApi): - CACHE_CONTROL = ['must-revalidate', 'no-cache', 'no-store'] - - HEALTHY_CODE_GET = falcon.HTTP_OK - HEALTHY_CODE_HEAD = falcon.HTTP_NO_CONTENT - NOT_HEALTHY_CODE = falcon.HTTP_SERVICE_UNAVAILABLE - - def __init__(self): - super(HealthChecks, self).__init__() - self._kafka_check = kafka_check.KafkaHealthCheck() - self._alarm_db_check = alarms_db_check.AlarmsDbHealthCheck() - self._metrics_db_check = metrics_db_check.MetricsDbCheck() - - def on_head(self, req, res): - res.status = self.HEALTHY_CODE_HEAD - res.cache_control = self.CACHE_CONTROL - - def on_get(self, req, res): - helpers.validate_authorization(req, ['api:healthcheck']) - kafka_result = self._kafka_check.health_check() - alarms_db_result = self._alarm_db_check.health_check() - metrics_db_result = self._metrics_db_check.health_check() - - status_data = { - 'kafka': kafka_result.message, - 'alarms_database': alarms_db_result.message, - 'metrics_database': metrics_db_result.message - } - health = (kafka_result.healthy and alarms_db_result.healthy and - metrics_db_result.healthy) - res.status = (self.HEALTHY_CODE_GET - if health else self.NOT_HEALTHY_CODE) - res.cache_control = self.CACHE_CONTROL - res.text = helpers.to_json(status_data) diff --git a/monasca_api/policies/__init__.py b/monasca_api/policies/__init__.py deleted file mode 100644 index 413975193..000000000 --- a/monasca_api/policies/__init__.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright 2017 FUJITSU LIMITED -# Copyright 2018 OP5 AB -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import os -import pkgutil - -from oslo_config import cfg -from oslo_log import log -from oslo_utils import importutils - -from monasca_api.conf import security - -LOG = log.getLogger(__name__) -_BASE_MOD_PATH = 'monasca_api.policies.' -CONF = cfg.CONF - - -def roles_list_to_check_str(roles_list): - converted_roles_list = ["role:" + role if role != '@' else role for role in roles_list] - return ' or '.join(converted_roles_list) - - -security.register_opts(CONF) - - -def load_policy_modules(): - """Load all modules that contain policies. - - Method iterates over modules of :py:mod:`monasca_events_api.policies` - and imports only those that contain following methods: - - - list_rules - - """ - for modname in _list_module_names(): - mod = importutils.import_module(_BASE_MOD_PATH + modname) - if hasattr(mod, 'list_rules'): - yield mod - - -def _list_module_names(): - package_path = os.path.dirname(os.path.abspath(__file__)) - for _, modname, ispkg in pkgutil.iter_modules(path=[package_path]): - if not (modname == "opts" and ispkg): - yield modname - - -def list_rules(): - """List all policy modules rules. 
- - Goes through all policy modules and yields their rules - - """ - all_rules = [] - for mod in load_policy_modules(): - rules = mod.list_rules() - all_rules.extend(rules) - return all_rules diff --git a/monasca_api/policies/alarms.py b/monasca_api/policies/alarms.py deleted file mode 100644 index 384e2d822..000000000 --- a/monasca_api/policies/alarms.py +++ /dev/null @@ -1,166 +0,0 @@ -# Copyright 2018 OP5 AB -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_config import cfg -from oslo_policy import policy - -from monasca_api import policies - -CONF = cfg.CONF -DEFAULT_AUTHORIZED_ROLES = policies.roles_list_to_check_str( - cfg.CONF.security.default_authorized_roles) -READ_ONLY_AUTHORIZED_ROLES = policies.roles_list_to_check_str( - cfg.CONF.security.read_only_authorized_roles) - -rules = [ - policy.DocumentedRuleDefault( - name='api:alarms:definition:post', - check_str=DEFAULT_AUTHORIZED_ROLES, - description='Create an alarm definition.', - operations=[ - { - 'path': '/v2.0/alarm-definitions/', - 'method': 'POST' - } - ] - ), - policy.DocumentedRuleDefault( - name='api:alarms:definition:get', - check_str=DEFAULT_AUTHORIZED_ROLES + ' or ' + READ_ONLY_AUTHORIZED_ROLES, - description='List or get the details of the specified alarm definition.', - operations=[ - { - 'path': '/v2.0/alarm-definitions/{alarm_definition_id}', - 'method': 'GET' - }, - { - 'path': '/v2.0/alarm-definitions', - 'method': 'GET' - } - ] - ), - policy.DocumentedRuleDefault( - name='api:alarms:definition:put', - check_str=DEFAULT_AUTHORIZED_ROLES, - description='Update/Replace the specified alarm definition.', - operations=[ - { - 'path': '/v2.0/alarm-definitions/{alarm_definition_id}', - 'method': 'PUT' - }, - ] - ), - policy.DocumentedRuleDefault( - name='api:alarms:definition:patch', - check_str=DEFAULT_AUTHORIZED_ROLES, - description='Update selected parameters of the specified alarm definition, ' - 'and enable/disable its actions.', - operations=[ - { - 'path': '/v2.0/alarm-definitions/{alarm_definition_id}', - 'method': 'PATCH' - }, - ] - ), - policy.DocumentedRuleDefault( - name='api:alarms:definition:delete', - check_str=DEFAULT_AUTHORIZED_ROLES, - description='Delete the specified alarm definition.', - operations=[ - { - 'path': '/v2.0/alarm-definitions/{alarm_definition_id}', - 'method': 'DELETE' - }, - ] - ), - policy.DocumentedRuleDefault( - name='api:alarms:put', - check_str=DEFAULT_AUTHORIZED_ROLES, - description='Update/Replace the entire state of the specified alarm.', - operations=[ - { - 'path': '/v2.0/alarms/{alarm_id}', - 'method': 'PUT' - }, - ] - ), - policy.DocumentedRuleDefault( - name='api:alarms:patch', - check_str=DEFAULT_AUTHORIZED_ROLES, - description='Update selected parameters of a specified alarm,' - ' set the alarm state and enable/disable it.', - operations=[ - { - 'path': '/v2.0/alarms/{alarm_id}', - 'method': 'PATCH' - }, - ] - ), - policy.DocumentedRuleDefault( - name='api:alarms:delete', - check_str=DEFAULT_AUTHORIZED_ROLES, - description='Delete the specified alarm.', - 
operations=[ - { - 'path': '/v2.0/alarms/{alarm_id}', - 'method': 'DELETE' - }, - ] - ), - policy.DocumentedRuleDefault( - name='api:alarms:get', - check_str=DEFAULT_AUTHORIZED_ROLES + ' or ' + READ_ONLY_AUTHORIZED_ROLES, - description='List or get the details of the specified alarm.', - operations=[ - { - 'path': '/v2.0/alarms/', - 'method': 'GET' - }, - { - 'path': '/v2.0/alarms/{alarm_id}', - 'method': 'GET' - }, - ] - ), - policy.DocumentedRuleDefault( - name='api:alarms:count', - check_str=DEFAULT_AUTHORIZED_ROLES + ' or ' + READ_ONLY_AUTHORIZED_ROLES, - description='Get the number of alarms that match the criteria.', - operations=[ - { - 'path': '/v2.0/alarms/count/', - 'method': 'GET' - } - ] - ), - policy.DocumentedRuleDefault( - name='api:alarms:state_history', - check_str=DEFAULT_AUTHORIZED_ROLES + ' or ' + READ_ONLY_AUTHORIZED_ROLES, - description='List alarm state history for alarms.', - operations=[ - { - 'path': '/v2.0/alarms/state-history', - 'method': 'GET' - }, - { - 'path': '/v2.0/alarms/{alarm_id}/state-history', - 'method': 'GET' - } - ] - ) -] - - -def list_rules(): - return rules diff --git a/monasca_api/policies/delegate.py b/monasca_api/policies/delegate.py deleted file mode 100644 index be193fc79..000000000 --- a/monasca_api/policies/delegate.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright 2018 OP5 AB -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_config import cfg -from oslo_policy import policy - -from monasca_api import policies - -DELEGATE_AUTHORIZED_ROLES = policies.roles_list_to_check_str( - cfg.CONF.security.delegate_authorized_roles) - -rules = [ - policy.RuleDefault( - name='api:delegate', - check_str=DELEGATE_AUTHORIZED_ROLES, - description='The rules which allow to access the API on' - ' behalf of another project (tenant).', - - ) -] - - -def list_rules(): - return rules diff --git a/monasca_api/policies/healthcheck.py b/monasca_api/policies/healthcheck.py deleted file mode 100644 index 8bd4b8f29..000000000 --- a/monasca_api/policies/healthcheck.py +++ /dev/null @@ -1,44 +0,0 @@ -# Copyright 2018 OP5 AB -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
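
For context, DocumentedRuleDefault entries like those above are consumed by registering them with an oslo.policy Enforcer. A hedged sketch; the role names are examples, not the configured defaults:

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_defaults([
        policy.DocumentedRuleDefault(
            name='api:alarms:get',
            check_str='role:monasca-user or role:monasca-read-only-user',
            description='List or get the details of the specified alarm.',
            operations=[{'path': '/v2.0/alarms', 'method': 'GET'}]),
    ])

    # Credentials normally come from the request context.
    creds = {'roles': ['monasca-user'], 'project_id': 'some-project'}
    assert enforcer.enforce('api:alarms:get', target={}, creds=creds)
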
- -from oslo_config import cfg -from oslo_policy import policy - -from monasca_api import policies - -CONF = cfg.CONF -HEALTHCHECK_ROLES = policies.roles_list_to_check_str(cfg.CONF.security.healthcheck_roles) - -rules = [ - policy.DocumentedRuleDefault( - name='api:healthcheck', - check_str=HEALTHCHECK_ROLES, - description='Check healthiness.', - operations=[ - {'path': '/healthcheck', 'method': 'GET'} - ] - ), - policy.DocumentedRuleDefault( - name='api:healthcheck:head', - check_str=HEALTHCHECK_ROLES, - description='Healthcheck head rule', - operations=[ - {'path': '/healthcheck', 'method': 'HEAD'} - ] - ) -] - - -def list_rules(): - return rules diff --git a/monasca_api/policies/logs.py b/monasca_api/policies/logs.py deleted file mode 100644 index aa5d4eb14..000000000 --- a/monasca_api/policies/logs.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright 2018 OP5 AB -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from monasca_api import policies -from oslo_config import cfg -from oslo_policy import policy - - -DEFAULT_AUTHORIZED_ROLES = policies.roles_list_to_check_str( - cfg.CONF.security.default_authorized_roles) -AGENT_AUTHORIZED_ROLES = policies.roles_list_to_check_str( - cfg.CONF.security.agent_authorized_roles) -DELEGATE_AUTHORIZED_ROLES = policies.roles_list_to_check_str( - cfg.CONF.security.delegate_authorized_roles) - -rules = [ - policy.DocumentedRuleDefault( - name='api:logs:post', - check_str=' or '.join(filter(None, [AGENT_AUTHORIZED_ROLES, - DEFAULT_AUTHORIZED_ROLES, - DELEGATE_AUTHORIZED_ROLES])), - description='Logs post rule', - operations=[ - {'path': '/logs', 'method': 'POST'}, - ] - ) -] - - -def list_rules(): - return rules diff --git a/monasca_api/policies/metrics.py b/monasca_api/policies/metrics.py deleted file mode 100644 index cf2614e23..000000000 --- a/monasca_api/policies/metrics.py +++ /dev/null @@ -1,67 +0,0 @@ -# Copyright 2018 OP5 AB -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo_config import cfg -from oslo_policy import policy - -from monasca_api import policies - -CONF = cfg.CONF -DEFAULT_AUTHORIZED_ROLES = policies.roles_list_to_check_str( - cfg.CONF.security.default_authorized_roles) -READ_ONLY_AUTHORIZED_ROLES = policies.roles_list_to_check_str( - cfg.CONF.security.read_only_authorized_roles) -AGENT_AUTHORIZED_ROLES = policies.roles_list_to_check_str(cfg.CONF.security.agent_authorized_roles) - -rules = [ - policy.DocumentedRuleDefault( - name='api:metrics:get', - check_str=DEFAULT_AUTHORIZED_ROLES + ' or ' + READ_ONLY_AUTHORIZED_ROLES, - description='List metrics, measurements, metric statistics or metric names.', - operations=[ - {'path': '/v2.0/metrics', 'method': 'GET'}, - {'path': '/v2.0/metrics/measurements', 'method': 'GET'}, - {'path': '/v2.0/metrics/statistics', 'method': 'GET'}, - {'path': '/v2.0/metrics/names', 'method': 'GET'} - ] - ), - policy.DocumentedRuleDefault( - name='api:metrics:post', - check_str=DEFAULT_AUTHORIZED_ROLES + ' or ' + AGENT_AUTHORIZED_ROLES, - description='Create metrics.', - operations=[ - {'path': '/v2.0/metrics', 'method': 'POST'} - ] - ), - policy.DocumentedRuleDefault( - name='api:metrics:dimension:values', - check_str=DEFAULT_AUTHORIZED_ROLES + ' or ' + READ_ONLY_AUTHORIZED_ROLES, - description='List dimension values.', - operations=[ - {'path': '/v2.0/metrics/dimensions/names/values', 'method': 'GET'} - ] - ), - policy.DocumentedRuleDefault( - name='api:metrics:dimension:names', - check_str=DEFAULT_AUTHORIZED_ROLES + ' or ' + READ_ONLY_AUTHORIZED_ROLES, - description='List dimension names.', - operations=[ - {'path': '/v2.0/metrics/dimensions/names', 'method': 'GET'} - ] - ), -] - - -def list_rules(): - return rules diff --git a/monasca_api/policies/notifications.py b/monasca_api/policies/notifications.py deleted file mode 100644 index 8743205db..000000000 --- a/monasca_api/policies/notifications.py +++ /dev/null @@ -1,101 +0,0 @@ -# Copyright 2018 OP5 AB -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo_config import cfg -from oslo_policy import policy - -from monasca_api import policies - -CONF = cfg.CONF -DEFAULT_AUTHORIZED_ROLES = policies.roles_list_to_check_str( - cfg.CONF.security.default_authorized_roles) -READ_ONLY_AUTHORIZED_ROLES = policies.roles_list_to_check_str( - cfg.CONF.security.read_only_authorized_roles) - -rules = [ - policy.DocumentedRuleDefault( - name='api:notifications:put', - check_str=DEFAULT_AUTHORIZED_ROLES, - description='Update the specified notification method.', - operations=[ - { - 'path': '/v2.0/notification-methods/{notification_method_id}', - 'method': 'PUT' - }, - ] - ), - policy.DocumentedRuleDefault( - name='api:notifications:patch', - check_str=DEFAULT_AUTHORIZED_ROLES, - description='Update selected parameters of the specified notification method.', - operations=[ - { - 'path': '/v2.0/notification-methods/{notification_method_id}', - 'method': 'PATCH' - }, - ] - ), - policy.DocumentedRuleDefault( - name='api:notifications:delete', - check_str=DEFAULT_AUTHORIZED_ROLES, - description='Delete the specified notification method.', - operations=[ - { - 'path': '/v2.0/notification-methods/{notification_method_id}', - 'method': 'DELETE' - }, - ] - ), - policy.DocumentedRuleDefault( - name='api:notifications:get', - check_str=DEFAULT_AUTHORIZED_ROLES + ' or ' + READ_ONLY_AUTHORIZED_ROLES, - description='List or get the details of the specified notification method.', - operations=[ - { - 'path': '/v2.0/notification-methods', - 'method': 'GET' - }, - { - 'path': '/v2.0/notification-methods/{notification_method_id}', - 'method': 'GET' - }, - ] - ), - policy.DocumentedRuleDefault( - name='api:notifications:post', - check_str=DEFAULT_AUTHORIZED_ROLES, - description='Create a notification method.', - operations=[ - { - 'path': '/v2.0/notification-methods', - 'method': 'POST' - } - ] - ), - policy.DocumentedRuleDefault( - name='api:notifications:type', - check_str=DEFAULT_AUTHORIZED_ROLES + ' or ' + READ_ONLY_AUTHORIZED_ROLES, - description='List supported notification method types.', - operations=[ - { - 'path': '/v2.0/notification-methods/types', - 'method': 'GET' - } - ] - ) -] - - -def list_rules(): - return rules diff --git a/monasca_api/policies/versions.py b/monasca_api/policies/versions.py deleted file mode 100644 index 1f1275375..000000000 --- a/monasca_api/policies/versions.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2018 OP5 AB -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo_config import cfg -from oslo_policy import policy - -from monasca_api import policies - -CONF = cfg.CONF -VERSIONS_ROLES = policies.roles_list_to_check_str(cfg.CONF.security.versions_roles) - -rules = [ - policy.DocumentedRuleDefault( - name='api:versions', - check_str=VERSIONS_ROLES, - description='List supported versions ' - 'or get the details about the specified version of Monasca API.', - operations=[ - {'path': '/', 'method': 'GET'}, - {'path': '/v2.0', 'method': 'GET'} - ] - ), -] - - -def list_rules(): - return rules diff --git a/monasca_api/tests/__init__.py b/monasca_api/tests/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/monasca_api/tests/base.py b/monasca_api/tests/base.py deleted file mode 100644 index 4b2ae8db2..000000000 --- a/monasca_api/tests/base.py +++ /dev/null @@ -1,233 +0,0 @@ -# coding=utf-8 -# Copyright 2015 kornicameister@gmail.com -# Copyright 2015-2017 FUJITSU LIMITED -# Copyright 2018 OP5 AB -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import codecs -import os -import random -import string - -import falcon -from falcon import testing -import fixtures -from monasca_api.common.policy import policy_engine as policy -from oslo_config import cfg -from oslo_config import fixture as oo_cfg -from oslo_context import fixture as oo_ctx -from oslo_serialization import jsonutils -from oslotest import base as oslotest_base -import six -import testtools.matchers as matchers - -from monasca_api.api.core import request -from monasca_api import conf -from monasca_api import config -from monasca_api import policies - - -policy.POLICIES = policies - - -class ConfigFixture(oo_cfg.Config): - """Mocks configuration""" - - def __init__(self): - super(ConfigFixture, self).__init__(config.CONF) - - def setUp(self): - super(ConfigFixture, self).setUp() - self.addCleanup(self._clean_config_loaded_flag) - conf.register_opts() - self._set_defaults() - config.parse_args(argv=[]) # prevent oslo from parsing test args - - @staticmethod - def _clean_config_loaded_flag(): - config._CONF_LOADED = False - - def _set_defaults(self): - self.conf.set_default('user', 'monasca', 'influxdb') - - -class BaseTestCase(oslotest_base.BaseTestCase): - - def setUp(self): - super(BaseTestCase, self).setUp() - self.useFixture(ConfigFixture()) - self.useFixture(oo_ctx.ClearRequestContext()) - self.useFixture(PolicyFixture()) - - @staticmethod - def conf_override(**kw): - """Override flag variables for a test.""" - group = kw.pop('group', None) - for k, v in kw.items(): - cfg.CONF.set_override(k, v, group) - - @staticmethod - def conf_default(**kw): - """Override flag variables for a test.""" - group = kw.pop('group', None) - for k, v in kw.items(): - cfg.CONF.set_default(k, v, group) - - -class BaseApiTestCase(BaseTestCase, testing.TestCase): - - def setUp(self): - super(BaseApiTestCase, self).setUp() - # TODO(dszumski): Loading the app from api/server.py seems to make - # more sense here so that we don't have to manually keep the tests in - # sync with it. 
- self.app = falcon.App(request_type=request.Request) - # NOTE(dszumski): Falcon 2.0.0 switches the default for this from True - # to False so we explicitly set it here to prevent the behaviour - # changing between versions. - self.app.req_options.strip_url_path_trailing_slash = True - - @staticmethod - def create_environ(*args, **kwargs): - return testing.create_environ( - *args, - **kwargs - ) - - -class PolicyFixture(fixtures.Fixture): - """Override the policy with a completely new policy file. - - This overrides the policy with a completely fake and synthetic - policy file. - - """ - - def setUp(self): - super(PolicyFixture, self).setUp() - self._prepare_policy() - policy.reset() - policy.init() - - def _prepare_policy(self): - policy_dir = self.useFixture(fixtures.TempDir()) - policy_file = os.path.join(policy_dir.path, 'policy.yaml') - # load the fake_policy data and add the missing default rules. - policy_rules = jsonutils.loads('{}') - self.add_missing_default_rules(policy_rules) - with open(policy_file, 'w') as f: - jsonutils.dump(policy_rules, f) - - BaseTestCase.conf_override(policy_file=policy_file, - group='oslo_policy') - BaseTestCase.conf_override(policy_dirs=[], group='oslo_policy') - - @staticmethod - def add_missing_default_rules(rules): - for rule in policies.list_rules(): - if rule.name not in rules: - rules[rule.name] = rule.check_str - - -class RESTResponseEquals(object): - """Match if the supplied data contains a single string containing a JSON - object which decodes to match expected_data, excluding the contents of - the 'links' key. - """ - - def __init__(self, expected_data): - self.expected_data = expected_data - - if u"links" in expected_data: - del expected_data[u"links"] - - def __str__(self): - return 'RESTResponseEquals(%s)' % (self.expected_data,) - - def match(self, actual): - response_data = actual.json - - if u"links" in response_data: - del response_data[u"links"] - - return matchers.Equals(self.expected_data).match(response_data) - - -def generate_unique_message(size): - letters = string.ascii_letters - - def rand(amount, space=True): - space = ' ' if space else '' - return ''.join((random.choice(letters + space) for _ in range(amount))) - - return rand(size) - - -def _hex_to_unicode(hex_raw): - hex_raw = six.b(hex_raw.replace(' ', '')) - hex_str_raw = codecs.getdecoder('hex')(hex_raw)[0] - hex_str = hex_str_raw.decode('utf-8', 'replace') - return hex_str - - -# NOTE(trebskit) => http://www.cl.cam.ac.uk/~mgk25/ucs/examples/UTF-8-test.txt -UNICODE_MESSAGES = [ - # Unicode is evil... - {'case': 'arabic', 'input': 'يونيكود هو الشر'}, - {'case': 'polish', 'input': 'Unicode to zło'}, - {'case': 'greek', 'input': 'Unicode είναι κακό'}, - {'case': 'portuguese', 'input': 'Unicode é malvado'}, - {'case': 'lao', 'input': 'unicode ເປັນຄວາມຊົ່ວຮ້າຍ'}, - {'case': 'german', 'input': 'Unicode ist böse'}, - {'case': 'japanese', 'input': 'ユニコードは悪です'}, - {'case': 'russian', 'input': 'Unicode - зло'}, - {'case': 'urdu', 'input': 'یونیسیڈ برائی ہے'}, - {'case': 'weird', 'input': '🆄🅽🅸🅲🅾🅳🅴 🅸🆂 🅴🆅🅸🅻...'}, # funky, huh ? 
- # conditions from link above - # 2.3 Other boundary conditions - {'case': 'stress_2_3_1', 'input': _hex_to_unicode('ed 9f bf')}, - {'case': 'stress_2_3_2', 'input': _hex_to_unicode('ee 80 80')}, - {'case': 'stress_2_3_3', 'input': _hex_to_unicode('ef bf bd')}, - {'case': 'stress_2_3_4', 'input': _hex_to_unicode('f4 8f bf bf')}, - {'case': 'stress_2_3_5', 'input': _hex_to_unicode('f4 90 80 80')}, - # 3.5 Impossible bytes - {'case': 'stress_3_5_1', 'input': _hex_to_unicode('fe')}, - {'case': 'stress_3_5_2', 'input': _hex_to_unicode('ff')}, - {'case': 'stress_3_5_3', 'input': _hex_to_unicode('fe fe ff ff')}, - # 4.1 Examples of an overlong ASCII character - {'case': 'stress_4_1_1', 'input': _hex_to_unicode('c0 af')}, - {'case': 'stress_4_1_2', 'input': _hex_to_unicode('e0 80 af')}, - {'case': 'stress_4_1_3', 'input': _hex_to_unicode('f0 80 80 af')}, - {'case': 'stress_4_1_4', 'input': _hex_to_unicode('f8 80 80 80 af')}, - {'case': 'stress_4_1_5', 'input': _hex_to_unicode('fc 80 80 80 80 af')}, - # 4.2 Maximum overlong sequences - {'case': 'stress_4_2_1', 'input': _hex_to_unicode('c1 bf')}, - {'case': 'stress_4_2_2', 'input': _hex_to_unicode('e0 9f bf')}, - {'case': 'stress_4_2_3', 'input': _hex_to_unicode('f0 8f bf bf')}, - {'case': 'stress_4_2_4', 'input': _hex_to_unicode('f8 87 bf bf bf')}, - {'case': 'stress_4_2_5', 'input': _hex_to_unicode('fc 83 bf bf bf bf')}, - # 4.3 Overlong representation of the NUL character - {'case': 'stress_4_3_1', 'input': _hex_to_unicode('c0 80')}, - {'case': 'stress_4_3_2', 'input': _hex_to_unicode('e0 80 80')}, - {'case': 'stress_4_3_3', 'input': _hex_to_unicode('f0 80 80 80')}, - {'case': 'stress_4_3_4', 'input': _hex_to_unicode('f8 80 80 80 80')}, - {'case': 'stress_4_3_5', 'input': _hex_to_unicode('fc 80 80 80 80 80')}, - # and some cheesy example from the Polish novel 'Pan Tadeusz' - {'case': 'mr_t', 'input': 'Hajże na Soplicę!'}, - # it won't be complete without that one - {'case': 'mr_b', 'input': 'Grzegorz Brzęczyszczykiewicz, ' - 'Chrząszczyżewoszyce, powiat Łękołody'}, - # great success, christmas time - {'case': 'olaf', 'input': '☃'} -] diff --git a/monasca_api/tests/cmd/__init__.py b/monasca_api/tests/cmd/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/monasca_api/tests/cmd/test_status.py b/monasca_api/tests/cmd/test_status.py deleted file mode 100644 index 93a6855a1..000000000 --- a/monasca_api/tests/cmd/test_status.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright (c) 2018 NEC, Corp. -# Copyright (c) 2018 SUSE LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License.
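The test below iterates the _upgrade_checks table that oslo.upgradecheck commands expose. A minimal sketch of the shape of such a Checks class, assuming the conventional oslo.upgradecheck layout (the real monasca_api.cmd.status module may differ):

# Minimal sketch of an oslo.upgradecheck command class; illustrative only.
from oslo_upgradecheck import upgradecheck

class Checks(upgradecheck.UpgradeCommands):
    def _check_placeholder(self):
        # A trivial check that always reports success.
        return upgradecheck.Result(upgradecheck.Code.SUCCESS)

    # (display name, check callable) pairs iterated by the test below.
    _upgrade_checks = (('placeholder', _check_placeholder),)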
- -import unittest - -from oslo_config import cfg -from oslo_upgradecheck.upgradecheck import Code - -from monasca_api.cmd import status - - -class TestUpgradeChecks(unittest.TestCase): - - def setUp(self): - super(TestUpgradeChecks, self).setUp() - self.cmd = status.Checks() - cfg.CONF(args=[], project='monasca') - - def test_checks(self): - for name, func in self.cmd._upgrade_checks: - if isinstance(func, tuple): - func_name, kwargs = func - result = func_name(self, **kwargs) - else: - result = func(self) - self.assertEqual(Code.SUCCESS, result.code) diff --git a/monasca_api/tests/config.py b/monasca_api/tests/config.py deleted file mode 100644 index 8654361aa..000000000 --- a/monasca_api/tests/config.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from unittest import mock - -from monasca_api import config -from monasca_api.tests import base - - -class TestConfig(base.BaseTestCase): - - @mock.patch('monasca_api.config.sys') - def test_should_return_true_if_runs_under_gunicorn(self, sys_patch): - sys_patch.argv = [ - '/bin/gunicorn', - '--capture-output', - '--paste', - 'etc/monasca/log-api-paste.ini', - '--workers', - '1' - ] - sys_patch.executable = '/bin/python' - self.assertTrue(config._is_running_under_gunicorn()) - - @mock.patch('monasca_api.config.sys') - def test_should_return_false_if_runs_without_gunicorn(self, sys_patch): - sys_patch.argv = ['/bin/monasca-log-api'] - sys_patch.executable = '/bin/python' - self.assertFalse(config._is_running_under_gunicorn()) diff --git a/monasca_api/tests/db/__init__.py b/monasca_api/tests/db/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/monasca_api/tests/db/test_fingerprint.py b/monasca_api/tests/db/test_fingerprint.py deleted file mode 100644 index 84aadf052..000000000 --- a/monasca_api/tests/db/test_fingerprint.py +++ /dev/null @@ -1,65 +0,0 @@ -# Copyright 2018 StackHPC Ltd. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License.
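The fingerprint tests below hinge on a single mapping: hash the raw schema dump with SHA-256 and look the digest up in a table of known fingerprints to recover the pre-Alembic revision. A minimal sketch of that lookup, using names from the tests (_REVS appears in the tests; revision_for is a hypothetical helper):

# Sketch of the digest -> revision lookup the tests exercise; the actual
# monasca_api.db.fingerprint module carries more state (schema_raw, sha256).
import hashlib

_REVS = {}  # sha256 hex digest of the raw schema -> Alembic revision id

def revision_for(schema_raw):
    digest = hashlib.sha256(schema_raw.encode('utf-8')).hexdigest()
    return _REVS.get(digest)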
- -import hashlib -from unittest import mock - -import monasca_api.db.fingerprint as fingerprint -from monasca_api.tests import base - - -class TestFingerprint(base.BaseTestCase): - - @mock.patch('monasca_api.db.fingerprint.Fingerprint._get_metadata') - @mock.patch('monasca_api.db.fingerprint.Fingerprint._get_schema_raw') - def test_get_schema_raw_pre_alembic(self, mock_schema_raw, mock_metadata): - mock_schema_raw.return_value = 'dummy_schema_raw' - - tables = { - 'dummy_table': 'dummy_columns' - } - mock_metadata.return_value.tables = tables - - # No Alembic revision ID exists in the DB so we look it up from the - # table of fingerprints. Since we use a dummy schema, we insert a dummy - # entry into the lookup table. - fingerprint._REVS[ - hashlib.sha256(b'dummy_schema_raw').hexdigest()] = 'dummy_revision' - - f = fingerprint.Fingerprint('mock_engine') - self.assertEqual(f.schema_raw, 'dummy_schema_raw') - self.assertEqual(f.sha256, hashlib.sha256(b'dummy_schema_raw').hexdigest()) - self.assertEqual(f.revision, 'dummy_revision') - - @mock.patch('monasca_api.db.fingerprint.Fingerprint._get_db_session') - @mock.patch('monasca_api.db.fingerprint.Fingerprint._get_metadata') - @mock.patch('monasca_api.db.fingerprint.Fingerprint._get_schema_raw') - def test_get_schema_raw_post_alembic( - self, mock_schema_raw, mock_metadata, mock_db_session): - mock_schema_raw.return_value = 'dummy_schema_raw' - - tables = { - 'alembic_version': 'dummy_version', - 'dummy_table': 'dummy_columns' - } - mock_metadata.return_value.tables = tables - - # Alembic sets the version in the DB, so we look it up from there - mock_db_session.return_value.query.return_value.one.return_value = ( - 'dummy_revision',) - - f = fingerprint.Fingerprint('mock_engine') - self.assertEqual(f.schema_raw, 'dummy_schema_raw') - self.assertEqual(f.sha256, hashlib.sha256(b'dummy_schema_raw').hexdigest()) - self.assertEqual(f.revision, 'dummy_revision') diff --git a/monasca_api/tests/policy/__init__.py b/monasca_api/tests/policy/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/monasca_api/tests/policy/base.py b/monasca_api/tests/policy/base.py deleted file mode 100644 index 4a52e6532..000000000 --- a/monasca_api/tests/policy/base.py +++ /dev/null @@ -1,102 +0,0 @@ -# Copyright 2017 OP5 AB -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License.
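The policy test base below layers fixtures over plain oslo.policy primitives. A self-contained sketch of the flow the policy_engine wrapper ultimately delegates to, using only public oslo.policy calls (the rule names are placeholders):

# Register rule defaults on an Enforcer, then authorize against creds.
from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_defaults([
    policy.RuleDefault('example:allowed', '@'),  # '@' always passes
    policy.RuleDefault('example:denied', '!'),   # '!' always fails
])
creds = {'roles': ['member'], 'project_id': 'fake'}
assert enforcer.authorize('example:allowed', {}, creds)
assert not enforcer.authorize('example:denied', {}, creds)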
- -"""Base classes for policy unit tests.""" - -import os - -import fixtures - -from oslo_config import cfg -from oslo_config import fixture as config_fixture -from oslo_policy import opts as policy_opts -from oslo_serialization import jsonutils -from oslotest import base - -from monasca_api.common.policy import policy_engine - -CONF = cfg.CONF - - -class FakePolicy(object): - def list_rules(self): - return [] - - -class ConfigFixture(config_fixture.Config): - - def setUp(self): - super(ConfigFixture, self).setUp() - CONF(args=[], - prog='api', - project='monasca', - version=0, - description='Testing monasca-api.common') - policy_opts.set_defaults(CONF) - - -class BaseTestCase(base.BaseTestCase): - def setUp(self): - super(BaseTestCase, self).setUp() - self.useFixture(ConfigFixture(CONF)) - self.useFixture(EmptyPolicyFixture()) - - @staticmethod - def conf_override(**kw): - """Override flag variables for a test.""" - group = kw.pop('group', None) - for k, v in kw.items(): - CONF.set_override(k, v, group) - - -class EmptyPolicyFixture(fixtures.Fixture): - """Override the policy with an empty policy file. - - This overrides the policy with a completely fake and synthetic - policy file. - - """ - def setUp(self): - super(EmptyPolicyFixture, self).setUp() - self._prepare_policy() - policy_engine.POLICIES = FakePolicy() - policy_engine.reset() - policy_engine.init() - self.addCleanup(policy_engine.reset) - - def _prepare_policy(self): - - policy_dir = self.useFixture(fixtures.TempDir()) - policy_file = os.path.join(policy_dir.path, 'policy.yaml') - - policy_rules = jsonutils.loads('{}') - - self.add_missing_default_rules(policy_rules) - - with open(policy_file, 'w') as f: - jsonutils.dump(policy_rules, f) - - BaseTestCase.conf_override(policy_file=policy_file, - group='oslo_policy') - BaseTestCase.conf_override(policy_dirs=[], group='oslo_policy') - - def add_missing_default_rules(self, rules): - policies = FakePolicy() - - for rule in policies.list_rules(): - if rule.name not in rules: - rules[rule.name] = rule.check_str diff --git a/monasca_api/tests/policy/test_policy.py b/monasca_api/tests/policy/test_policy.py deleted file mode 100644 index 9808c922e..000000000 --- a/monasca_api/tests/policy/test_policy.py +++ /dev/null @@ -1,274 +0,0 @@ -# Copyright 2017 OP5 AB -# Copyright 2011 Piston Cloud Computing, Inc. -# All Rights Reserved. - -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import requests_mock -from unittest import mock - -from oslo_context import context -from oslo_policy import policy as os_policy - -from monasca_api.common.policy import policy_engine -from monasca_api.tests.policy import base - - -class PolicyFileTestCase(base.BaseTestCase): - def setUp(self): - super(PolicyFileTestCase, self).setUp() - self.context = context.RequestContext(user='fake', - project_id='fake', - is_admin=False) - self.target = {} - - def test_modified_policy_reloads(self): - tmp_file = \ - self.create_tempfiles(files=[('policies', '{}')], ext='.yaml')[0] - base.BaseTestCase.conf_override(policy_file=tmp_file, - group='oslo_policy') - - policy_engine.reset() - policy_engine.init() - - action = 'example:test' - rule = os_policy.RuleDefault(action, '') - policy_engine._ENFORCER.register_defaults([rule]) - - with open(tmp_file, 'w') as policy_file: - policy_file.write('{"example:test": ""}') - policy_engine.authorize(self.context, action, self.target) - - with open(tmp_file, 'w') as policy_file: - policy_file.write('{"example:test": "!"}') - policy_engine._ENFORCER.load_rules(True) - self.assertRaises(os_policy.PolicyNotAuthorized, - policy_engine.authorize, - self.context, action, self.target) - - -class PolicyTestCase(base.BaseTestCase): - def setUp(self): - super(PolicyTestCase, self).setUp() - rules = [ - os_policy.RuleDefault("true", "@"), - os_policy.RuleDefault("example:allowed", "@"), - os_policy.RuleDefault("example:denied", "!"), - os_policy.RuleDefault("old_action_not_default", "@"), - os_policy.RuleDefault("new_action", "@"), - os_policy.RuleDefault("old_action_default", "rule:admin_api"), - os_policy.RuleDefault("example:lowercase_admin", - "role:admin or role:sysadmin"), - os_policy.RuleDefault("example:uppercase_admin", - "role:ADMIN or role:sysadmin"), - os_policy.RuleDefault("example:get_http", - "http://www.example.com"), - os_policy.RuleDefault("example:my_file", - "role:compute_admin or " - "project_id:%(project_id)s"), - os_policy.RuleDefault("example:early_and_fail", "! 
and @"), - os_policy.RuleDefault("example:early_or_success", "@ or !"), - ] - policy_engine.reset() - policy_engine.init() - - self.context = context.RequestContext(user='fake', - project_id='fake', - is_admin=False) - policy_engine._ENFORCER.register_defaults(rules) - self.target = {} - - def test_authorize_nonexistent_action_throws(self): - - action = 'example:noexists' - self.assertRaises(os_policy.PolicyNotRegistered, policy_engine.authorize, - self.context, action, self.target) - - def test_authorize_bad_action_throws(self): - action = 'example:denied' - self.assertRaises(os_policy.PolicyNotAuthorized, policy_engine.authorize, - self.context, action, self.target) - - def test_authorize_bad_action_noraise(self): - action = "example:denied" - result = policy_engine.authorize(self.context, action, self.target, False) - self.assertFalse(result) - - def test_authorize_good_action(self): - action = "example:allowed" - result = policy_engine.authorize(self.context, action, self.target) - self.assertTrue(result) - - @requests_mock.mock() - def test_authorize_http_true(self, req_mock): - req_mock.post('http://www.example.com/', - text='True') - action = "example:get_http" - target = {} - result = policy_engine.authorize(self.context, action, target) - self.assertTrue(result) - - @requests_mock.mock() - def test_authorize_http_false(self, req_mock): - req_mock.post('http://www.example.com/', - text='False') - action = "example:get_http" - target = {} - self.assertRaises(os_policy.PolicyNotAuthorized, policy_engine.authorize, - self.context, action, target) - - def test_templatized_authorization(self): - target_mine = {'project_id': 'fake'} - target_not_mine = {'project_id': 'another'} - action = "example:my_file" - policy_engine.authorize(self.context, action, target_mine) - self.assertRaises(os_policy.PolicyNotAuthorized, policy_engine.authorize, - self.context, action, target_not_mine) - - def test_early_AND_authorization(self): - action = "example:early_and_fail" - self.assertRaises(os_policy.PolicyNotAuthorized, policy_engine.authorize, - self.context, action, self.target) - - def test_early_OR_authorization(self): - action = "example:early_or_success" - policy_engine.authorize(self.context, action, self.target) - - def test_ignore_case_role_check(self): - lowercase_action = "example:lowercase_admin" - uppercase_action = "example:uppercase_admin" - # NOTE(dprince) we mix case in the Admin role here to ensure - # case is ignored - admin_context = context.RequestContext('admin', - 'fake', - roles=['AdMiN']) - policy_engine.authorize(admin_context, lowercase_action, self.target) - policy_engine.authorize(admin_context, uppercase_action, self.target) - - @mock.patch.object(policy_engine.LOG, 'warning') - def test_warning_when_deprecated_user_based_rule_used(self, mock_warning): - policy_engine._warning_for_deprecated_user_based_rules( - [("os_compute_api:servers:index", - "project_id:%(project_id)s or user_id:%(user_id)s")]) - mock_warning.assert_called_once_with( - u"The user_id attribute isn't supported in the rule " - "'%s'. 
All the user_id based policy enforcement will be removed " - "in the future.", "os_compute_api:servers:index") - - @mock.patch.object(policy_engine.LOG, 'warning') - def test_no_warning_for_user_based_resource(self, mock_warning): - policy_engine._warning_for_deprecated_user_based_rules( - [("os_compute_api:os-keypairs:index", - "user_id:%(user_id)s")]) - mock_warning.assert_not_called() - - @mock.patch.object(policy_engine.LOG, 'warning') - def test_no_warning_for_no_user_based_rule(self, mock_warning): - policy_engine._warning_for_deprecated_user_based_rules( - [("os_compute_api:servers:index", - "project_id:%(project_id)s")]) - mock_warning.assert_not_called() - - @mock.patch.object(policy_engine.LOG, 'warning') - def test_verify_deprecated_policy_using_old_action(self, mock_warning): - policy_engine._ENFORCER.load_rules(True) - old_policy = "old_action_not_default" - new_policy = "new_action" - default_rule = "rule:admin_api" - - using_old_action = policy_engine.verify_deprecated_policy( - old_policy, new_policy, default_rule, self.context) - - mock_warning.assert_called_once_with( - "Start using the new action '{0}'. The existing action '{1}' is " - "being deprecated and will be removed in " - "future release.".format(new_policy, old_policy)) - self.assertTrue(using_old_action) - - def test_verify_deprecated_policy_using_new_action(self): - policy_engine._ENFORCER.load_rules(True) - old_policy = "old_action_default" - new_policy = "new_action" - default_rule = "rule:admin_api" - - using_old_action = policy_engine.verify_deprecated_policy( - old_policy, new_policy, default_rule, self.context) - - self.assertFalse(using_old_action) - - -class IsAdminCheckTestCase(base.BaseTestCase): - def setUp(self): - super(IsAdminCheckTestCase, self).setUp() - policy_engine.init() - - def test_init_true(self): - check = policy_engine.IsAdminCheck('is_admin', 'True') - - self.assertEqual(check.kind, 'is_admin') - self.assertEqual(check.match, 'True') - self.assertTrue(check.expected) - - def test_init_false(self): - check = policy_engine.IsAdminCheck('is_admin', 'nottrue') - - self.assertEqual(check.kind, 'is_admin') - self.assertEqual(check.match, 'False') - self.assertFalse(check.expected) - - def test_call_true(self): - check = policy_engine.IsAdminCheck('is_admin', 'True') - - self.assertTrue(check('target', dict(is_admin=True), - policy_engine._ENFORCER)) - self.assertFalse(check('target', dict(is_admin=False), - policy_engine._ENFORCER)) - - def test_call_false(self): - check = policy_engine.IsAdminCheck('is_admin', 'False') - - self.assertFalse(check('target', dict(is_admin=True), - policy_engine._ENFORCER)) - self.assertTrue(check('target', dict(is_admin=False), - policy_engine._ENFORCER)) - - -class AdminRolePolicyTestCase(base.BaseTestCase): - def setUp(self): - super(AdminRolePolicyTestCase, self).setUp() - self.noadmin_context = context.RequestContext('fake', 'fake', - roles=['member']) - self.admin_context = context.RequestContext('fake', 'fake', - roles=['admin']) - - admin_rule = [ - os_policy.RuleDefault('example.admin', 'role:admin'), - ] - policy_engine.reset() - policy_engine.init(policy_file=None) - policy_engine._ENFORCER.register_defaults(admin_rule) - policy_engine._ENFORCER.load_rules(True) - self.target = {} - - def test_authorize_admin_actions_with_admin_context(self): - for action in policy_engine.get_rules().keys(): - policy_engine.authorize(self.admin_context, action, self.target) - - def test_authorize_admin_actions_with_nonadmin_context_throws(self): - """Check if 
non-admin context passed to admin actions throws - Policy not authorized exception - """ - for action in policy_engine.get_rules().keys(): - self.assertRaises(os_policy.PolicyNotAuthorized, - policy_engine.authorize, - self.noadmin_context, action, self.target) diff --git a/monasca_api/tests/sqlite_alarm.sql b/monasca_api/tests/sqlite_alarm.sql deleted file mode 100644 index 5d17f21be..000000000 --- a/monasca_api/tests/sqlite_alarm.sql +++ /dev/null @@ -1,123 +0,0 @@ -PRAGMA synchronous = OFF; -PRAGMA journal_mode = MEMORY; -BEGIN TRANSACTION; -CREATE TABLE `alarm_state` ( - `name` varchar(20) NOT NULL, - PRIMARY KEY (`name`) -); -CREATE TABLE `alarm_definition_severity` ( - `name` varchar(20) NOT NULL, - PRIMARY KEY (`name`) -); -CREATE TABLE `notification_method_type` ( - `name` varchar(20) NOT NULL, - PRIMARY KEY (`name`) -); -CREATE TABLE `notification_method` ( - `id` varchar(36) NOT NULL, - `tenant_id` varchar(36) NOT NULL, - `name` varchar(250) DEFAULT NULL, - `type` varchar(20) NOT NULL, - `address` varchar(512) DEFAULT NULL, - `period` int NOT NULL DEFAULT 0, - `created_at` datetime NOT NULL, - `updated_at` datetime NOT NULL, - PRIMARY KEY (`id`) -); -CREATE TABLE `alarm_definition` ( - `id` varchar(36) NOT NULL, - `tenant_id` varchar(36) NOT NULL, - `name` varchar(255) NOT NULL DEFAULT '', - `description` varchar(255) DEFAULT NULL, - `expression` longtext NOT NULL, - `severity` varchar(20) NOT NULL, - `match_by` varchar(255) DEFAULT '', - `actions_enabled` tinyint(1) NOT NULL DEFAULT '1', - `created_at` datetime NOT NULL, - `updated_at` datetime NOT NULL, - `deleted_at` datetime DEFAULT NULL, - PRIMARY KEY (`id`) -); -CREATE TABLE `alarm` ( - `id` varchar(36) NOT NULL, - `alarm_definition_id` varchar(36) NOT NULL DEFAULT '', - `state` varchar(20) NOT NULL, - `lifecycle_state` varchar(50) DEFAULT NULL, - `link` varchar(512) DEFAULT NULL, - `created_at` datetime NOT NULL, - `state_updated_at` datetime, - `updated_at` datetime NOT NULL, - PRIMARY KEY (`id`) -); -CREATE TABLE `alarm_action` ( - `alarm_definition_id` varchar(36) NOT NULL, - `alarm_state` varchar(20) NOT NULL, - `action_id` varchar(36) NOT NULL, - PRIMARY KEY (`alarm_definition_id`,`alarm_state`,`action_id`) -); -CREATE TABLE `alarm_metric` ( - `alarm_id` varchar(36) NOT NULL, - `metric_definition_dimensions_id` binary(20) NOT NULL DEFAULT '\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0', - PRIMARY KEY (`alarm_id`,`metric_definition_dimensions_id`) -); -CREATE TABLE `metric_definition` ( - `id` binary(20) NOT NULL DEFAULT '\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0', - `name` varchar(255) NOT NULL, - `tenant_id` varchar(36) NOT NULL, - `region` varchar(255) NOT NULL DEFAULT '', - PRIMARY KEY (`id`) -); -CREATE TABLE `metric_definition_dimensions` ( - `id` binary(20) NOT NULL DEFAULT '\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0', - `metric_definition_id` binary(20) NOT NULL DEFAULT '\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0', - `metric_dimension_set_id` binary(20) NOT NULL DEFAULT '\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0', - PRIMARY KEY (`id`) -); -CREATE TABLE `metric_dimension` ( - `dimension_set_id` binary(20) NOT NULL DEFAULT '\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0', - `name` varchar(255) NOT NULL DEFAULT '', - `value` varchar(255) NOT NULL DEFAULT '' -); -CREATE TABLE `sub_alarm_definition` ( - `id` varchar(36) NOT NULL, - `alarm_definition_id` varchar(36) NOT NULL DEFAULT '', - `function` varchar(10) NOT NULL, - `metric_name` varchar(100) DEFAULT NULL, - `operator` varchar(5) NOT NULL, - `threshold` double NOT NULL, - 
`period` int(11) NOT NULL, - `periods` int(11) NOT NULL, - `is_deterministic` tinyint(1) NOT NULL DEFAULT(0), - `created_at` datetime NOT NULL, - `updated_at` datetime NOT NULL, - PRIMARY KEY (`id`) -); -CREATE TABLE `sub_alarm_definition_dimension` ( - `sub_alarm_definition_id` varchar(36) NOT NULL DEFAULT '', - `dimension_name` varchar(255) NOT NULL DEFAULT '', - `value` varchar(255) DEFAULT NULL -); -CREATE TABLE `sub_alarm` ( - `id` varchar(36) NOT NULL, - `alarm_id` varchar(36) NOT NULL DEFAULT '', - `sub_expression_id` varchar(36) NOT NULL DEFAULT '', - `expression` longtext NOT NULL, - `created_at` datetime NOT NULL, - `updated_at` datetime NOT NULL, - PRIMARY KEY (`id`) -); - -insert into `alarm_state` values ('UNDETERMINED'); -insert into `alarm_state` values ('OK'); -insert into `alarm_state` values ('ALARM'); - -insert into `alarm_definition_severity` values ('LOW'); -insert into `alarm_definition_severity` values ('MEDIUM'); -insert into `alarm_definition_severity` values ('HIGH'); -insert into `alarm_definition_severity` values ('CRITICAL'); - -insert into `notification_method_type` values ('EMAIL'); -insert into `notification_method_type` values ('WEBHOOK'); -insert into `notification_method_type` values ('PAGERDUTY'); - -END TRANSACTION; diff --git a/monasca_api/tests/test_a_repository.py b/monasca_api/tests/test_a_repository.py deleted file mode 100644 index b339c8775..000000000 --- a/monasca_api/tests/test_a_repository.py +++ /dev/null @@ -1,1142 +0,0 @@ -# Copyright 2015 Cray -# Copyright 2016 FUJITSU LIMITED -# Copyright 2017 Hewlett Packard Enterprise Development LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
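The schema above can be smoke-tested on its own with the stdlib sqlite3 module before the SQLAlchemy fixtures below load it through oslo.db; a small standalone sketch:

# Standalone sanity check of sqlite_alarm.sql; illustrative only.
import sqlite3

conn = sqlite3.connect(':memory:')
with open('monasca_api/tests/sqlite_alarm.sql') as f:
    conn.executescript(f.read())
# The seed data inserts exactly three alarm states.
print(conn.execute('SELECT count(*) FROM alarm_state').fetchone())  # (3,)
conn.close()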
- -import datetime -import time - -import fixtures -from oslo_config import cfg -from oslo_db.sqlalchemy.engines import create_engine -from sqlalchemy import delete, MetaData, insert, bindparam - -from monasca_api.common.repositories.sqla import models -from monasca_api.tests import base - -CONF = cfg.CONF - - -class TestAlarmRepoDB(base.BaseTestCase): - - @classmethod - def setUpClass(cls): - engine = create_engine('sqlite://') - - qry = open('monasca_api/tests/sqlite_alarm.sql', 'r').read() - sconn = engine.raw_connection() - c = sconn.cursor() - c.executescript(qry) - sconn.commit() - c.close() - cls.engine = engine - - def _fake_engine_from_config(*args, **kw): - return cls.engine - cls.fixture = fixtures.MonkeyPatch( - 'sqlalchemy.create_engine', _fake_engine_from_config) - cls.fixture.setUp() - - metadata = MetaData() - - cls.aa = models.create_aa_model(metadata) - cls._delete_aa_query = delete(cls.aa) - cls._insert_aa_query = (insert(cls.aa) - .values( - alarm_definition_id=bindparam('alarm_definition_id'), - alarm_state=bindparam('alarm_state'), - action_id=bindparam('action_id'))) - - cls.ad = models.create_ad_model(metadata) - cls._delete_ad_query = delete(cls.ad) - cls._insert_ad_query = (insert(cls.ad) - .values( - id=bindparam('id'), - tenant_id=bindparam('tenant_id'), - name=bindparam('name'), - severity=bindparam('severity'), - expression=bindparam('expression'), - match_by=bindparam('match_by'), - actions_enabled=bindparam('actions_enabled'), - created_at=bindparam('created_at'), - updated_at=bindparam('updated_at'), - deleted_at=bindparam('deleted_at'))) - cls.sad = models.create_sad_model(metadata) - cls._delete_sad_query = delete(cls.sad) - cls._insert_sad_query = (insert(cls.sad) - .values( - id=bindparam('id'), - alarm_definition_id=bindparam('alarm_definition_id'), - function=bindparam('function'), - metric_name=bindparam('metric_name'), - operator=bindparam('operator'), - threshold=bindparam('threshold'), - period=bindparam('period'), - periods=bindparam('periods'), - created_at=bindparam('created_at'), - updated_at=bindparam('updated_at'))) - - cls.sadd = models.create_sadd_model(metadata) - cls._delete_sadd_query = delete(cls.sadd) - cls._insert_sadd_query = (insert(cls.sadd) - .values( - sub_alarm_definition_id=bindparam('sub_alarm_definition_id'), - dimension_name=bindparam('dimension_name'), - value=bindparam('value'))) - - cls.nm = models.create_nm_model(metadata) - cls._delete_nm_query = delete(cls.nm) - cls._insert_nm_query = (insert(cls.nm) - .values( - id=bindparam('id'), - tenant_id=bindparam('tenant_id'), - name=bindparam('name'), - type=bindparam('type'), - address=bindparam('address'), - created_at=bindparam('created_at'), - updated_at=bindparam('updated_at'))) - - cls.a = models.create_a_model(metadata) - cls._delete_a_query = delete(cls.a) - cls._insert_a_query = (insert(cls.a) - .values( - id=bindparam('id'), - alarm_definition_id=bindparam('alarm_definition_id'), - state=bindparam('state'), - lifecycle_state=bindparam('lifecycle_state'), - link=bindparam('link'), - created_at=bindparam('created_at'), - updated_at=bindparam('updated_at'), - state_updated_at=bindparam('state_updated_at'))) - - cls.sa = models.create_sa_model(metadata) - cls._delete_sa_query = delete(cls.sa) - cls._insert_sa_query = (insert(cls.sa) - .values( - id=bindparam('id'), - sub_expression_id=bindparam('sub_expression_id'), - alarm_id=bindparam('alarm_id'), - expression=bindparam('expression'), - created_at=bindparam('created_at'), - updated_at=bindparam('updated_at'))) - - 
cls.am = models.create_am_model(metadata) - cls._delete_am_query = delete(cls.am) - cls._insert_am_query = (insert(cls.am) - .values( - alarm_id=bindparam('alarm_id'), - metric_definition_dimensions_id=bindparam( - 'metric_definition_dimensions_id'))) - - cls.md = models.create_md_model(metadata) - cls._delete_md_query = delete(cls.md) - cls._insert_md_query = (insert(cls.md) - .values( - dimension_set_id=bindparam('dimension_set_id'), - name=bindparam('name'), - value=bindparam('value'))) - - cls.mdd = models.create_mdd_model(metadata) - cls._delete_mdd_query = delete(cls.mdd) - cls._insert_mdd_query = (insert(cls.mdd) - .values( - id=bindparam('id'), - metric_definition_id=bindparam('metric_definition_id'), - metric_dimension_set_id=bindparam('metric_dimension_set_id'))) - - cls.mde = models.create_mde_model(metadata) - cls._delete_mde_query = delete(cls.mde) - cls._insert_mde_query = (insert(cls.mde) - .values( - id=bindparam('id'), - name=bindparam('name'), - tenant_id=bindparam('tenant_id'), - region=bindparam('region'))) - - @classmethod - def tearDownClass(cls): - cls.fixture.cleanUp() - if hasattr(CONF, 'sql_engine'): - delattr(CONF, 'sql_engine') - - def setUp(self): - super(TestAlarmRepoDB, self).setUp() - - self.conf_override(connection='sqlite://', group='database') - - from monasca_api.common.repositories.sqla import alarms_repository as ar - self.repo = ar.AlarmsRepository() - - timestamp1 = datetime.datetime(2015, 3, 14, 9, 26, 53) - timestamp2 = datetime.datetime(2015, 3, 14, 9, 26, 54) - timestamp3 = datetime.datetime(2015, 3, 14, 9, 26, 55) - timestamp4 = datetime.datetime(2015, 3, 15, 9, 26, 53) - - self.default_as = [{'id': '1', - 'alarm_definition_id': '1', - 'state': 'OK', - 'lifecycle_state': 'OPEN', - 'link': 'http://somesite.com/this-alarm-info', - 'created_at': timestamp1, - 'updated_at': timestamp1, - 'state_updated_at': timestamp1}, - {'id': '2', - 'alarm_definition_id': '1', - 'state': 'UNDETERMINED', - 'lifecycle_state': 'OPEN', - 'link': 'http://somesite.com/this-alarm-info', - 'created_at': timestamp2, - 'updated_at': timestamp2, - 'state_updated_at': timestamp2}, - {'id': '3', - 'alarm_definition_id': '1', - 'state': 'ALARM', - 'lifecycle_state': None, - 'link': 'http://somesite.com/this-alarm-info', - 'created_at': timestamp3, - 'updated_at': timestamp3, - 'state_updated_at': timestamp3}, - {'id': '234111', - 'alarm_definition_id': '234', - 'state': 'UNDETERMINED', - 'lifecycle_state': None, - 'link': None, - 'created_at': timestamp4, - 'updated_at': timestamp4, - 'state_updated_at': timestamp4}] - - self.default_ads = [{'id': '1', - 'tenant_id': 'bob', - 'name': '90% CPU', - 'severity': 'LOW', - 'expression': 'AVG(cpu.idle_perc{flavor_id=777,' - ' image_id=888, device=1}) > 10', - 'match_by': 'flavor_id,image_id', - 'actions_enabled': False, - 'created_at': datetime.datetime.now(), - 'updated_at': datetime.datetime.now(), - 'deleted_at': None}, - {'id': '234', - 'tenant_id': 'bob', - 'name': '50% CPU', - 'severity': 'CRITICAL', - 'expression': 'AVG(cpu.sys_mem' - '{service=monitoring})' - ' > 20 and AVG(cpu.idle_perc' - '{service=monitoring}) < 10', - 'match_by': 'hostname,region', - 'actions_enabled': False, - 'created_at': datetime.datetime.now(), - 'updated_at': datetime.datetime.now(), - 'deleted_at': None}] - - self.default_sadds = [{'sub_alarm_definition_id': '111', - 'dimension_name': 'flavor_id', - 'value': '777'}, - {'sub_alarm_definition_id': '111', - 'dimension_name': 'image_id', - 'value': '888'}, - {'sub_alarm_definition_id': '111', - 
'dimension_name': 'metric_name', - 'value': 'cpu'}, - {'sub_alarm_definition_id': '111', - 'dimension_name': 'device', - 'value': '1'}, - {'sub_alarm_definition_id': '222', - 'dimension_name': 'flavor_id', - 'value': '777'}, - {'sub_alarm_definition_id': '222', - 'dimension_name': 'image_id', - 'value': '888'}, - {'sub_alarm_definition_id': '222', - 'dimension_name': 'metric_name', - 'value': 'mem'}] - - self.default_nms = [{'id': '29387234', - 'tenant_id': 'alarm-test', - 'name': 'MyEmail', - 'type': 'EMAIL', - 'address': 'a@b', - 'created_at': datetime.datetime.now(), - 'updated_at': datetime.datetime.now()}, - {'id': '77778687', - 'tenant_id': 'alarm-test', - 'name': 'OtherEmail', - 'type': 'EMAIL', - 'address': 'a@b', - 'created_at': datetime.datetime.now(), - 'updated_at': datetime.datetime.now()}] - - self.default_aas = [{'alarm_definition_id': '123', - 'alarm_state': 'ALARM', - 'action_id': '29387234'}, - {'alarm_definition_id': '123', - 'alarm_state': 'ALARM', - 'action_id': '77778687'}, - {'alarm_definition_id': '234', - 'alarm_state': 'ALARM', - 'action_id': '29387234'}, - {'alarm_definition_id': '234', - 'alarm_state': 'ALARM', - 'action_id': '77778687'}] - - self.default_sads = [{'id': '43', - 'alarm_definition_id': '234', - 'function': 'f_43', - 'metric_name': 'm_43', - 'operator': 'GT', - 'threshold': 0, - 'period': 1, - 'periods': 2, - 'created_at': datetime.datetime.now(), - 'updated_at': datetime.datetime.now()}, - {'id': '45', - 'alarm_definition_id': '234', - 'function': 'f_45', - 'metric_name': 'm_45', - 'operator': 'GT', - 'threshold': 0, - 'period': 1, - 'periods': 2, - 'created_at': datetime.datetime.now(), - 'updated_at': datetime.datetime.now()}, - {'id': '47', - 'alarm_definition_id': '234', - 'function': 'f_47', - 'metric_name': 'm_47', - 'operator': 'GT', - 'threshold': 0, - 'period': 1, - 'periods': 2, - 'created_at': datetime.datetime.now(), - 'updated_at': datetime.datetime.now()}, - {'id': '8484', - 'alarm_definition_id': '234', - 'function': 'f_49', - 'metric_name': 'm_49', - 'operator': 'GT', - 'threshold': 0, - 'period': 1, - 'periods': 2, - 'created_at': datetime.datetime.now(), - 'updated_at': datetime.datetime.now()}, - {'id': '8686', - 'alarm_definition_id': '234', - 'function': 'f_51', - 'metric_name': 'm_51', - 'operator': 'GT', - 'threshold': 0, - 'period': 1, - 'periods': 2, - 'created_at': datetime.datetime.now(), - 'updated_at': datetime.datetime.now()}] - - self.default_sas = [{'sub_expression_id': '43', - 'id': '42', - 'alarm_id': '1', - 'expression': 'avg(cpu.idle_perc{flavor_id=777,' - ' image_id=888, device=1}) > 10', - 'created_at': datetime.datetime.now(), - 'updated_at': datetime.datetime.now()}, - {'sub_expression_id': '45', - 'id': '43', - 'alarm_id': '2', - 'expression': 'avg(cpu.idle_perc{flavor_id=777,' - ' image_id=888, device=1}) > 10', - 'created_at': datetime.datetime.now(), - 'updated_at': datetime.datetime.now()}, - {'sub_expression_id': '47', - 'id': '44', - 'alarm_id': '3', - 'expression': 'avg(cpu.idle_perc{flavor_id=777,' - ' image_id=888, device=1}) > 10', - 'created_at': datetime.datetime.now(), - 'updated_at': datetime.datetime.now()}] - - self.default_ams = [{'alarm_id': '1', - 'metric_definition_dimensions_id': b'11'}, - {'alarm_id': '1', - 'metric_definition_dimensions_id': b'22'}, - {'alarm_id': '2', - 'metric_definition_dimensions_id': b'11'}, - {'alarm_id': '3', - 'metric_definition_dimensions_id': b'22'}, - {'alarm_id': '234111', - 'metric_definition_dimensions_id': b'31'}, - {'alarm_id': '234111', - 
'metric_definition_dimensions_id': b'32'}] - - self.default_mdes = [{'id': b'1', - 'name': 'cpu.idle_perc', - 'tenant_id': 'bob', - 'region': 'west'}, - {'id': b'111', - 'name': 'cpu.sys_mem', - 'tenant_id': 'bob', - 'region': 'west'}, - {'id': b'112', - 'name': 'cpu.idle_perc', - 'tenant_id': 'bob', - 'region': 'west'}] - - self.default_mdds = [{'id': b'11', - 'metric_definition_id': b'1', - 'metric_dimension_set_id': b'1'}, - {'id': b'22', - 'metric_definition_id': b'1', - 'metric_dimension_set_id': b'2'}, - {'id': b'31', - 'metric_definition_id': b'111', - 'metric_dimension_set_id': b'21'}, - {'id': b'32', - 'metric_definition_id': b'112', - 'metric_dimension_set_id': b'22'}] - - self.default_mds = [{'dimension_set_id': b'1', - 'name': 'instance_id', - 'value': '123'}, - {'dimension_set_id': b'1', - 'name': 'service', - 'value': 'monitoring'}, - {'dimension_set_id': b'2', - 'name': 'flavor_id', - 'value': '222'}, - {'dimension_set_id': b'22', - 'name': 'flavor_id', - 'value': '333'}, - {'dimension_set_id': b'21', - 'name': 'service', - 'value': 'monitoring'}, - {'dimension_set_id': b'22', - 'name': 'service', - 'value': 'monitoring'}, - {'dimension_set_id': b'21', - 'name': 'hostname', - 'value': 'roland'}, - {'dimension_set_id': b'22', - 'name': 'hostname', - 'value': 'roland'}, - {'dimension_set_id': b'21', - 'name': 'region', - 'value': 'colorado'}, - {'dimension_set_id': b'22', - 'name': 'region', - 'value': 'colorado'}, - {'dimension_set_id': b'22', - 'name': 'extra', - 'value': 'vivi'}] - - self.alarm1 = {'alarm_definition': {'id': '1', - 'name': '90% CPU', - 'severity': 'LOW'}, - 'created_timestamp': '2015-03-14T09:26:53Z', - 'id': '1', - 'lifecycle_state': 'OPEN', - 'link': 'http://somesite.com/this-alarm-info', - 'metrics': [{'dimensions': {'instance_id': '123', - 'service': 'monitoring'}, - 'name': 'cpu.idle_perc'}, - {'dimensions': {'flavor_id': '222'}, - 'name': 'cpu.idle_perc'}], - 'state': 'OK', - 'state_updated_timestamp': '2015-03-14T09:26:53Z', - 'updated_timestamp': '2015-03-14T09:26:53Z'} - self.alarm2 = {'alarm_definition': {'id': '1', - 'name': '90% CPU', - 'severity': 'LOW'}, - 'created_timestamp': '2015-03-14T09:26:54Z', - 'id': '2', - 'lifecycle_state': 'OPEN', - 'link': 'http://somesite.com/this-alarm-info', - 'metrics': [{'dimensions': {'instance_id': '123', - 'service': 'monitoring'}, - 'name': 'cpu.idle_perc'}], - 'state': 'UNDETERMINED', - 'state_updated_timestamp': '2015-03-14T09:26:54Z', - 'updated_timestamp': '2015-03-14T09:26:54Z'} - self.alarm_compound = {'alarm_definition': {'id': '234', - 'name': '50% CPU', - 'severity': 'CRITICAL'}, - 'created_timestamp': '2015-03-15T09:26:53Z', - 'id': '234111', - 'lifecycle_state': None, - 'link': None, - 'metrics': [ - {'dimensions': {'hostname': 'roland', - 'region': 'colorado', - 'service': 'monitoring'}, - 'name': 'cpu.sys_mem'}, - {'dimensions': {'extra': 'vivi', - 'flavor_id': '333', - 'hostname': 'roland', - 'region': 'colorado', - 'service': 'monitoring'}, - 'name': 'cpu.idle_perc'}], - 'state': 'UNDETERMINED', - 'state_updated_timestamp': - '2015-03-15T09:26:53Z', - 'updated_timestamp': '2015-03-15T09:26:53Z'} - self.alarm3 = {'alarm_definition': {'id': '1', - 'name': '90% CPU', - 'severity': 'LOW'}, - 'created_timestamp': '2015-03-14T09:26:55Z', - 'id': '3', - 'lifecycle_state': None, - 'link': 'http://somesite.com/this-alarm-info', - 'metrics': [{'dimensions': {'flavor_id': '222'}, - 'name': 'cpu.idle_perc'}], - 'state': 'ALARM', - 'state_updated_timestamp': '2015-03-14T09:26:55Z', - 'updated_timestamp': 
'2015-03-14T09:26:55Z'} - - with self.engine.begin() as conn: - conn.execute(self._delete_am_query) - conn.execute(self._insert_am_query, self.default_ams) - conn.execute(self._delete_md_query) - conn.execute(self._insert_md_query, self.default_mds) - conn.execute(self._delete_mdd_query) - conn.execute(self._insert_mdd_query, self.default_mdds) - conn.execute(self._delete_a_query) - conn.execute(self._insert_a_query, self.default_as) - conn.execute(self._delete_sa_query) - conn.execute(self._insert_sa_query, self.default_sas) - conn.execute(self._delete_mde_query) - conn.execute(self._insert_mde_query, self.default_mdes) - conn.execute(self._delete_ad_query) - conn.execute(self._insert_ad_query, self.default_ads) - conn.execute(self._delete_sad_query) - conn.execute(self._insert_sad_query, self.default_sads) - conn.execute(self._delete_sadd_query) - conn.execute(self._insert_sadd_query, self.default_sadds) - conn.execute(self._delete_nm_query) - conn.execute(self._insert_nm_query, self.default_nms) - conn.execute(self._delete_aa_query) - conn.execute(self._insert_aa_query, self.default_aas) - - def helper_builder_result(self, alarm_rows): - result = [] - - if not alarm_rows: - return result - - # Forward declaration - alarm = {} - prev_alarm_id = None - for alarm_row in alarm_rows: - if prev_alarm_id != alarm_row['alarm_id']: - if prev_alarm_id is not None: - result.append(alarm) - - ad = {u'id': alarm_row['alarm_definition_id'], - u'name': alarm_row['alarm_definition_name'], - u'severity': alarm_row['severity'], } - - metrics = [] - alarm = {u'id': alarm_row['alarm_id'], u'metrics': metrics, - u'state': alarm_row['state'], - u'lifecycle_state': alarm_row['lifecycle_state'], - u'link': alarm_row['link'], - u'state_updated_timestamp': - alarm_row['state_updated_timestamp'].isoformat() + - 'Z', - u'updated_timestamp': - alarm_row['updated_timestamp'].isoformat() + 'Z', - u'created_timestamp': - alarm_row['created_timestamp'].isoformat() + 'Z', - u'alarm_definition': ad} - - prev_alarm_id = alarm_row['alarm_id'] - - dimensions = {} - metric = {u'name': alarm_row['metric_name'], - u'dimensions': dimensions} - - if alarm_row['metric_dimensions']: - for dimension in alarm_row['metric_dimensions'].split(','): - parsed_dimension = dimension.split('=') - dimensions[parsed_dimension[0]] = parsed_dimension[1] - - metrics.append(metric) - - result.append(alarm) - return result - - def test_should_delete(self): - tenant_id = 'bob' - alarm_id = '1' - - alarm1 = self.repo.get_alarm(tenant_id, alarm_id) - alarm1 = self.helper_builder_result(alarm1) - self.assertEqual(alarm1[0], self.alarm1) - self.repo.delete_alarm(tenant_id, alarm_id) - from monasca_api.common.repositories import exceptions - self.assertRaises(exceptions.DoesNotExistException, - self.repo.get_alarm, tenant_id, alarm_id) - - def test_should_throw_exception_on_delete(self): - tenant_id = 'bob' - from monasca_api.common.repositories import exceptions - self.assertRaises(exceptions.DoesNotExistException, - self.repo.delete_alarm, tenant_id, 'Not an alarm ID') - - def test_should_find_alarm_def(self): - tenant_id = 'bob' - alarm_id = '1' - - expected = {'actions_enabled': False, - 'deleted_at': None, - 'description': None, - 'expression': 'AVG(cpu.idle_perc{flavor_id=777,' - ' image_id=888, device=1}) > 10', - 'id': '1', - 'match_by': 'flavor_id,image_id', - 'name': '90% CPU', - 'severity': 'LOW', - 'tenant_id': 'bob'} - alarm_def = self.repo.get_alarm_definition(tenant_id, alarm_id) - expected['created_at'] = alarm_def['created_at'] - 
expected['updated_at'] = alarm_def['updated_at'] - self.assertEqual(alarm_def, expected) - from monasca_api.common.repositories import exceptions - self.assertRaises(exceptions.DoesNotExistException, - self.repo.get_alarm_definition, - tenant_id, 'Not an alarm ID') - - def test_should_find(self): - res = self.repo.get_alarms(tenant_id='Not a tenant id', limit=1) - self.assertEqual(res, []) - - tenant_id = 'bob' - res = self.repo.get_alarms(tenant_id=tenant_id, limit=1000) - res = self.helper_builder_result(res) - - expected = [self.alarm1, - self.alarm2, - self.alarm_compound, - self.alarm3] - - self.assertEqual(res, expected) - - alarm_def_id = self.alarm_compound['alarm_definition']['id'] - query_parms = {'alarm_definition_id': alarm_def_id} - res = self.repo.get_alarms(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - res = self.helper_builder_result(res) - expected = [self.alarm_compound] - - self.assertEqual(res, expected) - - query_parms = {'metric_name': 'cpu.sys_mem'} - res = self.repo.get_alarms(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - res = self.helper_builder_result(res) - expected = [self.alarm_compound] - - self.assertEqual(res, expected) - - query_parms = {'metric_name': 'cpu.idle_perc'} - res = self.repo.get_alarms(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - res = self.helper_builder_result(res) - expected = [self.alarm1, - self.alarm2, - self.alarm_compound, - self.alarm3] - - self.assertEqual(res, expected) - - query_parms = {'metric_name': 'cpu.idle_perc', - 'metric_dimensions': {'flavor_id': '222'}} - res = self.repo.get_alarms(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - res = self.helper_builder_result(res) - expected = [self.alarm1, - self.alarm3] - - self.assertEqual(res, expected) - - query_parms = {'metric_dimensions': {'flavor_id': '333'}} - res = self.repo.get_alarms(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - res = self.helper_builder_result(res) - expected = [self.alarm_compound] - - self.assertEqual(res, expected) - - query_parms = {'metric_dimensions': {'flavor_id': '222|333'}} - res = self.repo.get_alarms(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - res = self.helper_builder_result(res) - expected = [self.alarm1, - self.alarm_compound, - self.alarm3] - - self.assertEqual(res, expected) - - query_parms = {'metric_dimensions': {'flavor_id': ''}} - res = self.repo.get_alarms(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - res = self.helper_builder_result(res) - expected = [self.alarm1, - self.alarm_compound, - self.alarm3] - - self.assertEqual(res, expected) - - query_parms = {'metric_name': 'cpu.idle_perc', - 'metric_dimensions': {'service': 'monitoring', - 'hostname': 'roland'}} - res = self.repo.get_alarms(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - res = self.helper_builder_result(res) - expected = [self.alarm_compound] - - self.assertEqual(res, expected) - - query_parms = {'state': 'UNDETERMINED'} - res = self.repo.get_alarms(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - res = self.helper_builder_result(res) - expected = [self.alarm2, - self.alarm_compound] - - self.assertEqual(res, expected) - - alarm_def_id = self.alarm1['alarm_definition']['id'] - query_parms = {'metric_name': 'cpu.idle_perc', - 'metric_dimensions': {'service': 'monitoring'}, - 'alarm_definition_id': alarm_def_id} - res = self.repo.get_alarms(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - res = 
self.helper_builder_result(res) - expected = [self.alarm1, - self.alarm2] - - self.assertEqual(res, expected) - - alarm_def_id = self.alarm1['alarm_definition']['id'] - query_parms = {'metric_name': 'cpu.idle_perc', - 'alarm_definition_id': alarm_def_id} - res = self.repo.get_alarms(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - res = self.helper_builder_result(res) - expected = [self.alarm1, - self.alarm2, - self.alarm3] - - self.assertEqual(res, expected) - - alarm_def_id = self.alarm_compound['alarm_definition']['id'] - query_parms = {'alarm_definition_id': alarm_def_id, - 'state': 'UNDETERMINED'} - res = self.repo.get_alarms(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - res = self.helper_builder_result(res) - expected = [self.alarm_compound] - - self.assertEqual(res, expected) - - query_parms = {'metric_name': 'cpu.sys_mem', - 'state': 'UNDETERMINED'} - res = self.repo.get_alarms(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - res = self.helper_builder_result(res) - expected = [self.alarm_compound] - - self.assertEqual(res, expected) - - query_parms = {'metric_name': 'cpu.idle_perc', - 'metric_dimensions': {'service': 'monitoring'}, - 'state': 'UNDETERMINED'} - res = self.repo.get_alarms(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - res = self.helper_builder_result(res) - expected = [self.alarm2, - self.alarm_compound] - - self.assertEqual(res, expected) - - time_now = datetime.datetime.now().isoformat() + 'Z' - query_parms = {'metric_name': 'cpu.idle_perc', - 'metric_dimensions': {'service': 'monitoring'}, - 'state': 'UNDETERMINED', - 'state_updated_start_time': time_now} - res = self.repo.get_alarms(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - res = self.helper_builder_result(res) - expected = [] - - self.assertEqual(res, expected) - - time_now = '2015-03-15T00:00:00.0Z' - query_parms = {'state_updated_start_time': time_now} - res = self.repo.get_alarms(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - res = self.helper_builder_result(res) - expected = [self.alarm_compound] - - self.assertEqual(res, expected) - - time_now = '2015-03-14T00:00:00.0Z' - query_parms = {'state_updated_start_time': time_now} - res = self.repo.get_alarms(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - res = self.helper_builder_result(res) - expected = [self.alarm1, - self.alarm2, - self.alarm_compound, - self.alarm3] - - self.assertEqual(res, expected) - - query_parms = {'state_updated_start_time': time_now, - 'link': 'http://google.com', - 'lifecycle_state': 'OPEN'} - res = self.repo.get_alarms(tenant_id=tenant_id, - query_parms=query_parms, - limit=None, - offset='10') - res = self.helper_builder_result(res) - expected = [] - - self.assertEqual(res, expected) - - query_parms = {'severity': 'LOW'} - res = self.repo.get_alarms(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - res = self.helper_builder_result(res) - expected = [self.alarm1, - self.alarm2, - self.alarm3] - self.assertEqual(expected, res) - - query_parms = {'severity': 'CRITICAL'} - res = self.repo.get_alarms(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - res = self.helper_builder_result(res) - expected = [self.alarm_compound] - self.assertEqual(expected, res) - - query_parms = {'severity': 'LOW|CRITICAL'} - res = self.repo.get_alarms(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - res = self.helper_builder_result(res) - expected = [self.alarm1, - self.alarm2, - 
self.alarm_compound, - self.alarm3] - self.assertEqual(expected, res) - - def test_should_count(self): - tenant_id = 'bob' - - res = self.repo.get_alarms_count(tenant_id=tenant_id) - self.assertEqual([{'count': 4}], res) - - res = self.repo.get_alarms_count(tenant_id=tenant_id, - limit=1000) - self.assertEqual([{'count': 4}], res) - - res = self.repo.get_alarms_count(tenant_id=tenant_id, - limit=1000, - offset=10) - self.assertEqual([], res) - - alarm_def_id = self.alarm_compound['alarm_definition']['id'] - query_parms = {'alarm_definition_id': alarm_def_id} - res = self.repo.get_alarms_count(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - self.assertEqual([{'count': 1}], res) - - query_parms = {'metric_name': 'cpu.sys_mem'} - res = self.repo.get_alarms_count(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - self.assertEqual([{'count': 1}], res) - - query_parms = {'state': 'UNDETERMINED'} - res = self.repo.get_alarms_count(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - self.assertEqual([{'count': 2}], res) - - time_now = '2015-03-15T00:00:00.0Z' - query_parms = {'state_updated_start_time': time_now} - res = self.repo.get_alarms_count(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - self.assertEqual([{'count': 1}], res) - - query_parms = {'severity': 'LOW'} - res = self.repo.get_alarms_count(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - self.assertEqual([{'count': 3}], res) - - query_parms = {'lifecycle_state': 'OPEN'} - res = self.repo.get_alarms_count(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - self.assertEqual([{'count': 2}], res) - - query_parms = {'link': 'http://somesite.com/this-alarm-info'} - res = self.repo.get_alarms_count(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - self.assertEqual([{'count': 3}], res) - - query_parms = {'metric_dimensions': {'flavor_id': '222'}} - res = self.repo.get_alarms_count(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - self.assertEqual([{'count': 2}], res) - - query_parms = {'group_by': ['metric_name']} - res = self.repo.get_alarms_count(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - expected = [{'count': 4, 'metric_name': 'cpu.idle_perc'}, - {'count': 1, 'metric_name': 'cpu.sys_mem'}] - self.assertEqual(expected, res) - - query_parms = {'group_by': ['dimension_name']} - res = self.repo.get_alarms_count(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - expected = [{'count': 1, 'dimension_name': 'extra'}, - {'count': 3, 'dimension_name': 'flavor_id'}, - {'count': 1, 'dimension_name': 'hostname'}, - {'count': 2, 'dimension_name': 'instance_id'}, - {'count': 1, 'dimension_name': 'region'}, - {'count': 3, 'dimension_name': 'service'}] - self.assertEqual(expected, res) - - query_parms = {'group_by': ['dimension_value']} - res = self.repo.get_alarms_count(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - expected = [{'count': 2, 'dimension_value': '123'}, - {'count': 2, 'dimension_value': '222'}, - {'count': 1, 'dimension_value': '333'}, - {'count': 1, 'dimension_value': 'colorado'}, - {'count': 3, 'dimension_value': 'monitoring'}, - {'count': 1, 'dimension_value': 'roland'}, - {'count': 1, 'dimension_value': 'vivi'}] - self.assertEqual(expected, res) - - query_parms = {'group_by': []} - res = self.repo.get_alarms_count(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - self.assertEqual([{'count': 4}], res) - - def test_should_sort_and_find(self): - tenant_id = 'bob' - 
query_parms = {'metric_name': 'cpu.idle_perc', - 'sort_by': ['alarm_id']} - res = self.repo.get_alarms(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - res = self.helper_builder_result(res) - expected = [self.alarm1, - self.alarm2, - self.alarm_compound, - self.alarm3] - self.assertEqual(expected, res) - - query_parms = {'metric_name': 'cpu.idle_perc', - 'sort_by': ['alarm_definition_id']} - res = self.repo.get_alarms(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - res = self.helper_builder_result(res) - expected = [self.alarm1, - self.alarm2, - self.alarm3, - self.alarm_compound] - self.assertEqual(expected, res) - - query_parms = {'metric_name': 'cpu.idle_perc', - 'sort_by': ['alarm_definition_name']} - res = self.repo.get_alarms(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - expected = [self.alarm_compound, - self.alarm1, - self.alarm2, - self.alarm3] - res = self.helper_builder_result(res) - self.assertEqual(expected, res) - - query_parms = {'metric_name': 'cpu.idle_perc', - 'sort_by': ['severity']} - res = self.repo.get_alarms(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - expected = [self.alarm1, - self.alarm2, - self.alarm3, - self.alarm_compound] - res = self.helper_builder_result(res) - self.assertEqual(expected, res) - - query_parms = {'metric_name': 'cpu.idle_perc', - 'sort_by': ['state']} - res = self.repo.get_alarms(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - expected = [self.alarm1, - self.alarm2, - self.alarm_compound, - self.alarm3] - res = self.helper_builder_result(res) - self.assertEqual(expected, res) - - query_parms = {'metric_name': 'cpu.idle_perc', - 'sort_by': ['alarm_id asc']} - res = self.repo.get_alarms(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - res = self.helper_builder_result(res) - expected = [self.alarm1, - self.alarm2, - self.alarm_compound, - self.alarm3] - self.assertEqual(expected, res) - - query_parms = {'metric_name': 'cpu.idle_perc', - 'sort_by': ['alarm_id desc']} - res = self.repo.get_alarms(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - res = self.helper_builder_result(res) - expected = [self.alarm3, - self.alarm_compound, - self.alarm2, - self.alarm1] - self.assertEqual(expected, res) - - query_parms = {'metric_name': 'cpu.idle_perc', - 'sort_by': ['alarm_id nfl']} - res = self.repo.get_alarms(tenant_id=tenant_id, - query_parms=query_parms, - limit=1000) - res = self.helper_builder_result(res) - expected = [self.alarm1, - self.alarm2, - self.alarm_compound, - self.alarm3] - self.assertEqual(expected, res) - - def test_should_update(self): - tenant_id = 'bob' - alarm_id = '2' - - alarm = self.repo.get_alarm(tenant_id, alarm_id) - alarm = self.helper_builder_result(alarm)[0] - original_state_updated_date = alarm['state_updated_timestamp'] - original_updated_timestamp = alarm['updated_timestamp'] - self.assertEqual(alarm['state'], 'UNDETERMINED') - - prev_state, _ = self.repo.update_alarm(tenant_id, alarm_id, 'OK', None, None) - alarm_new = self.repo.get_alarm(tenant_id, alarm_id) - alarm_new = self.helper_builder_result(alarm_new)[0] - new_state_updated_date = alarm_new['state_updated_timestamp'] - new_updated_timestamp = alarm_new['updated_timestamp'] - self.assertNotEqual(original_updated_timestamp, - new_updated_timestamp, - 'updated_at did not change') - self.assertNotEqual(original_state_updated_date, - new_state_updated_date, - 'state_updated_at did not change') - for k in ('state', 'link', 'lifecycle_state'): - 
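# Editor's note: the sort tests above pass sort_by entries of the form
# '<field> [asc|desc]' and expect the unrecognized direction 'alarm_id nfl'
# to order the same way as the ascending default. An illustrative parser with
# that fallback, assuming unknown directions simply degrade to 'asc';
# parse_sort_by is hypothetical, not the removed repository code.

VALID_DIRECTIONS = {'asc', 'desc'}

def parse_sort_by(entries):
    parsed = []
    for entry in entries:
        field, _, direction = entry.partition(' ')
        direction = direction.lower()
        if direction not in VALID_DIRECTIONS:
            direction = 'asc'          # fall back to the default ordering
        parsed.append((field, direction))
    return parsed

assert parse_sort_by(['alarm_id nfl']) == [('alarm_id', 'asc')]
assert parse_sort_by(['alarm_id desc']) == [('alarm_id', 'desc')]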
self.assertEqual(alarm[k], prev_state[k]) - - alarm['state_updated_timestamp'] = alarm_new['state_updated_timestamp'] - alarm['updated_timestamp'] = alarm_new['updated_timestamp'] - alarm['state'] = alarm_new['state'] - alarm['link'] = alarm_new['link'] - alarm['lifecycle_state'] = alarm_new['lifecycle_state'] - - self.assertEqual(alarm, alarm_new) - - time.sleep(1) - prev_state, _ = self.repo.update_alarm(tenant_id, alarm_id, 'OK', None, None) - alarm_unchanged = self.repo.get_alarm(tenant_id, alarm_id) - alarm_unchanged = self.helper_builder_result(alarm_unchanged)[0] - unchanged_state_updated_date = alarm_unchanged['state_updated_timestamp'] - unchanged_updated_timestamp = alarm_unchanged['updated_timestamp'] - self.assertNotEqual(unchanged_updated_timestamp, - new_updated_timestamp, - 'updated_at did not change') - self.assertEqual(unchanged_state_updated_date, - new_state_updated_date, - 'state_updated_at did change') - for k in ('state', 'link', 'lifecycle_state'): - self.assertEqual(alarm_new[k], prev_state[k]) - - def test_should_throw_exception_on_update(self): - tenant_id = 'bob' - alarm_id = 'Not real alarm id' - from monasca_api.common.repositories import exceptions - - self.assertRaises(exceptions.DoesNotExistException, - self.repo.update_alarm, - tenant_id, - alarm_id, - 'UNDETERMINED', - None, - None) - - def test_get_alarm_metrics(self): - alarm_id = '2' - alarm_metrics = self.repo.get_alarm_metrics(alarm_id) - - expected = [{'alarm_id': '2', - 'dimensions': 'instance_id=123,service=monitoring', - 'name': 'cpu.idle_perc'}] - - self.assertEqual(alarm_metrics, expected) - - def test_get_subalarms(self): - tenant_id = 'bob' - alarm_id = '2' - - sub_alarms = self.repo.get_sub_alarms(tenant_id, alarm_id) - expected = [{'alarm_definition_id': '1', - 'alarm_id': '2', - 'expression': 'avg(cpu.idle_perc{flavor_id=777, image_id=888, device=1}) > 10', - 'sub_alarm_id': '43'}] - self.assertEqual(sub_alarms, expected) diff --git a/monasca_api/tests/test_ad_repository.py b/monasca_api/tests/test_ad_repository.py deleted file mode 100644 index dcbc83fda..000000000 --- a/monasca_api/tests/test_ad_repository.py +++ /dev/null @@ -1,943 +0,0 @@ -# Copyright 2015 Cray -# Copyright 2016-2017 FUJITSU LIMITED -# (C) Copyright 2016-2017 Hewlett Packard Enterprise Development LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
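# Editor's note: a recap of the state-update semantics pinned down by
# test_should_update in test_alarms_repository above: every call refreshes
# updated_timestamp, state_updated_timestamp moves only when the state
# actually transitions, and the previous state is handed back to the caller.
# A standalone sketch under those assumptions only, not the removed code.

import datetime

def update_alarm(alarm, new_state):
    prev = dict(alarm)                       # snapshot returned to the caller
    now = datetime.datetime.utcnow()
    if alarm['state'] != new_state:
        alarm['state'] = new_state
        alarm['state_updated_timestamp'] = now
    alarm['updated_timestamp'] = now         # bumped on every update
    return prev

alarm = {'state': 'UNDETERMINED',
         'state_updated_timestamp': None,
         'updated_timestamp': None}
prev = update_alarm(alarm, 'OK')
assert prev['state'] == 'UNDETERMINED' and alarm['state'] == 'OK'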
- -import datetime - -import fixtures -from oslo_config import cfg -from oslo_db.sqlalchemy.engines import create_engine -from sqlalchemy import delete, MetaData, insert, bindparam, select, func - -from monasca_api.common.repositories import exceptions -from monasca_api.common.repositories.model import sub_alarm_definition -from monasca_api.common.repositories.sqla import models -from monasca_api.expression_parser import alarm_expr_parser -from monasca_api.tests import base - -CONF = cfg.CONF -ALARM_DEF_123_FIELDS = {'actions_enabled': False, - 'alarm_actions': u'29387234,77778687', - 'description': None, - 'expression': u'AVG(hpcs.compute{flavor_id=777, ' - 'image_id=888, metric_name=cpu, device=1},deterministic) > 10', - 'id': u'123', - 'match_by': u'flavor_id,image_id', - 'name': u'90% CPU', - 'ok_actions': None, - 'severity': u'LOW', - 'undetermined_actions': None} -TENANT_ID = 'bob' - - -class TestAlarmDefinitionRepoDB(base.BaseTestCase): - - @classmethod - def setUpClass(cls): - engine = create_engine('sqlite://') - qry = open('monasca_api/tests/sqlite_alarm.sql', 'r').read() - sconn = engine.raw_connection() - c = sconn.cursor() - c.executescript(qry) - sconn.commit() - c.close() - cls.engine = engine - - def _fake_engine_from_config(*args, **kw): - return cls.engine - - cls.fixture = fixtures.MonkeyPatch( - 'sqlalchemy.create_engine', _fake_engine_from_config) - cls.fixture.setUp() - metadata = MetaData() - - cls.aa = models.create_aa_model(metadata) - cls._delete_aa_query = delete(cls.aa) - cls._insert_aa_query = (insert(cls.aa).values( - alarm_definition_id=bindparam('alarm_definition_id'), - alarm_state=bindparam('alarm_state'), - action_id=bindparam('action_id'))) - - cls.ad = models.create_ad_model(metadata) - cls._delete_ad_query = delete(cls.ad) - cls._insert_ad_query = (insert(cls.ad).values( - id=bindparam('id'), - tenant_id=bindparam('tenant_id'), - name=bindparam('name'), - severity=bindparam('severity'), - expression=bindparam('expression'), - match_by=bindparam('match_by'), - actions_enabled=bindparam('actions_enabled'), - created_at=bindparam('created_at'), - updated_at=bindparam('updated_at'), - deleted_at=bindparam('deleted_at'))) - - cls.sad = models.create_sad_model(metadata) - cls._delete_sad_query = delete(cls.sad) - cls._insert_sad_query = (insert(cls.sad).values( - id=bindparam('id'), - alarm_definition_id=bindparam('alarm_definition_id'), - function=bindparam('function'), - metric_name=bindparam('metric_name'), - operator=bindparam('operator'), - threshold=bindparam('threshold'), - period=bindparam('period'), - periods=bindparam('periods'), - is_deterministic=bindparam('is_deterministic'), - created_at=bindparam('created_at'), - updated_at=bindparam('updated_at'))) - - cls.sadd = models.create_sadd_model(metadata) - cls._delete_sadd_query = delete(cls.sadd) - cls._insert_sadd_query = (insert(cls.sadd).values( - sub_alarm_definition_id=bindparam('sub_alarm_definition_id'), - dimension_name=bindparam('dimension_name'), - value=bindparam('value'))) - - cls.nm = models.create_nm_model(metadata) - cls._delete_nm_query = delete(cls.nm) - cls._insert_nm_query = (insert(cls.nm).values( - id=bindparam('id'), - tenant_id=bindparam('tenant_id'), - name=bindparam('name'), - type=bindparam('type'), - address=bindparam('address'), - created_at=bindparam('created_at'), - updated_at=bindparam('updated_at'))) - - @classmethod - def tearDownClass(cls): - cls.fixture.cleanUp() - if hasattr(CONF, 'sql_engine'): - delattr(CONF, 'sql_engine') - - def setUp(self): - 
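# Editor's note: setUpClass above builds the whole schema in an in-memory
# SQLite engine and monkey-patches sqlalchemy.create_engine so the repository
# under test silently receives it. A condensed, runnable sketch of that
# harness; the one-table schema here is invented for illustration.

import fixtures
from sqlalchemy import create_engine

engine = create_engine('sqlite://')
schema = "CREATE TABLE alarm_definition (id TEXT PRIMARY KEY, name TEXT);"
conn = engine.raw_connection()
conn.cursor().executescript(schema)   # sqlite3 cursors accept full scripts
conn.commit()
conn.close()

# Any later create_engine() call now yields the prepared in-memory engine.
patch = fixtures.MonkeyPatch('sqlalchemy.create_engine',
                             lambda *args, **kwargs: engine)
patch.setUp()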
super(TestAlarmDefinitionRepoDB, self).setUp() - self.conf_default(connection='sqlite://', group='database') - - from monasca_api.common.repositories.sqla import alarm_definitions_repository as adr - self.repo = adr.AlarmDefinitionsRepository() - self.default_ads = [{'id': '123', - 'tenant_id': 'bob', - 'name': '90% CPU', - 'severity': 'LOW', - 'expression': 'AVG(hpcs.compute{flavor_id=777,' - ' image_id=888, metric_name=cpu, device=1},deterministic) > 10', - 'match_by': 'flavor_id,image_id', - 'actions_enabled': False, - 'created_at': datetime.datetime.now(), - 'updated_at': datetime.datetime.now(), - 'deleted_at': None}, - {'id': '234', - 'tenant_id': 'bob', - 'name': '50% CPU', - 'severity': 'LOW', - 'expression': 'AVG(hpcs.compute{flavor_id=777, ' - 'image_id=888, metric_name=mem}) > 20' - ' and AVG(hpcs.compute) < 100', - 'match_by': 'flavor_id,image_id', - 'actions_enabled': False, - 'created_at': datetime.datetime.now(), - 'updated_at': datetime.datetime.now(), - 'deleted_at': None}] - - self.default_sads = [{'id': '111', - 'alarm_definition_id': '123', - 'function': 'AVG', - 'metric_name': 'hpcs.compute', - 'operator': 'GT', - 'threshold': 10, - 'period': 60, - 'periods': 1, - 'is_deterministic': True, - 'created_at': datetime.datetime.now(), - 'updated_at': datetime.datetime.now()}, - {'id': '222', - 'alarm_definition_id': '234', - 'function': 'AVG', - 'metric_name': 'hpcs.compute', - 'operator': 'GT', - 'threshold': 20, - 'period': 60, - 'periods': 1, - 'is_deterministic': False, - 'created_at': datetime.datetime.now(), - 'updated_at': datetime.datetime.now()}, - {'id': '223', - 'alarm_definition_id': '234', - 'function': 'AVG', - 'metric_name': 'hpcs.compute', - 'operator': 'LT', - 'threshold': 100, - 'period': 60, - 'periods': 1, - 'is_deterministic': False, - 'created_at': datetime.datetime.now(), - 'updated_at': datetime.datetime.now()}, - ] - - self.default_sadds = [{'sub_alarm_definition_id': '111', - 'dimension_name': 'flavor_id', - 'value': '777'}, - {'sub_alarm_definition_id': '111', - 'dimension_name': 'image_id', - 'value': '888'}, - {'sub_alarm_definition_id': '111', - 'dimension_name': 'metric_name', - 'value': 'cpu'}, - {'sub_alarm_definition_id': '111', - 'dimension_name': 'device', - 'value': '1'}, - {'sub_alarm_definition_id': '222', - 'dimension_name': 'flavor_id', - 'value': '777'}, - {'sub_alarm_definition_id': '222', - 'dimension_name': 'image_id', - 'value': '888'}, - {'sub_alarm_definition_id': '222', - 'dimension_name': 'metric_name', - 'value': 'mem'}] - - self.default_nms = [{'id': '29387234', - 'tenant_id': 'alarm-test', - 'name': 'MyEmail', - 'type': 'EMAIL', - 'address': 'a@b', - 'created_at': datetime.datetime.now(), - 'updated_at': datetime.datetime.now()}, - {'id': '77778687', - 'tenant_id': 'alarm-test', - 'name': 'OtherEmail', - 'type': 'EMAIL', - 'address': 'a@b', - 'created_at': datetime.datetime.now(), - 'updated_at': datetime.datetime.now()}] - - self.default_aas = [{'alarm_definition_id': '123', - 'alarm_state': 'ALARM', - 'action_id': '29387234'}, - {'alarm_definition_id': '123', - 'alarm_state': 'ALARM', - 'action_id': '77778687'}, - {'alarm_definition_id': '234', - 'alarm_state': 'ALARM', - 'action_id': '29387234'}, - {'alarm_definition_id': '234', - 'alarm_state': 'ALARM', - 'action_id': '77778687'}] - - with self.engine.begin() as conn: - conn.execute(self._delete_ad_query) - conn.execute(self._insert_ad_query, self.default_ads) - conn.execute(self._delete_sad_query) - conn.execute(self._insert_sad_query, self.default_sads) - 
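# Editor's note: setUp above resets every table with a delete() and re-seeds
# it by executing one bindparam-ed insert() against a list of row dicts,
# which SQLAlchemy runs as a single executemany. A self-contained sketch of
# that seeding pattern using a throwaway table defined here for illustration.

from sqlalchemy import (MetaData, Table, Column, Text, bindparam,
                        create_engine, delete, insert)

engine = create_engine('sqlite://')
metadata = MetaData()
nm = Table('notification_method', metadata,
           Column('id', Text, primary_key=True),
           Column('address', Text))
metadata.create_all(engine)

insert_nm = insert(nm).values(id=bindparam('id'),
                              address=bindparam('address'))
seed_rows = [{'id': '29387234', 'address': 'a@b'},
             {'id': '77778687', 'address': 'a@b'}]

with engine.begin() as conn:
    conn.execute(delete(nm))            # start from a clean table
    conn.execute(insert_nm, seed_rows)  # executemany over the seed rows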
conn.execute(self._delete_sadd_query) - conn.execute(self._insert_sadd_query, self.default_sadds) - conn.execute(self._delete_nm_query) - conn.execute(self._insert_nm_query, self.default_nms) - conn.execute(self._delete_aa_query) - conn.execute(self._insert_aa_query, self.default_aas) - - def test_should_create(self): - expression = ('AVG(hpcs.compute{flavor_id=777, image_id=888,' - ' metric_name=cpu}) > 10') - description = '' - match_by = ['flavor_id', 'image_id'] - sub_expr_list = (alarm_expr_parser.AlarmExprParser(expression).sub_expr_list) - alarm_actions = ['29387234', '77778687'] - alarmA_id = self.repo.create_alarm_definition('555', - '90% CPU', - expression, - sub_expr_list, - description, - 'LOW', - match_by, - alarm_actions, - None, - None) - - alarmB = self.repo.get_alarm_definition('555', alarmA_id) - - self.assertEqual(alarmA_id, alarmB['id']) - - query_sad = (select(self.sad.c.id) - .select_from(self.sad) - .where(self.sad.c.alarm_definition_id == alarmA_id)) - - query_sadd = (select(func.count()) - .select_from(self.sadd) - .where(self.sadd.c.sub_alarm_definition_id == bindparam('b_id'))) - - with self.engine.connect() as conn: - count_sad = conn.execute(query_sad).fetchall() - self.assertEqual(len(count_sad), 1) - count_sadd = conn.execute(query_sadd, parameters={'b_id': count_sad[0][0]}).fetchone() - self.assertEqual(count_sadd[0], 3) - - def test_should_try_to_create_with_wrong_alarm_action(self): - expression = ('AVG(hpcs.compute{flavor_id=777, image_id=888,' - ' metric_name=cpu}) > 10') - description = '' - match_by = ['flavor_id', 'image_id'] - sub_expr_list = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list - alarm_actions = ['66666666'] - args = ('555', - '90% CPU', - expression, - sub_expr_list, - description, - 'LOW', - match_by, - alarm_actions, - None, - None) - self.assertRaises(exceptions.InvalidUpdateException, - self.repo.create_alarm_definition, *args) - - def test_should_update(self): - expression = ''.join(['AVG(hpcs.compute{flavor_id=777, image_id=888,', - ' metric_name=mem}) > 20 and', - ' AVG(hpcs.compute) < 100']) - description = '' - match_by = ['flavor_id', 'image_id'] - sub_expr_list = (alarm_expr_parser.AlarmExprParser(expression).sub_expr_list) - alarm_actions = ['29387234', '77778687'] - self.repo.update_or_patch_alarm_definition('bob', '234', - '90% CPU', expression, - sub_expr_list, False, - description, alarm_actions, - None, None, - match_by, 'LOW') - alarm = self.repo.get_alarm_definition('bob', '234') - expected = {'actions_enabled': False, - 'alarm_actions': '29387234,77778687', - 'description': '', - 'expression': 'AVG(hpcs.compute{flavor_id=777, ' - 'image_id=888, metric_name=mem}) > 20 and' - ' AVG(hpcs.compute) < 100', - 'id': '234', - 'match_by': 'flavor_id,image_id', - 'name': '90% CPU', - 'ok_actions': None, - 'severity': 'LOW', - 'undetermined_actions': None} - - self.assertEqual(alarm, expected) - - sub_alarms = self.repo.get_sub_alarm_definitions('234') - - expected = [{'alarm_definition_id': '234', - 'dimensions': 'flavor_id=777,image_id=888,' - 'metric_name=mem', - 'function': 'AVG', - 'id': '222', - 'metric_name': 'hpcs.compute', - 'operator': 'GT', - 'period': 60, - 'periods': 1, - 'is_deterministic': False, - 'threshold': 20.0}, - {'alarm_definition_id': '234', - 'dimensions': None, - 'function': 'AVG', - 'id': '223', - 'metric_name': 'hpcs.compute', - 'operator': 'LT', - 'period': 60, - 'periods': 1, - 'is_deterministic': False, - 'threshold': 100.0}] - - self.assertEqual(len(sub_alarms), len(expected)) - - for 
s, e in zip(sub_alarms, expected): - e['created_at'] = s['created_at'] - e['updated_at'] = s['updated_at'] - - self.assertEqual(sub_alarms, expected) - am = self.repo.get_alarm_metrics('bob', '234') - self.assertEqual(am, []) - - sub_alarms = self.repo.get_sub_alarms('bob', '234') - self.assertEqual(sub_alarms, []) - - ads = self.repo.get_alarm_definitions( - 'bob', '90% CPU', { - 'image_id': '888'}, None, None, 0, 100) - expected = [{'actions_enabled': False, - 'alarm_actions': '29387234,77778687', - 'description': None, - 'expression': 'AVG(hpcs.compute{flavor_id=777, ' - 'image_id=888, metric_name=cpu, device=1},deterministic) > 10', - 'id': '123', - 'match_by': 'flavor_id,image_id', - 'name': '90% CPU', - 'ok_actions': None, - 'severity': 'LOW', - 'undetermined_actions': None}, - {'actions_enabled': False, - 'alarm_actions': '29387234,77778687', - 'description': '', - 'expression': 'AVG(hpcs.compute{flavor_id=777, ' - 'image_id=888, metric_name=mem}) > 20 and' - ' AVG(hpcs.compute) < 100', - 'id': '234', - 'match_by': 'flavor_id,image_id', - 'name': '90% CPU', - 'ok_actions': None, - 'severity': 'LOW', - 'undetermined_actions': None}] - self.assertEqual(ads, expected) - - ads = self.repo.get_alarm_definitions( - 'bob', '90% CPU', { - 'image_id': '888'}, 'LOW', None, 0, 100) - expected = [{'actions_enabled': False, - 'alarm_actions': '29387234,77778687', - 'description': None, - 'expression': 'AVG(hpcs.compute{flavor_id=777, ' - 'image_id=888, metric_name=cpu, device=1},deterministic) > 10', - 'id': '123', - 'match_by': 'flavor_id,image_id', - 'name': '90% CPU', - 'ok_actions': None, - 'severity': 'LOW', - 'undetermined_actions': None}, - {'actions_enabled': False, - 'alarm_actions': '29387234,77778687', - 'description': '', - 'expression': 'AVG(hpcs.compute{flavor_id=777, ' - 'image_id=888, metric_name=mem}) > 20 and' - ' AVG(hpcs.compute) < 100', - 'id': '234', - 'match_by': 'flavor_id,image_id', - 'name': '90% CPU', - 'ok_actions': None, - 'severity': 'LOW', - 'undetermined_actions': None}] - self.assertEqual(ads, expected) - - ads = self.repo.get_alarm_definitions( - 'bob', '90% CPU', { - 'image_id': '888'}, 'CRITICAL', None, 0, 100) - expected = [] - self.assertEqual(ads, expected) - - self.repo.update_or_patch_alarm_definition('bob', '234', - '90% CPU', None, - sub_expr_list, False, - description, alarm_actions, - None, None, - match_by, 'LOW') - - self.repo.update_or_patch_alarm_definition('bob', '234', - None, None, - None, True, - None, None, - None, None, - None, None, - True) - - self.repo.update_or_patch_alarm_definition('bob', '234', - None, None, - None, None, - None, None, - None, None, - None, None, - True) - - self.repo.update_or_patch_alarm_definition('bob', '234', - None, None, - None, None, - None, [], - [], [], - None, None, - True) - - self.repo.update_or_patch_alarm_definition('bob', '234', - None, None, - None, False, - None, None, - None, None, - match_by, None, - False) - - self.assertRaises(exceptions.InvalidUpdateException, - self.repo.update_or_patch_alarm_definition, - 'bob', '234', - None, None, - None, False, - None, None, - None, None, - None, None, - False) - - self.assertRaises(exceptions.InvalidUpdateException, - self.repo.update_or_patch_alarm_definition, - 'bob', '234', - '90% CPU', None, - sub_expr_list, False, - description, alarm_actions, - None, None, - 'update_match_by', 'LOW') - - self.repo.delete_alarm_definition('bob', '234') - - self.assertRaises(exceptions.DoesNotExistException, - self.repo.get_alarm_definition, 'bob', '234') - - def 
test_should_find_by_id(self): - alarmDef1 = self.repo.get_alarm_definition('bob', '123') - expected = {'actions_enabled': False, - 'alarm_actions': '29387234,77778687', - 'description': None, - 'expression': 'AVG(hpcs.compute{flavor_id=777, ' - 'image_id=888, metric_name=cpu, device=1},deterministic) > 10', - 'id': '123', - 'match_by': 'flavor_id,image_id', - 'name': '90% CPU', - 'ok_actions': None, - 'severity': 'LOW', - 'undetermined_actions': None} - self.assertEqual(alarmDef1, expected) - with self.engine.begin() as conn: - conn.execute(self._delete_aa_query) - - alarmDef2 = self.repo.get_alarm_definition('bob', '123') - expected['alarm_actions'] = None - self.assertEqual(alarmDef2, expected) - - def test_shoud_find_sub_alarm_metric_definitions(self): - sub_alarms = self.repo.get_sub_alarm_definitions('123') - - expected = [{'alarm_definition_id': '123', - 'dimensions': 'flavor_id=777,image_id=888,' - 'metric_name=cpu,device=1', - 'function': 'AVG', - 'id': '111', - 'metric_name': 'hpcs.compute', - 'operator': 'GT', - 'period': 60, - 'periods': 1, - 'is_deterministic': True, - 'threshold': 10.0}] - - self.assertEqual(len(sub_alarms), len(expected)) - - for s, e in zip(sub_alarms, expected): - e['created_at'] = s['created_at'] - e['updated_at'] = s['updated_at'] - - self.assertEqual(sub_alarms, expected) - - sub_alarms = self.repo.get_sub_alarm_definitions('234') - - expected = [{'alarm_definition_id': '234', - 'dimensions': 'flavor_id=777,image_id=888,metric_name=mem', - 'function': 'AVG', - 'id': '222', - 'metric_name': 'hpcs.compute', - 'operator': 'GT', - 'period': 60, - 'periods': 1, - 'is_deterministic': False, - 'threshold': 20.0}, - {'alarm_definition_id': '234', - 'dimensions': None, - 'function': 'AVG', - 'id': '223', - 'metric_name': 'hpcs.compute', - 'operator': 'LT', - 'period': 60, - 'periods': 1, - 'is_deterministic': False, - 'threshold': 100.0}] - - self.assertEqual(len(sub_alarms), len(expected)) - - for s, e in zip(sub_alarms, expected): - e['created_at'] = s['created_at'] - e['updated_at'] = s['updated_at'] - - self.assertEqual(sub_alarms, expected) - - sub_alarms = self.repo.get_sub_alarm_definitions('asdfasdf') - self.assertEqual(sub_alarms, []) - - def test_try_update_alarm_that_does_not_exist(self): - args = ('koala', '999', - None, None, - None, True, - None, None, - None, None, - None, None, - True) - self.assertRaises(exceptions.DoesNotExistException, - self.repo.update_or_patch_alarm_definition, *args) - - def test_exists(self): - alarmDef1 = self.repo.get_alarm_definitions(tenant_id='bob', - name='90% CPU') - expected = {'actions_enabled': False, - 'alarm_actions': '29387234,77778687', - 'description': None, - 'expression': 'AVG(hpcs.compute{flavor_id=777, ' - 'image_id=888, metric_name=cpu, device=1},deterministic) > 10', - 'id': '123', - 'match_by': 'flavor_id,image_id', - 'name': '90% CPU', - 'ok_actions': None, - 'severity': 'LOW', - 'undetermined_actions': None} - - self.assertEqual(alarmDef1, [expected]) - alarmDef2 = self.repo.get_alarm_definitions(tenant_id='bob', - name='999% CPU') - self.assertEqual(alarmDef2, []) - - def test_should_find(self): - alarmDef1 = self.repo.get_alarm_definitions(tenant_id='bob', - limit=1) - expected = [{'actions_enabled': False, - 'alarm_actions': '29387234,77778687', - 'description': None, - 'expression': 'AVG(hpcs.compute{flavor_id=777, ' - 'image_id=888, metric_name=cpu, device=1},deterministic) > 10', - 'id': '123', - 'match_by': 'flavor_id,image_id', - 'name': '90% CPU', - 'ok_actions': None, - 'severity': 'LOW', 
- 'undetermined_actions': None}, - {'actions_enabled': False, - 'alarm_actions': '29387234,77778687', - 'description': None, - 'expression': 'AVG(hpcs.compute{flavor_id=777, ' - 'image_id=888, metric_name=mem}) > 20 and ' - 'AVG(hpcs.compute) < 100', - 'id': '234', - 'match_by': 'flavor_id,image_id', - 'name': '50% CPU', - 'ok_actions': None, - 'severity': 'LOW', - 'undetermined_actions': None}] - - self.assertEqual(alarmDef1, expected) - with self.engine.begin() as conn: - conn.execute(self._delete_aa_query) - - alarmDef2 = self.repo.get_alarm_definitions(tenant_id='bob', - limit=1) - expected[0]['alarm_actions'] = None - expected[1]['alarm_actions'] = None - self.assertEqual(alarmDef2, expected) - - alarmDef3 = self.repo.get_alarm_definitions(tenant_id='bill', - limit=1) - self.assertEqual(alarmDef3, []) - - alarmDef3 = self.repo.get_alarm_definitions(tenant_id='bill', - offset='10', - limit=1) - self.assertEqual(alarmDef3, []) - - def test_should_find_by_dimension(self): - expected = [{'actions_enabled': False, - 'alarm_actions': '29387234,77778687', - 'description': None, - 'expression': 'AVG(hpcs.compute{flavor_id=777,' - ' image_id=888, metric_name=mem}) > 20 ' - 'and AVG(hpcs.compute) < 100', - 'id': '234', - 'match_by': 'flavor_id,image_id', - 'name': '50% CPU', - 'ok_actions': None, - 'severity': 'LOW', - 'undetermined_actions': None}] - - dimensions = {'image_id': '888', 'metric_name': 'mem'} - alarmDef1 = self.repo.get_alarm_definitions(tenant_id='bob', - dimensions=dimensions, - limit=1) - self.assertEqual(alarmDef1, expected) - - expected = [{'actions_enabled': False, - 'alarm_actions': '29387234,77778687', - 'description': None, - 'expression': 'AVG(hpcs.compute{flavor_id=777, ' - 'image_id=888, metric_name=cpu, device=1},deterministic) > 10', - 'id': '123', - 'match_by': 'flavor_id,image_id', - 'name': '90% CPU', - 'ok_actions': None, - 'severity': 'LOW', - 'undetermined_actions': None}, - {'actions_enabled': False, - 'alarm_actions': '29387234,77778687', - 'description': None, - 'expression': 'AVG(hpcs.compute{flavor_id=777, ' - 'image_id=888, metric_name=mem}) > 20 and ' - 'AVG(hpcs.compute) < 100', - 'id': '234', - 'match_by': 'flavor_id,image_id', - 'name': '50% CPU', - 'ok_actions': None, - 'severity': 'LOW', - 'undetermined_actions': None}] - - dimensions = {'image_id': '888'} - alarmDef1 = self.repo.get_alarm_definitions(tenant_id='bob', - dimensions=dimensions, - limit=1) - self.assertEqual(alarmDef1, expected) - - expected = [{'actions_enabled': False, - 'alarm_actions': '29387234,77778687', - 'description': None, - 'expression': 'AVG(hpcs.compute{flavor_id=777, ' - 'image_id=888, metric_name=cpu, device=1},deterministic) > 10', - 'id': '123', - 'match_by': 'flavor_id,image_id', - 'name': '90% CPU', - 'ok_actions': None, - 'severity': 'LOW', - 'undetermined_actions': None}] - - dimensions = {'device': '1'} - alarmDef1 = self.repo.get_alarm_definitions(tenant_id='bob', - dimensions=dimensions, - limit=1) - self.assertEqual(alarmDef1, expected) - - dimensions = {'Not real': 'AA'} - alarmDef1 = self.repo.get_alarm_definitions(tenant_id='bob', - dimensions=dimensions, - limit=1) - self.assertEqual(alarmDef1, []) - - def test_should_find_and_sort(self): - expected = [{'actions_enabled': False, - 'alarm_actions': '29387234,77778687', - 'description': None, - 'expression': 'AVG(hpcs.compute{flavor_id=777, ' - 'image_id=888, metric_name=cpu, device=1},deterministic) > 10', - 'id': '123', - 'match_by': 'flavor_id,image_id', - 'name': '90% CPU', - 'ok_actions': None, - 
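# Editor's note: the dimension tests above show that a definition matches
# when every queried name/value pair appears among its expression dimensions
# ({'image_id': '888'} matches both seeds, {'device': '1'} only one, and an
# unknown pair matches none). An illustrative subset check capturing that
# behavior, not the removed SQL.

def matches(defn_dimensions, query_dimensions):
    return all(defn_dimensions.get(name) == value
               for name, value in query_dimensions.items())

defn = {'flavor_id': '777', 'image_id': '888',
        'metric_name': 'cpu', 'device': '1'}
assert matches(defn, {'image_id': '888'})
assert matches(defn, {'device': '1'})
assert not matches(defn, {'Not real': 'AA'})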
'severity': 'LOW', - 'undetermined_actions': None}, - {'actions_enabled': False, - 'alarm_actions': '29387234,77778687', - 'description': None, - 'expression': 'AVG(hpcs.compute{flavor_id=777, ' - 'image_id=888, metric_name=mem}) > 20 and ' - 'AVG(hpcs.compute) < 100', - 'id': '234', - 'match_by': 'flavor_id,image_id', - 'name': '50% CPU', - 'ok_actions': None, - 'severity': 'LOW', - 'undetermined_actions': None}] - - alarmDef1 = self.repo.get_alarm_definitions(tenant_id='bob', - sort_by=['id']) - self.assertEqual(expected, alarmDef1) - alarmDef2 = self.repo.get_alarm_definitions(tenant_id='bob', - sort_by=['name']) - self.assertEqual(expected[::-1], alarmDef2) - - def test_should_delete_by_id(self): - self.repo.delete_alarm_definition('bob', '123') - from monasca_api.common.repositories import exceptions - self.assertRaises(exceptions.DoesNotExistException, - self.repo.get_alarm_definition, - 'bob', - '123') - - expected = [{'actions_enabled': False, - 'alarm_actions': '29387234,77778687', - 'description': None, - 'expression': 'AVG(hpcs.compute{flavor_id=777, ' - 'image_id=888, metric_name=mem}) > 20 ' - 'and AVG(hpcs.compute) < 100', - 'id': '234', - 'match_by': 'flavor_id,image_id', - 'name': '50% CPU', - 'ok_actions': None, - 'severity': 'LOW', - 'undetermined_actions': None}] - - alarmDef1 = self.repo.get_alarm_definitions(tenant_id='bob', - limit=1) - self.assertEqual(alarmDef1, expected) - - def test_try_delete_alarm_that_does_not_exist(self): - response = self.repo.delete_alarm_definition('goku', '123') - self.assertEqual(False, response) - - def test_should_patch_name(self): - self.run_patch_test(name=u'90% CPU New') - - def test_should_patch_description(self): - self.run_patch_test(description=u'New Description') - - def test_should_patch_severity(self): - self.run_patch_test(severity=u'CRITICAL') - - def test_should_patch_actions_enabled(self): - self.run_patch_test(actions_enabled=False) - - def test_should_patch_ok_actions(self): - self.run_patch_test(ok_actions=[u'29387234']) - - def test_should_patch_alarm_actions(self): - self.run_patch_test(alarm_actions=[u'29387234']) - - def test_should_patch_undetermined_actions(self): - self.run_patch_test(undetermined_actions=[u'29387234', u'77778687']) - - def test_should_patch_match_by(self): - # match_by can't change, so make sure old value works - self.run_patch_test(match_by=[u'flavor_id', u'image_id']) - - def test_should_patch_expression_no_change(self): - # match_by can't change, so make sure old value works - self.run_patch_test(expression=ALARM_DEF_123_FIELDS['expression']) - - def test_should_patch_expression_threshold_change(self): - self.run_patch_test(expression=ALARM_DEF_123_FIELDS['expression'].replace(' 10', ' 20')) - - def test_should_patch_expression_deterministic_change(self): - self.run_patch_test(expression=ALARM_DEF_123_FIELDS['expression'].replace(',deterministic', - '')) - - def test_should_patch_expression_function_change(self): - self.run_patch_test(expression=ALARM_DEF_123_FIELDS['expression'].replace('AVG', 'MAX')) - - def test_should_patch_expression_operation_change(self): - self.run_patch_test(expression=ALARM_DEF_123_FIELDS['expression'].replace('>', '<')) - - def test_should_patch_expression_period_change(self): - self.run_patch_test(expression=ALARM_DEF_123_FIELDS['expression'].replace(')', ', 120)')) - - def test_should_patch_expression_periods_change(self): - self.run_patch_test( - expression=ALARM_DEF_123_FIELDS['expression'].replace( - ' 10', ' 10 times 2')) - - def 
test_patch_fails_change_match_by(self): - self.assertRaises( - exceptions.InvalidUpdateException, - self.run_patch_test, - match_by=u'device') - - def test_patch_fails_change_metric_name(self): - self.assertRaises(exceptions.InvalidUpdateException, self.run_patch_test, - expression=ALARM_DEF_123_FIELDS['expression'].replace('hpcs.compute', - 'new_metric_name')) - - def test_patch_fails_change_metric_dimensions(self): - self.assertRaises(exceptions.InvalidUpdateException, self.run_patch_test, - expression=ALARM_DEF_123_FIELDS['expression'].replace('image_id=888', - 'image_id=42')) - - def test_patch_fails_change_num_sub_expressions(self): - self.assertRaises(exceptions.InvalidUpdateException, self.run_patch_test, - expression=ALARM_DEF_123_FIELDS['expression'] - .replace(' 10', ' 10 and MAX(cpu.idle_perc) < 10')) - - def run_patch_test(self, name=None, expression=None, description=None, actions_enabled=None, - alarm_actions=None, ok_actions=None, undetermined_actions=None, - match_by=None, severity=None): - if expression: - sub_expr_list = (alarm_expr_parser.AlarmExprParser(expression).sub_expr_list) - else: - sub_expr_list = None - - # updated_row (dict), sub_alarm_defs_dict - updates = self.repo.update_or_patch_alarm_definition(TENANT_ID, '123', - name, expression, - sub_expr_list, actions_enabled, - description, alarm_actions, - ok_actions, undetermined_actions, - match_by, severity, - patch=True) - - self.assertEqual(updates[0]['id'], ALARM_DEF_123_FIELDS['id']) - self.assertEqual(updates[0]['name'], name if name else ALARM_DEF_123_FIELDS['name']) - self.assertEqual( - updates[0]['expression'], - expression if expression else ALARM_DEF_123_FIELDS['expression'] - ) - self.assertEqual(updates[0]['match_by'], ALARM_DEF_123_FIELDS['match_by']) - self.assertEqual( - updates[0]['severity'], - severity if severity else ALARM_DEF_123_FIELDS['severity'] - ) - self.assertEqual( - updates[0]['actions_enabled'], - actions_enabled if actions_enabled else ALARM_DEF_123_FIELDS['actions_enabled'] - ) - self.assertEqual( - updates[0]['alarm_actions'], - u','.join(alarm_actions) if alarm_actions else ALARM_DEF_123_FIELDS['alarm_actions'] - ) - self.assertEqual( - updates[0]['ok_actions'], - u','.join(ok_actions) if ok_actions else ALARM_DEF_123_FIELDS['ok_actions'] - ) - self.assertEqual( - updates[0]['undetermined_actions'], - (u','.join(undetermined_actions) - if undetermined_actions - else ALARM_DEF_123_FIELDS['undetermined_actions']) - ) - - sad = self.default_sads[0] - if expression and ALARM_DEF_123_FIELDS['expression'] != expression: - sub_expr = sub_expr_list[0] - sub_alarm_def = sub_alarm_definition.SubAlarmDefinition( - row={'id': '', - 'alarm_definition_id': sad['alarm_definition_id'], - 'function': sub_expr.normalized_func, - 'metric_name': sub_expr.metric_name, - 'dimensions': u'device=1,image_id=888,flavor_id=777,metric_name=cpu', - 'operator': sub_expr.normalized_operator, - 'threshold': sub_expr.threshold, - 'period': sub_expr.period, - 'is_deterministic': sub_expr.deterministic, - 'periods': sub_expr.periods}) - expected_sub_alarm_maps = { - 'changed': { - u'111': sub_alarm_def}, - 'new': {}, - 'old': {}, - 'unchanged': {}} - else: - sub_alarm_def = sub_alarm_definition.SubAlarmDefinition( - row={'id': sad['id'], - 'alarm_definition_id': sad['alarm_definition_id'], - 'function': sad['function'], - 'metric_name': sad['metric_name'], - 'dimensions': u'device=1,image_id=888,flavor_id=777,metric_name=cpu', - 'operator': sad['operator'], - 'threshold': sad['threshold'], - 'period': 
sad['period'], - 'is_deterministic': sad['is_deterministic'], - 'periods': sad['periods']}) - expected_sub_alarm_maps = {'changed': {}, 'new': {}, - 'old': {}, 'unchanged': {u'111': sub_alarm_def}} - self.assertEqual(expected_sub_alarm_maps, updates[1]) diff --git a/monasca_api/tests/test_alarm_expression.py b/monasca_api/tests/test_alarm_expression.py deleted file mode 100644 index fbc201b77..000000000 --- a/monasca_api/tests/test_alarm_expression.py +++ /dev/null @@ -1,153 +0,0 @@ -# (C) Copyright 2016-2017 Hewlett Packard Enterprise Development LP -# Copyright 2017 Fujitsu LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import pyparsing - -from monasca_api.expression_parser import alarm_expr_parser -from monasca_api.tests import base - - -class TestAlarmExpression(base.BaseTestCase): - - good_simple_expression = "max(cpu.idle_perc{hostname=fred}, 60) <= 3 times 4 OR \ - avg(CPU.PERCENT)<5 OR min(cpu.percent, deterministic) gte 3" - - def test_good_expression(self): - expression = self.good_simple_expression - sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list - self.assertEqual(3, len(sub_exprs)) - - def test_fmtd_sub_expr(self): - expression = self.good_simple_expression - sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list - self.assertEqual([x.fmtd_sub_expr_str for x in sub_exprs], - ['MAX(cpu.idle_perc{hostname=fred}) <= 3.0 times 4', - 'AVG(CPU.PERCENT{}) < 5.0', 'MIN(cpu.percent{}) gte 3.0']) - - def test_dimensions_str(self): - expression = self.good_simple_expression - sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list - self.assertEqual([x.dimensions_str for x in sub_exprs], ['hostname=fred', '', '']) - - def test_function(self): - expression = self.good_simple_expression - sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list - self.assertEqual([x.func for x in sub_exprs], ['max', 'avg', 'min']) - - def test_normalized_function(self): - expression = self.good_simple_expression - sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list - self.assertEqual([x.normalized_func for x in sub_exprs], ['MAX', 'AVG', 'MIN']) - - def test_metric_name(self): - expression = self.good_simple_expression - sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list - self.assertEqual([x.metric_name for x in sub_exprs], - ['cpu.idle_perc', 'CPU.PERCENT', 'cpu.percent']) - - def test_normalized_metric_name(self): - expression = self.good_simple_expression - sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list - self.assertEqual([x.normalized_metric_name for x in sub_exprs], - ['cpu.idle_perc', 'cpu.percent', 'cpu.percent']) - - def test_dimensions(self): - expression = self.good_simple_expression - sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list - self.assertEqual([x.dimensions for x in sub_exprs], ['hostname=fred', '', '']) - - def test_dimensions_as_list(self): - expression = self.good_simple_expression - sub_exprs = 
alarm_expr_parser.AlarmExprParser(expression).sub_expr_list - print([x.dimensions_as_list for x in sub_exprs].__str__()) - self.assertEqual([x.dimensions_as_list for x in sub_exprs].__str__(), - "[ParseResults(['hostname=fred'], {}), [], []]") - - def test_operator(self): - expression = self.good_simple_expression - sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list - self.assertEqual([x.operator for x in sub_exprs], ['<=', '<', 'gte']) - - def test_threshold(self): - expression = self.good_simple_expression - sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list - self.assertEqual([x.threshold for x in sub_exprs], [3.0, 5.0, 3.0]) - - def test_period(self): - expression = self.good_simple_expression - sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list - self.assertEqual([x.period for x in sub_exprs], [60, 60, 60]) - - def test_periods(self): - expression = self.good_simple_expression - sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list - self.assertEqual([x.periods for x in sub_exprs], [4, 1, 1]) - - def test_deterministic(self): - expression = self.good_simple_expression - sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list - self.assertEqual([x.deterministic for x in sub_exprs], [False, False, True]) - - def test_normalized_operator(self): - expression = self.good_simple_expression - sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list - self.assertEqual([x.normalized_operator for x in sub_exprs], ['LTE', 'LT', 'GTE']) - - def test_id(self): - expression = self.good_simple_expression - sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list - self.assertEqual([x.id for x in sub_exprs], [None, None, None]) - - def test_set_id(self): - expression = self.good_simple_expression - sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list - for x in sub_exprs: - x.id = 88 - self.assertEqual([x.id for x in sub_exprs], [88, 88, 88]) - - def _ensure_parse_fails(self, expression): - parser = alarm_expr_parser.AlarmExprParser(expression) - self.assertRaises( - (pyparsing.ParseException, - pyparsing.ParseFatalException), - getattr, parser, "sub_expr_list") - - def test_incomplete_operator(self): - expression = self.good_simple_expression.replace('<= 3', '') - self._ensure_parse_fails(expression) - - def test_no_dimension_name(self): - expression = self.good_simple_expression.replace('hostname', '') - self._ensure_parse_fails(expression) - - def test_no_metric_name(self): - expression = self.good_simple_expression.replace('cpu.idle_perc', '') - self._ensure_parse_fails(expression) - - def test_invalid_period(self): - expression = self.good_simple_expression.replace('60', '42') - self._ensure_parse_fails(expression) - - def test_zero_period(self): - expression = self.good_simple_expression.replace('60', '0') - self._ensure_parse_fails(expression) - - def test_negative_period(self): - expression = self.good_simple_expression.replace('60', '-60') - self._ensure_parse_fails(expression) - - def test_zero_periods(self): - expression = self.good_simple_expression.replace('times 4', 'times 0') - self._ensure_parse_fails(expression) diff --git a/monasca_api/tests/test_alarms.py b/monasca_api/tests/test_alarms.py deleted file mode 100644 index bb9f7fd48..000000000 --- a/monasca_api/tests/test_alarms.py +++ /dev/null @@ -1,2092 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2015 Cray Inc. 
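# Editor's note: the grammar pinned down by the tests above is, per
# sub-expression,
#   func(metric{dimensions}[, period][, deterministic]) op threshold [times N]
# joined by and/or. A toy regex parser for a single simple sub-expression,
# purely illustrative -- the removed AlarmExprParser was pyparsing-based,
# validated periods, and handled the deterministic flag, none of which this
# sketch attempts.

import re

SUB_EXPR = re.compile(
    r'(?P<func>\w+)\((?P<metric>[\w.]+)'
    r'(?:\{(?P<dims>[^}]*)\})?'
    r'(?:,\s*(?P<period>\d+))?\)\s*'
    r'(?P<op><=|>=|<|>|lte|gte|lt|gt)\s*(?P<threshold>[\d.]+)'
    r'(?:\s+times\s+(?P<periods>\d+))?', re.IGNORECASE)

m = SUB_EXPR.match('max(cpu.idle_perc{hostname=fred}, 60) <= 3 times 4')
parsed = m.groupdict()
assert parsed['func'] == 'max' and parsed['periods'] == '4'
print(parsed)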
-# (C) Copyright 2015,2017 Hewlett Packard Enterprise Development LP -# Copyright 2016-2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from collections import OrderedDict -import copy -import datetime -import json - -import falcon.testing -import fixtures -import testtools.matchers as matchers - -from unittest.mock import Mock - -import oslo_config.fixture -import six - -from monasca_api.common.repositories.model import sub_alarm_definition -from monasca_api.tests import base -from monasca_api.v2.common.exceptions import HTTPUnprocessableEntityError -from monasca_api.v2.reference import alarm_definitions -from monasca_api.v2.reference import alarms - -CONF = oslo_config.cfg.CONF - -TENANT_ID = u"fedcba9876543210fedcba9876543210" - -ALARM_HISTORY = OrderedDict(( - # Only present in data returned from InfluxDB: - (u"time", u"2015-01-01T00:00:00.000Z"), - # Only present in data returned from API: - (u"timestamp", u"2015-01-01T00:00:00.000Z"), - (u"alarm_id", u"10000000-1000-1000-1000-10000000000"), - (u"metrics", [{ - u"id": None, - u"name": u"test.metric", - u"dimensions": {u"dim1": u"dval1", u"dim2": u"dval2"} - }]), - (u"new_state", u"ALARM"), - (u"old_state", u"OK"), - (u"reason", u"Alarm reason"), - (u"reason_data", u"{}"), - (u"sub_alarms", [{ - u"sub_alarm_expression": { - u"function": u"MAX", - # Only present in data returned from InfluxDB: - u"metric_definition": { - u"id": None, - u"name": u"test.metric", - u"dimensions": {u"dim1": u"dval1"}, - }, - # Only present in data returned from API: - u'metric_name': u'test.metric', - # Only present in data returned from API: - u'dimensions': {u'dim1': u'dval1'}, - u"operator": u"GT", - u"threshold": 50.0, - u"period": 60, - u"periods": 1 - }, - u"sub_alarm_state": u"ALARM", - u"current_values": [50.1], - }]), - # Only present in data returned from InfluxDB: - (u"tenant_id", TENANT_ID), - # Only present in data returned from API: - (u"id", u"1420070400000"), -)) - - -class InfluxClientAlarmHistoryResponseFixture(fixtures.MockPatch): - def _build_series(self, name, column_dict): - return { - "name": name, - "columns": column_dict.keys(), - "values": [column_dict.values(), ], - } - - def _setUp(self): - super(InfluxClientAlarmHistoryResponseFixture, self)._setUp() - - mock_data = copy.deepcopy(ALARM_HISTORY) - - del mock_data[u"id"] - del mock_data[u"timestamp"] - del mock_data[u"sub_alarms"][0][u"sub_alarm_expression"][u"metric_name"] - del mock_data[u"sub_alarms"][0][u"sub_alarm_expression"][u"dimensions"] - mock_data[u"sub_alarms"] = json.dumps(mock_data[u"sub_alarms"]) - mock_data[u"metrics"] = json.dumps(mock_data[u"metrics"]) - - self.mock.return_value.query.return_value.raw = { - "series": [self._build_series("alarm_state_history", mock_data)] - } - - -class RESTResponseEquals(object): - """Match if the supplied data contains a single string containing a JSON - object which decodes to match expected_data, excluding the contents of - the 'links' key. 
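# Editor's note: the fixture above feeds the repository a raw InfluxDB result
# of the shape {'series': [{'name', 'columns', 'values'}]}. Decoding it is
# just zipping each value row against the column names; a standalone sketch
# of that step (decode_series is a hypothetical helper, not the removed
# repository code).

def decode_series(raw):
    for series in raw.get('series', []):
        for values in series['values']:
            yield dict(zip(series['columns'], values))

raw = {'series': [{'name': 'alarm_state_history',
                   'columns': ['time', 'new_state', 'old_state'],
                   'values': [['2015-01-01T00:00:00.000Z', 'ALARM', 'OK']]}]}
assert next(decode_series(raw))['new_state'] == 'ALARM'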
- """ - - def __init__(self, expected_data): - self.expected_data = expected_data - - if u"links" in expected_data: - del expected_data[u"links"] - - def __str__(self): - return 'RESTResponseEquals(%s)' % (self.expected,) - - def match(self, actual): - response_data = actual.json - - if u"links" in response_data: - del response_data[u"links"] - - return matchers.Equals(self.expected_data).match(response_data) - - -class AlarmTestBase(base.BaseApiTestCase): - def setUp(self): - super(AlarmTestBase, self).setUp() - - self.useFixture(fixtures.MockPatch( - 'monasca_api.common.messaging.kafka_publisher.KafkaPublisher')) - - # [messaging] - self.conf_override( - driver='monasca_api.common.messaging.' - 'kafka_publisher:KafkaPublisher', - group='messaging') - - # [repositories] - self.conf_override( - alarms_driver='monasca_api.common.repositories.sqla.' - 'alarms_repository:AlarmsRepository', - group='repositories') - self.conf_override( - alarm_definitions_driver='monasca_api.common.repositories.' - 'alarm_definitions_repository:' - 'AlarmDefinitionsRepository', - group='repositories') - self.conf_override( - metrics_driver='monasca_api.common.repositories.influxdb.' - 'metrics_repository:MetricsRepository', - group='repositories') - - -class TestAlarmsStateHistory(AlarmTestBase): - def setUp(self): - super(TestAlarmsStateHistory, self).setUp() - - self.useFixture(fixtures.MockPatch( - 'monasca_api.common.repositories.sqla.' - 'alarms_repository.AlarmsRepository')) - self.useFixture(InfluxClientAlarmHistoryResponseFixture( - 'monasca_api.common.repositories.influxdb.' - 'metrics_repository.client.InfluxDBClient')) - - self.alarms_resource = alarms.AlarmsStateHistory() - self.app.add_route( - '/v2.0/alarms/{alarm_id}/state-history', self.alarms_resource) - - self.app.add_route( - '/v2.0/alarms/state-history', self.alarms_resource) - - def test_alarm_state_history(self): - expected_elements = {u"elements": [dict(ALARM_HISTORY)]} - del expected_elements[u"elements"][0][u"time"] - del (expected_elements[u"elements"][0][u"sub_alarms"][0] - [u"sub_alarm_expression"][u"metric_definition"]) - del expected_elements[u"elements"][0][u"tenant_id"] - response = self.simulate_request( - path=u'/v2.0/alarms/%s/state-history/' % ALARM_HISTORY[u"alarm_id"], - headers={ - 'X-Roles': CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID, - }) - self.assertEqual(response.status, falcon.HTTP_200) - self.assertThat(response, RESTResponseEquals(expected_elements)) - - def test_alarm_state_history_no_alarm_id(self): - expected_elements = {u'elements': []} - - response = self.simulate_request( - path=u'/v2.0/alarms/state-history/', - headers={ - 'X-Roles': CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID, - }) - self.assertEqual(response.status, falcon.HTTP_200) - self.assertThat(response, RESTResponseEquals(expected_elements)) - - -class TestAlarmsCount(AlarmTestBase): - def setUp(self): - super(TestAlarmsCount, self).setUp() - - self.alarms_get_alarms_count_mock = self.useFixture(fixtures.MockPatch( - 'monasca_api.common.repositories.sqla.alarms_repository.AlarmsRepository' - )).mock - - self.alarms_count_resource = alarms.AlarmsCount() - self.app.add_route('/v2.0/alarms/count', - self.alarms_count_resource) - - def test_get_alarm_count(self): - return_value = self.alarms_get_alarms_count_mock.return_value - expected_elements = {'counts': [[4]], 'columns': ['count']} - - return_value.get_alarms_count.return_value = [{'count': 4}] - - response = 
self.simulate_request(path='/v2.0/alarms/count', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET') - - self.assertEqual(response.status, falcon.HTTP_200) - self.assertThat(response, RESTResponseEquals(expected_elements)) - - def test_get_alarm_count_state_parameter(self): - return_value = self.alarms_get_alarms_count_mock.return_value - expected_elements = {'counts': [[4]], 'columns': ['count']} - - return_value.get_alarms_count.return_value = [{'count': 4}] - - response = self.simulate_request(path='/v2.0/alarms/count', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET', - query_string='state=OK') - - self.assertEqual(response.status, falcon.HTTP_200) - self.assertThat(response, RESTResponseEquals(expected_elements)) - - def test_get_alarm_count_severity_parameter(self): - return_value = self.alarms_get_alarms_count_mock.return_value - expected_elements = {'counts': [[4]], 'columns': ['count']} - - return_value.get_alarms_count.return_value = [{'count': 4}] - - response = self.simulate_request(path='/v2.0/alarms/count', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET', - query_string='severity=LOW') - - self.assertEqual(response.status, falcon.HTTP_200) - self.assertThat(response, RESTResponseEquals(expected_elements)) - - def test_get_alarm_count_group_by_parameter(self): - return_value = self.alarms_get_alarms_count_mock.return_value - expected_elements = {'columns': ['count', 'metric_name'], - 'counts': [[2, 'cpu.idle_perc'], - [1, 'cpu.sys_mem']]} - - return_value.get_alarms_count.return_value = [{'metric_name': u'cpu.idle_perc', 'count': 2}, - {'metric_name': u'cpu.sys_mem', 'count': 1}] - - response = self.simulate_request(path='/v2.0/alarms/count', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET', - query_string='group_by=metric_name') - - self.assertEqual(response.status, falcon.HTTP_200) - self.assertThat(response, RESTResponseEquals(expected_elements)) - - expected_elements = {'columns': ['count', 'metric_name', 'dimension_name'], - 'counts': [[2, 'cpu.idle_perc', 'hostname'], - [1, 'cpu.sys_mem', 'hostname']]} - - return_value.get_alarms_count.return_value = [{'metric_name': u'cpu.idle_perc', - 'dimension_name': 'hostname', - 'count': 2}, - {'metric_name': u'cpu.sys_mem', - 'dimension_name': 'hostname', - 'count': 1}] - - response = self.simulate_request(path='/v2.0/alarms/count', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET', - query_string='group_by=metric_name,dimension_name') - - self.assertEqual(response.status, falcon.HTTP_200) - self.assertThat(response, RESTResponseEquals(expected_elements)) - - def test_get_alarm_count_incorrect_group_by_parameter(self): - return_value = self.alarms_get_alarms_count_mock.return_value - - return_value.get_alarms_count.return_value = [{'metric_name': u'cpu.idle_perc', 'count': 2}, - {'metric_name': u'cpu.sys_mem', 'count': 1}] - - response = self.simulate_request( - path='/v2.0/alarms/count', - headers={'X-Roles': CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET', - query_string='group_by=hahahah') - - self.assertEqual(response.status, falcon.HTTP_422) - - def test_get_alarm_count_offset(self): - return_value = self.alarms_get_alarms_count_mock.return_value - expected_elements = {'columns': 
['count', 'metric_name'], - 'counts': [[2, 'cpu.idle_perc']]} - - return_value.get_alarms_count.return_value = [{'metric_name': u'cpu.idle_perc', 'count': 2}] - - response = self.simulate_request(path='/v2.0/alarms/count', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET', - query_string='group_by=metric_name&offset=1') - self.assertEqual(response.status, falcon.HTTP_200) - self.assertThat(response, RESTResponseEquals(expected_elements)) - - def test_get_alarm_count_incorrect_offset(self): - return_value = self.alarms_get_alarms_count_mock.return_value - expected_elements = {'description': 'Offset must be a valid integer, was hahahah', - 'title': 'Unprocessable Entity'} - - return_value.get_alarms_count.return_value = [{'metric_name': u'cpu.idle_perc', 'count': 2}] - - response = self.simulate_request(path='/v2.0/alarms/count', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET', - query_string='group_by=metric_name&offset=hahahah') - self.assertEqual(response.status, falcon.HTTP_422) - self.assertThat(response, RESTResponseEquals(expected_elements)) - - def test_get_alarm_count_limit_parameter(self): - return_value = self.alarms_get_alarms_count_mock.return_value - expected_elements = {'counts': [[4]], 'columns': ['count']} - - return_value.get_alarms_count.return_value = [{'count': 4}] - - response = self.simulate_request(path='/v2.0/alarms/count', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET', - query_string='limit=1') - - self.assertEqual(response.status, falcon.HTTP_200) - self.assertThat(response, RESTResponseEquals(expected_elements)) - - return_value.get_alarms_count.return_value = [{'count': 4}] - expected_elements = {'counts': [], 'columns': ['count']} - - response = self.simulate_request(path='/v2.0/alarms/count', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET', - query_string='limit=0') - - self.assertEqual(response.status, falcon.HTTP_200) - self.assertThat(response, RESTResponseEquals(expected_elements)) - - def test_get_alarm_count_when_count_is_zero(self): - return_value = self.alarms_get_alarms_count_mock.return_value - expected_elements = {'columns': ['count', 'metric_name'], 'counts': [[0, None]]} - - return_value.get_alarms_count.return_value = [{'count': 0}] - - response = self.simulate_request(path='/v2.0/alarms/count', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET', - query_string='group_by=metric_name') - - self.assertEqual(response.status, falcon.HTTP_200) - self.assertThat(response, RESTResponseEquals(expected_elements)) - - expected_elements = {'columns': ['count'], 'counts': [[0]]} - response = self.simulate_request(path='/v2.0/alarms/count', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET') - - self.assertEqual(response.status, falcon.HTTP_200) - self.assertThat(response, RESTResponseEquals(expected_elements)) - - -class TestAlarms(AlarmTestBase): - def setUp(self): - super(TestAlarms, self).setUp() - - self.alarms_repo_mock = self.useFixture(fixtures.MockPatch( - 'monasca_api.common.repositories.sqla.alarms_repository.AlarmsRepository' - )).mock - - self.alarms_resource = alarms.Alarms() - self.app.add_route('/v2.0/alarms', - self.alarms_resource) - 
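# Editor's note: TestAlarmsCount and TestAlarms above never touch a database;
# fixtures.MockPatch swaps the whole repository class and each test scripts
# the return value of the method under test. A minimal standalone sketch of
# that pattern against a stand-in class defined in this module (Repo is
# invented for illustration).

import fixtures
import unittest

class Repo:                      # stand-in for the patched repository class
    def get_alarms_count(self):
        raise RuntimeError('would hit the database')

class ExampleTest(fixtures.TestWithFixtures):
    def test_counts_are_scripted(self):
        mock = self.useFixture(fixtures.MockPatch(f'{__name__}.Repo')).mock
        mock.return_value.get_alarms_count.return_value = [{'count': 4}]
        repo = Repo()            # now the mock class, not the real one
        self.assertEqual([{'count': 4}], repo.get_alarms_count())

if __name__ == '__main__':
    unittest.main()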
self.app.add_route('/v2.0/alarms/{alarm_id}', - self.alarms_resource) - - def test_alarms_get_alarms(self): - return_value = self.alarms_repo_mock.return_value - return_value.get_alarms.return_value = \ - [{'alarm_definition_id': '1', - 'metric_dimensions': 'instance_id=123,service=monitoring', - 'alarm_definition_name': '90% CPU', - 'state': 'OK', - 'state_updated_timestamp': datetime.datetime(2015, 3, 14, 9, 26, 53), - 'metric_name': 'cpu.idle_perc', - 'link': 'http://somesite.com/this-alarm-info', - 'severity': 'LOW', - 'created_timestamp': datetime.datetime(2015, 3, 14, 9, 26, 53), - 'updated_timestamp': datetime.datetime(2015, 3, 14, 9, 26, 53), - 'alarm_id': '1', - 'lifecycle_state': 'OPEN'}] - - expected_alarms = { - 'elements': [{ - 'alarm_definition': { - 'id': '1', - 'links': [{ - 'href': 'http://falconframework.org/v2.0/alarm-definitions/1', - 'rel': 'self'}], - 'name': '90% CPU', - 'severity': 'LOW'}, - 'created_timestamp': '2015-03-14T09:26:53Z', - 'id': '1', - 'lifecycle_state': 'OPEN', - 'link': 'http://somesite.com/this-alarm-info', - 'links': [{ - 'href': 'http://falconframework.org/v2.0/alarms/1', - 'rel': 'self'}], - 'metrics': [{ - 'dimensions': { - 'instance_id': '123', - 'service': 'monitoring'}, - 'name': 'cpu.idle_perc'}], - 'state': 'OK', - 'state_updated_timestamp': '2015-03-14T09:26:53Z', - 'updated_timestamp': '2015-03-14T09:26:53Z'}]} - - response = self.simulate_request(path='/v2.0/alarms', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET') - - self.assertEqual(response.status, falcon.HTTP_200) - self.assertThat(response, RESTResponseEquals(expected_alarms)) - - def test_alarms_get_alarm(self): - return_value = self.alarms_repo_mock.return_value - return_value.get_alarm.return_value = \ - [{'alarm_definition_id': '1', - 'metric_dimensions': 'instance_id=123,service=monitoring', - 'alarm_definition_name': '90% CPU', - 'state': 'OK', - 'state_updated_timestamp': datetime.datetime(2015, 3, 14, 9, 26, 53), - 'metric_name': 'cpu.idle_perc', - 'link': 'http://somesite.com/this-alarm-info', - 'severity': 'LOW', - 'created_timestamp': datetime.datetime(2015, 3, 14, 9, 26, 53), - 'updated_timestamp': datetime.datetime(2015, 3, 14, 9, 26, 53), - 'alarm_id': '1', - 'lifecycle_state': 'OPEN'}] - - expected_alarms = {'alarm_definition': { - 'id': '1', - 'links': [{ - 'href': 'http://falconframework.org/v2.0/alarm-definitions/1', - 'rel': 'self'}], - 'name': '90% CPU', - 'severity': 'LOW'}, - 'created_timestamp': '2015-03-14T09:26:53Z', - 'id': '1', - 'lifecycle_state': 'OPEN', - 'link': 'http://somesite.com/this-alarm-info', - 'links': [{ - 'href': 'http://falconframework.org/v2.0/alarms/1', - 'rel': 'self'}], - 'metrics': [{ - 'dimensions': { - 'instance_id': '123', - 'service': 'monitoring'}, - 'name': 'cpu.idle_perc'}], - 'state': 'OK', - 'state_updated_timestamp': '2015-03-14T09:26:53Z', - 'updated_timestamp': '2015-03-14T09:26:53Z'} - - response = self.simulate_request(path='/v2.0/alarms/1', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET') - - self.assertEqual(response.status, falcon.HTTP_200) - self.assertThat(response, RESTResponseEquals(expected_alarms)) - - def test_alarms_get_alarms_state_parameter(self): - return_value = self.alarms_repo_mock.return_value - return_value.get_alarms.return_value = \ - [{'alarm_definition_id': '1', - 'metric_dimensions': 'instance_id=123,service=monitoring', - 'alarm_definition_name': '90% CPU', - 'state': 
'OK', - 'state_updated_timestamp': datetime.datetime(2015, 3, 14, 9, 26, 53), - 'metric_name': 'cpu.idle_perc', - 'link': 'http://somesite.com/this-alarm-info', - 'severity': 'LOW', - 'created_timestamp': datetime.datetime(2015, 3, 14, 9, 26, 53), - 'updated_timestamp': datetime.datetime(2015, 3, 14, 9, 26, 53), - 'alarm_id': '1', - 'lifecycle_state': 'OPEN'}] - - expected_alarms = { - 'elements': [{ - 'alarm_definition': { - 'id': '1', - 'links': [{ - 'href': 'http://falconframework.org/v2.0/alarm-definitions/1', - 'rel': 'self'}], - 'name': '90% CPU', - 'severity': 'LOW'}, - 'created_timestamp': '2015-03-14T09:26:53Z', - 'id': '1', - 'lifecycle_state': 'OPEN', - 'link': 'http://somesite.com/this-alarm-info', - 'links': [{ - 'href': 'http://falconframework.org/v2.0/alarms/1', - 'rel': 'self'}], - 'metrics': [{ - 'dimensions': { - 'instance_id': '123', - 'service': 'monitoring'}, - 'name': 'cpu.idle_perc'}], - 'state': 'OK', - 'state_updated_timestamp': '2015-03-14T09:26:53Z', - 'updated_timestamp': '2015-03-14T09:26:53Z'}]} - response = self.simulate_request(path='/v2.0/alarms', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET', - query_string='state=OK') - - self.assertEqual(response.status, falcon.HTTP_200) - self.assertThat(response, RESTResponseEquals(expected_alarms)) - - def test_alarms_get_alarms_severity_parameter(self): - return_value = self.alarms_repo_mock.return_value - return_value.get_alarms.return_value = \ - [{'alarm_definition_id': '1', - 'metric_dimensions': 'instance_id=123,service=monitoring', - 'alarm_definition_name': '90% CPU', - 'state': 'OK', - 'state_updated_timestamp': datetime.datetime(2015, 3, 14, 9, 26, 53), - 'metric_name': 'cpu.idle_perc', - 'link': 'http://somesite.com/this-alarm-info', - 'severity': 'LOW', - 'created_timestamp': datetime.datetime(2015, 3, 14, 9, 26, 53), - 'updated_timestamp': datetime.datetime(2015, 3, 14, 9, 26, 53), - 'alarm_id': '1', - 'lifecycle_state': 'OPEN'}] - - expected_alarms = { - 'elements': [{ - 'alarm_definition': { - 'id': '1', - 'links': [{ - 'href': 'http://falconframework.org/v2.0/alarm-definitions/1', - 'rel': 'self'}], - 'name': '90% CPU', - 'severity': 'LOW'}, - 'created_timestamp': '2015-03-14T09:26:53Z', - 'id': '1', - 'lifecycle_state': 'OPEN', - 'link': 'http://somesite.com/this-alarm-info', - 'links': [{ - 'href': 'http://falconframework.org/v2.0/alarms/1', - 'rel': 'self'}], - 'metrics': [{ - 'dimensions': { - 'instance_id': '123', - 'service': 'monitoring'}, - 'name': 'cpu.idle_perc'}], - 'state': 'OK', - 'state_updated_timestamp': '2015-03-14T09:26:53Z', - 'updated_timestamp': '2015-03-14T09:26:53Z'}]} - response = self.simulate_request(path='/v2.0/alarms', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET', - query_string='severity=LOW') - - self.assertEqual(response.status, falcon.HTTP_200) - self.assertThat(response, RESTResponseEquals(expected_alarms)) - - def test_alarms_get_alarms_with_offset(self): - return_value = self.alarms_repo_mock.return_value - return_value.get_alarms.return_value = \ - [{'alarm_definition_id': '1', - 'metric_dimensions': 'instance_id=123,service=monitoring', - 'alarm_definition_name': '90% CPU', - 'state': 'OK', - 'state_updated_timestamp': datetime.datetime(2015, 3, 14, 9, 26, 53), - 'metric_name': 'cpu.idle_perc', - 'link': 'http://somesite.com/this-alarm-info', - 'severity': 'LOW', - 'created_timestamp': datetime.datetime(2015, 3, 14, 9, 26, 53), - 
'updated_timestamp': datetime.datetime(2015, 3, 14, 9, 26, 53), - 'alarm_id': '1', - 'lifecycle_state': 'OPEN'}] - - expected_alarms = { - 'elements': [{ - 'alarm_definition': { - 'id': '1', - 'links': [{ - 'href': 'http://falconframework.org/v2.0/alarm-definitions/1', - 'rel': 'self'}], - 'name': '90% CPU', - 'severity': 'LOW'}, - 'created_timestamp': '2015-03-14T09:26:53Z', - 'id': '1', - 'lifecycle_state': 'OPEN', - 'link': 'http://somesite.com/this-alarm-info', - 'links': [{ - 'href': 'http://falconframework.org/v2.0/alarms/1', - 'rel': 'self'}], - 'metrics': [{ - 'dimensions': { - 'instance_id': '123', - 'service': 'monitoring'}, - 'name': 'cpu.idle_perc'}], - 'state': 'OK', - 'state_updated_timestamp': '2015-03-14T09:26:53Z', - 'updated_timestamp': '2015-03-14T09:26:53Z'}]} - response = self.simulate_request(path='/v2.0/alarms', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET', - query_string='offset=1') - - self.assertEqual(response.status, falcon.HTTP_200) - self.assertThat(response, RESTResponseEquals(expected_alarms)) - - def test_alarms_get_alarms_with_incorrect_offset(self): - return_value = self.alarms_repo_mock.return_value - return_value.get_alarms.return_value = \ - [{'alarm_definition_id': '1', - 'metric_dimensions': 'instance_id=123,service=monitoring', - 'alarm_definition_name': '90% CPU', - 'state': 'OK', - 'state_updated_timestamp': datetime.datetime(2015, 3, 14, 9, 26, 53), - 'metric_name': 'cpu.idle_perc', - 'link': 'http://somesite.com/this-alarm-info', - 'severity': 'LOW', - 'created_timestamp': datetime.datetime(2015, 3, 14, 9, 26, 53), - 'updated_timestamp': datetime.datetime(2015, 3, 14, 9, 26, 53), - 'alarm_id': '1', - 'lifecycle_state': 'OPEN'}] - - response = self.simulate_request( - path='/v2.0/alarms', - headers={'X-Roles': CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET', - query_string='offset=ninccorect_offset') - - self.assertEqual(response.status, falcon.HTTP_422) - - def test_alarms_get_alarms_sort_by_parameter(self): - return_value = self.alarms_repo_mock.return_value - return_value.get_alarms.return_value = \ - [{'alarm_definition_id': '1', - 'metric_dimensions': 'instance_id=123,service=monitoring', - 'alarm_definition_name': '90% CPU', - 'state': 'OK', - 'state_updated_timestamp': datetime.datetime(2015, 3, 14, 9, 26, 53), - 'metric_name': 'cpu.idle_perc', - 'link': 'http://somesite.com/this-alarm-info', - 'severity': 'LOW', - 'created_timestamp': datetime.datetime(2015, 3, 14, 9, 26, 53), - 'updated_timestamp': datetime.datetime(2015, 3, 14, 9, 26, 53), - 'alarm_id': '1', - 'lifecycle_state': 'OPEN'}] - - expected_alarms = { - 'elements': [{ - 'alarm_definition': { - 'id': '1', - 'links': [{ - 'href': 'http://falconframework.org/v2.0/alarm-definitions/1', - 'rel': 'self'}], - 'name': '90% CPU', - 'severity': 'LOW'}, - 'created_timestamp': '2015-03-14T09:26:53Z', - 'id': '1', - 'lifecycle_state': 'OPEN', - 'link': 'http://somesite.com/this-alarm-info', - 'links': [{ - 'href': 'http://falconframework.org/v2.0/alarms/1', - 'rel': 'self'}], - 'metrics': [{ - 'dimensions': { - 'instance_id': '123', - 'service': 'monitoring'}, - 'name': 'cpu.idle_perc'}], - 'state': 'OK', - 'state_updated_timestamp': '2015-03-14T09:26:53Z', - 'updated_timestamp': '2015-03-14T09:26:53Z'}]} - response = self.simulate_request(path='/v2.0/alarms', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET', - 
query_string='sort_by=alarm_id') - - self.assertEqual(response.status, falcon.HTTP_200) - self.assertThat(response, RESTResponseEquals(expected_alarms)) - - def test_alarms_get_alarms_incorrect_sort_by_parameter(self): - return_value = self.alarms_repo_mock.return_value - return_value.get_alarms.return_value = \ - [{'alarm_definition_id': '1', - 'metric_dimensions': 'instance_id=123,service=monitoring', - 'alarm_definition_name': '90% CPU', - 'state': 'OK', - 'state_updated_timestamp': datetime.datetime(2015, 3, 14, 9, 26, 53), - 'metric_name': 'cpu.idle_perc', - 'link': 'http://somesite.com/this-alarm-info', - 'severity': 'LOW', - 'created_timestamp': datetime.datetime(2015, 3, 14, 9, 26, 53), - 'updated_timestamp': datetime.datetime(2015, 3, 14, 9, 26, 53), - 'alarm_id': '1', - 'lifecycle_state': 'OPEN'}] - - response = self.simulate_request(path='/v2.0/alarms', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET', - query_string='sort_by=random_string') - - self.assertEqual(response.status, falcon.HTTP_422) - - def test_alarms_delete_alarms(self): - return_value = self.alarms_repo_mock.return_value - return_value.get_alarm_metrics.return_value = \ - [{'alarm_id': u'2', - 'name': u'cpu.idle_perc', - 'dimensions': u'instance_id=123,service=monitoring'}] - return_value.get_sub_alarms.return_value = \ - [{'sub_alarm_id': u'1', - 'alarm_id': u'2', - 'expression': u'avg(cpu.idle_perc{instance_id=123, service=monitoring}) > 10', - 'alarm_definition_id': u'1'}] - response = self.simulate_request(path='/v2.0/alarms/2', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='DELETE') - self.assertEqual(response.status, falcon.HTTP_204) - - def test_alarms_put(self): - return_value = self.alarms_repo_mock.return_value - return_value.get_alarm_metrics.return_value = \ - [{'alarm_id': u'2', - 'name': u'cpu.idle_perc', - 'dimensions': u'instance_id=123,service=monitoring'}] - return_value.get_sub_alarms.return_value = \ - [{'sub_alarm_id': u'1', - 'alarm_id': u'2', - 'expression': u'avg(cpu.idle_perc{instance_id=123, service=monitoring}) > 10', - 'alarm_definition_id': u'1'}] - return_value.update_alarm.return_value = \ - ({'state': u'UNDETERMINED', - 'link': u'http://somesite.com/this-alarm-info', - 'lifecycle_state': u'OPEN'}, - 1550835096962) - return_value.get_alarm_definition.return_value = \ - {'description': None, - 'tenant_id': u'bob', - 'created_at': datetime.datetime(2019, 2, 22, 12, 44, 25, 850947), - 'updated_at': datetime.datetime(2019, 2, 22, 12, 44, 25, 850963), - 'name': u'90% CPU', - 'actions_enabled': False, - 'match_by': None, - 'deleted_at': None, - 'expression': u'avg(cpu.idle_perc{instance_id=123, service=monitoring}) > 10', - 'id': u'1', - 'severity': u'LOW'} - return_value.get_alarm.return_value = \ - [{'alarm_definition_id': '1', - 'metric_dimensions': 'instance_id=123,service=monitoring', - 'alarm_definition_name': '90% CPU', - 'state': 'OK', - 'state_updated_timestamp': datetime.datetime(2019, 2, 22, 12, 44, 25, 850947), - 'metric_name': 'cpu.idle_perc', - 'link': 'http://somesite.com/this-alarm-info', - 'severity': 'LOW', - 'created_timestamp': datetime.datetime(2019, 2, 22, 12, 44, 25, 850947), - 'updated_timestamp': datetime.datetime(2019, 2, 22, 12, 44, 25, 850947), - 'alarm_id': '1', - 'lifecycle_state': 'ALARM'}] - alarm_new_fields = {'state': 'ALARM', - 'lifecycle_state': 'OPEN', - 'link': 'http://somesite.com/this-alarm-info'} - - expected_alarm = 
{u'alarm_definition': {u'id': u'1', - u'links': [ - {u'href': u'http://falconframework.org' - u'/v2.0/alarm-definitions/1', - u'rel': u'self'}], - u'name': u'90% CPU', - u'severity': u'LOW'}, - u'created_timestamp': u'2019-02-22T12:44:25.850947Z', - u'id': u'1', - u'lifecycle_state': u'ALARM', - u'link': u'http://somesite.com/this-alarm-info', - u'metrics': [{u'dimensions': {u'instance_id': u'123', - u'service': u'monitoring'}, - u'name': u'cpu.idle_perc'}], - u'state': u'OK', - u'state_updated_timestamp': u'2019-02-22T12:44:25.850947Z', - u'updated_timestamp': u'2019-02-22T12:44:25.850947Z'} - - response = self.simulate_request(path='/v2.0/alarms/2', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='PUT', - body=json.dumps(alarm_new_fields)) - - self.assertEqual(response.status, falcon.HTTP_200) - self.assertThat(response, RESTResponseEquals(expected_alarm)) - - def test_alarms_put_without_link(self): - alarm_new_fields = {'state': 'ALARM', - 'lifecycle_state': 'OPEN'} - expected_response = {u'description': u"Field 'link' is required", - u'title': u'Unprocessable Entity'} - response = self.simulate_request(path='/v2.0/alarms/2', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='PUT', - body=json.dumps(alarm_new_fields)) - - self.assertEqual(response.status, falcon.HTTP_422) - self.assertThat(response, RESTResponseEquals(expected_response)) - - def test_alarms_put_without_lifecycle_state(self): - alarm_new_fields = {'state': 'ALARM', - 'link': 'http://somesite.com/this-alarm-info'} - expected_response = {u'description': u"Field 'lifecycle_state' is required", - u'title': u'Unprocessable Entity'} - response = self.simulate_request(path='/v2.0/alarms/2', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='PUT', - body=json.dumps(alarm_new_fields)) - - self.assertEqual(response.status, falcon.HTTP_422) - self.assertThat(response, RESTResponseEquals(expected_response)) - - def test_alarms_put_without_state(self): - alarm_new_fields = {'lifecycle_state': 'OPEN', - 'link': 'http://somesite.com/this-alarm-info'} - expected_response = {u'description': u"Field 'state' is required", - u'title': u'Unprocessable Entity'} - response = self.simulate_request(path='/v2.0/alarms/2', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='PUT', - body=json.dumps(alarm_new_fields)) - - self.assertEqual(response.status, falcon.HTTP_422) - self.assertThat(response, RESTResponseEquals(expected_response)) - - def test_alarms_patch(self): - return_value = self.alarms_repo_mock.return_value - return_value.get_alarm_metrics.return_value = \ - [{'alarm_id': u'2', - 'name': u'cpu.idle_perc', - 'dimensions': u'instance_id=123,service=monitoring'}] - return_value.get_sub_alarms.return_value = \ - [{'sub_alarm_id': u'1', - 'alarm_id': u'2', - 'expression': u'avg(cpu.idle_perc{instance_id=123, service=monitoring}) > 10', - 'alarm_definition_id': u'1'}] - return_value.update_alarm.return_value = \ - ({'state': u'UNDETERMINED', - 'link': u'http://somesite.com/this-alarm-info', - 'lifecycle_state': u'OPEN'}, - 1550835096962) - return_value.get_alarm_definition.return_value = \ - {'description': None, - 'tenant_id': u'bob', - 'created_at': datetime.datetime(2019, 2, 22, 12, 44, 25, 850947), - 'updated_at': datetime.datetime(2019, 2, 22, 12, 44, 25, 850963), - 'name': u'90% CPU', - 'actions_enabled': False, - 
'match_by': None, - 'deleted_at': None, - 'expression': u'avg(cpu.idle_perc{instance_id=123, service=monitoring}) > 10', - 'id': u'1', - 'severity': u'LOW'} - return_value.get_alarm.return_value = \ - [{'alarm_definition_id': '1', - 'metric_dimensions': 'instance_id=123,service=monitoring', - 'alarm_definition_name': '90% CPU', - 'state': 'OK', - 'state_updated_timestamp': datetime.datetime(2019, 2, 22, 12, 44, 25, 850947), - 'metric_name': 'cpu.idle_perc', - 'link': 'http://somesite.com/this-alarm-info', - 'severity': 'LOW', - 'created_timestamp': datetime.datetime(2019, 2, 22, 12, 44, 25, 850947), - 'updated_timestamp': datetime.datetime(2019, 2, 22, 12, 44, 25, 850947), - 'alarm_id': '1', - 'lifecycle_state': 'ALARM'}] - alarm_new_fields = {'state': 'ALARM', - 'lifecycle_state': 'OPEN', - 'link': 'http://somesite.com/this-alarm-info'} - - expected_alarm = {u'alarm_definition': {u'id': u'1', - u'links': [ - {u'href': u'http://falconframework.org' - u'/v2.0/alarm-definitions/1', - u'rel': u'self'}], - u'name': u'90% CPU', - u'severity': u'LOW'}, - u'created_timestamp': u'2019-02-22T12:44:25.850947Z', - u'id': u'1', - u'lifecycle_state': u'ALARM', - u'link': u'http://somesite.com/this-alarm-info', - u'metrics': [{u'dimensions': {u'instance_id': u'123', - u'service': u'monitoring'}, - u'name': u'cpu.idle_perc'}], - u'state': u'OK', - u'state_updated_timestamp': u'2019-02-22T12:44:25.850947Z', - u'updated_timestamp': u'2019-02-22T12:44:25.850947Z'} - - response = self.simulate_request(path='/v2.0/alarms/2', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='PATCH', - body=json.dumps(alarm_new_fields)) - - self.assertEqual(response.status, falcon.HTTP_200) - self.assertThat(response, RESTResponseEquals(expected_alarm)) - - def test_alarms_patch_without_new_fields(self): - return_value = self.alarms_repo_mock.return_value - return_value.get_alarm_metrics.return_value = \ - [{'alarm_id': u'2', - 'name': u'cpu.idle_perc', - 'dimensions': u'instance_id=123,service=monitoring'}] - return_value.get_sub_alarms.return_value = \ - [{'sub_alarm_id': u'1', - 'alarm_id': u'2', - 'expression': u'avg(cpu.idle_perc{instance_id=123, service=monitoring}) > 10', - 'alarm_definition_id': u'1'}] - return_value.update_alarm.return_value = \ - ({'state': u'UNDETERMINED', - 'link': u'http://somesite.com/this-alarm-info', - 'lifecycle_state': u'OPEN'}, - 1550835096962) - return_value.get_alarm_definition.return_value = \ - {'description': None, - 'tenant_id': u'bob', - 'created_at': datetime.datetime(2019, 2, 22, 12, 44, 25, 850947), - 'updated_at': datetime.datetime(2019, 2, 22, 12, 44, 25, 850963), - 'name': u'90% CPU', - 'actions_enabled': False, - 'match_by': None, - 'deleted_at': None, - 'expression': u'avg(cpu.idle_perc{instance_id=123, service=monitoring}) > 10', - 'id': u'1', - 'severity': u'LOW'} - return_value.get_alarm.return_value = \ - [{'alarm_definition_id': '1', - 'metric_dimensions': 'instance_id=123,service=monitoring', - 'alarm_definition_name': '90% CPU', - 'state': 'OK', - 'state_updated_timestamp': datetime.datetime(2019, 2, 22, 12, 44, 25, 850947), - 'metric_name': 'cpu.idle_perc', - 'link': 'http://somesite.com/this-alarm-info', - 'severity': 'LOW', - 'created_timestamp': datetime.datetime(2019, 2, 22, 12, 44, 25, 850947), - 'updated_timestamp': datetime.datetime(2019, 2, 22, 12, 44, 25, 850947), - 'alarm_id': '1', - 'lifecycle_state': 'ALARM'}] - alarm_new_fields = {} - - expected_alarm = {u'alarm_definition': {u'id': u'1', - u'links': [ 
- {u'href': u'http://falconframework.org' - u'/v2.0/alarm-definitions/1', - u'rel': u'self'}], - u'name': u'90% CPU', - u'severity': u'LOW'}, - u'created_timestamp': u'2019-02-22T12:44:25.850947Z', - u'id': u'1', - u'lifecycle_state': u'ALARM', - u'link': u'http://somesite.com/this-alarm-info', - u'metrics': [{u'dimensions': {u'instance_id': u'123', - u'service': u'monitoring'}, - u'name': u'cpu.idle_perc'}], - u'state': u'OK', - u'state_updated_timestamp': u'2019-02-22T12:44:25.850947Z', - u'updated_timestamp': u'2019-02-22T12:44:25.850947Z'} - - response = self.simulate_request(path='/v2.0/alarms/2', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='PATCH', - body=json.dumps(alarm_new_fields)) - - self.assertEqual(response.status, falcon.HTTP_200) - self.assertThat(response, RESTResponseEquals(expected_alarm)) - - -class TestAlarmDefinition(AlarmTestBase): - def setUp(self): - super(TestAlarmDefinition, self).setUp() - - self.alarm_def_repo_mock = self.useFixture(fixtures.MockPatch( - 'monasca_api.common.repositories.' - 'alarm_definitions_repository.AlarmDefinitionsRepository' - )).mock - - self.alarm_definition_resource = alarm_definitions.AlarmDefinitions() - self.alarm_definition_resource.send_event = Mock() - self._send_event = self.alarm_definition_resource.send_event - - self.app.add_route("/v2.0/alarm-definitions", - self.alarm_definition_resource) - self.app.add_route("/v2.0/alarm-definitions/{alarm_definition_id}", - self.alarm_definition_resource) - - def test_alarm_definition_create(self): - return_value = self.alarm_def_repo_mock.return_value - return_value.get_alarm_definitions.return_value = [] - return_value.create_alarm_definition.return_value = u"00000001-0001-0001-0001-000000000001" - - alarm_def = { - "name": "Test Definition", - "expression": "test.metric > 10" - } - - expected_data = { - u'alarm_actions': [], - u'ok_actions': [], - u'description': u'', - u'match_by': [], - u'name': u'Test Definition', - u'actions_enabled': True, - u'undetermined_actions': [], - u'expression': u'test.metric > 10', - u'deterministic': False, - u'id': u'00000001-0001-0001-0001-000000000001', - u'severity': u'LOW', - } - - response = self.simulate_request(path="/v2.0/alarm-definitions/", - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method="POST", - body=json.dumps(alarm_def)) - - self.assertEqual(response.status, falcon.HTTP_201) - self.assertThat(response, RESTResponseEquals(expected_data)) - - def test_alarm_definition_create_with_valid_expressions(self): - return_value = self.alarm_def_repo_mock.return_value - return_value.get_alarm_definitions.return_value = [] - return_value.create_alarm_definition.return_value = u"00000001-0001-0001-0001-000000000001" - - valid_expressions = [ - u"max(-_.千幸福的笑脸{घोड़ा=馬, " - u"dn2=dv2,千幸福的笑脸घ=千幸福的笑脸घ}) gte 100 " - u"times 3 && " - u"(min(ເຮືອນ{dn3=dv3,家=дом}) < 10 or sum(biz{dn5=dv5}) >99 and " - u"count(fizzle) lt 0or count(baz) > 1)", - - u"max(foo{hostname=mini-mon,千=千}, 120) > 100 and (max(bar)>100 " - u" or max(biz)>100)", - - u"max(foo)>=100", - - u"test_metric{this=that, that = this} < 1", - - u"max ( 3test_metric5 { this = that }) lt 5 times 3", - - u"3test_metric5 lt 3", - - u"ntp.offset > 1 or ntp.offset < -5", - ] - - alarm_def = { - u'name': u'Test Definition', - u'expression': u'test.metric > 10' - } - - expected_data = { - u'alarm_actions': [], - u'ok_actions': [], - u'description': u'', - u'match_by': [], - u'name': u'Test 
Definition',
-            u'actions_enabled': True,
-            u'undetermined_actions': [],
-            u'expression': u'test.metric > 10',
-            u'deterministic': False,
-            u'id': u'00000001-0001-0001-0001-000000000001',
-            u'severity': u'LOW',
-        }
-
-        for expression in valid_expressions:
-            alarm_def[u'expression'] = expression
-            expected_data[u'expression'] = expression
-            response = self.simulate_request(path="/v2.0/alarm-definitions/",
-                                             headers={'X-Roles':
-                                                      CONF.security.default_authorized_roles[0],
-                                                      'X-Tenant-Id': TENANT_ID},
-                                             method="POST",
-                                             body=json.dumps(alarm_def))
-
-            self.assertEqual(response.status, falcon.HTTP_201,
-                             u'Expression {} should have passed'.format(expression))
-            self.assertThat(response, RESTResponseEquals(expected_data))
-
-    def test_alarm_definition_create_with_invalid_expressions(self):
-        bad_expressions = [
-            "test=metric > 10",
-            "test.metric{dim=this=that} > 10",
-            "test_metric(5) > 2",
-            "test_metric > 10 and or alt_metric > 10"
-        ]
-
-        alarm_def = {
-            u'name': 'Test Definition',
-            u'expression': 'test.metric > 10'
-        }
-
-        for expression in bad_expressions:
-            alarm_def[u'expression'] = expression
-            response = self.simulate_request(
-                path="/v2.0/alarm-definitions/",
-                headers={'X-Roles': CONF.security.default_authorized_roles[0],
-                         'X-Tenant-Id': TENANT_ID},
-                method="POST",
-                body=json.dumps(alarm_def))
-
-            self.assertEqual(response.status, '422 Unprocessable Entity',
-                             u'Expression {} should have failed'.format(expression))
-
-    def test_alarm_definition_create_with_occupied_alarm_definition_name(self):
-        self.alarm_def_repo_mock.return_value.get_alarm_definitions.return_value = [{
-            'alarm_actions': None,
-            'ok_actions': None,
-            'description': None,
-            'match_by': u'hostname',
-            'name': u'Test Alarm',
-            'actions_enabled': 1,
-            'undetermined_actions': None,
-            'expression': u'max(test.metric{hostname=host}) gte 1',
-            'id': u'00000001-0001-0001-0001-000000000001',
-            'severity': u'LOW'
-        }]
-        alarm_def = {
-            u'name': u'Test Definition',
-            u'expression': u'max(test.metric{hostname=host}) gte 1'
-        }
-        response = self.simulate_request(
-            path="/v2.0/alarm-definitions/",
-            headers={'X-Roles': CONF.security.default_authorized_roles[0],
-                     'X-Tenant-Id': TENANT_ID},
-            method="POST",
-            body=json.dumps(alarm_def))
-        self.assertEqual(response.status, falcon.HTTP_409)
-
-    def test_alarm_definition_update(self):
-        self.alarm_def_repo_mock.return_value.get_alarm_definitions.return_value = []
-        self.alarm_def_repo_mock.return_value.update_or_patch_alarm_definition.return_value = (
-            {u'alarm_actions': [],
-             u'ok_actions': [],
-             u'description': u'Non-ASCII character: \u2603',
-             u'match_by': u'hostname',
-             u'name': u'Test Alarm',
-             u'actions_enabled': True,
-             u'undetermined_actions': [],
-             u'is_deterministic': False,
-             u'expression': u'max(test.metric{hostname=host}) gte 1',
-             u'id': u'00000001-0001-0001-0001-000000000001',
-             u'severity': u'LOW'},
-            {'old': {'11111': sub_alarm_definition.SubAlarmDefinition(
-                row={'id': '11111',
-                     'alarm_definition_id': u'00000001-0001-0001-0001-000000000001',
-                     'function': 'max',
-                     'metric_name': 'test.metric',
-                     'dimensions': 'hostname=host',
-                     'operator': 'gte',
-                     'threshold': 1,
-                     'period': 60,
-                     'is_deterministic': False,
-                     'periods': 1})},
-             'changed': {},
-             'new': {},
-             'unchanged': {'11111': sub_alarm_definition.SubAlarmDefinition(
-                 row={'id': '11111',
-                      'alarm_definition_id': u'00000001-0001-0001-0001-000000000001',
-                      'function': 'max',
-                      'metric_name': 'test.metric',
-                      'dimensions': 'hostname=host',
-                      'operator': 'gte',
-                      'threshold': 1,
-                      'period': 60,
-
'is_deterministic': False, - 'periods': 1})}}) - - expected_def = { - u'id': u'00000001-0001-0001-0001-000000000001', - u'alarm_actions': [], - u'ok_actions': [], - u'description': u'Non-ASCII character: \u2603', - u'links': [{u'href': u'http://falconframework.org/v2.0/alarm-definitions/' - u'00000001-0001-0001-0001-000000000001', - u'rel': u'self'}], - u'match_by': [u'hostname'], - u'name': u'Test Alarm', - u'actions_enabled': True, - u'undetermined_actions': [], - u'deterministic': False, - u'expression': u'max(test.metric{hostname=host}) gte 1', - u'severity': u'LOW', - } - - alarm_def = { - u'alarm_actions': [], - u'ok_actions': [], - u'description': u'', - u'match_by': [u'hostname'], - u'name': u'Test Alarm', - u'actions_enabled': True, - u'undetermined_actions': [], - u'deterministic': False, - u'expression': u'max(test.metric{hostname=host}) gte 1', - u'severity': u'LOW', - } - - result = self.simulate_request(path="/v2.0/alarm-definitions/%s" % expected_def[u'id'], - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method="PUT", - body=json.dumps(alarm_def)) - - self.assertEqual(result.status, falcon.HTTP_200) - result_def = result.json - self.assertEqual(result_def, expected_def) - - def test_alarm_definition_patch_incorrect_id(self): - self.alarm_def_repo_mock.return_value.get_alarm_definitions.return_value = [{ - 'alarm_actions': None, - 'ok_actions': None, - 'description': None, - 'match_by': u'hostname', - 'name': u'Test Alarm', - 'actions_enabled': 1, - 'undetermined_actions': None, - 'expression': u'max(test.metric{hostname=host}) gte 1', - 'id': u'00000001-0001-0001-0001-000000000001', - 'severity': u'LOW' - }] - alarm_def = { - u'name': u'Test Alarm Definition Updated', - } - response = self.simulate_request( - path="/v2.0/alarm-definitions/9999999-0001-0001-0001-000000000001", - headers={ - 'X-Roles': CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method="PATCH", - body=json.dumps(alarm_def)) - - self.assertEqual(response.status, falcon.HTTP_409) - - def test_alarm_definition_put_incorrect_period_value(self): - self.alarm_def_repo_mock.return_value.get_alarm_definitions.return_value = [] - period = 'times 0' - alarm_def = { - u'alarm_actions': [], - u'ok_actions': [], - u'description': u'', - u'match_by': [u'hostname'], - u'name': u'Test Alarm', - u'actions_enabled': True, - u'undetermined_actions': [], - u'deterministic': False, - u'expression': u'max(test.metric{hostname=host}) gte 1 ' + period, - u'severity': u'LOW', - } - - response = self.simulate_request( - path="/v2.0/alarm-definitions/00000001-0001-0001-0001-000000000001", - headers={'X-Roles': CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method="PUT", - body=json.dumps(alarm_def)) - - self.assertEqual(response.status, falcon.HTTP_422) - - def test_alarm_definition_patch_no_id(self): - alarm_def = { - u'name': u'Test Alarm Definition Updated', - } - - response = self.simulate_request( - path="/v2.0/alarm-definitions/", - headers={ - 'X-Roles': CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method="PATCH", - body=json.dumps(alarm_def)) - - self.assertEqual(response.status, falcon.HTTP_400) - - def test_alarm_definition_update_no_id(self): - alarm_def = { - u'name': u'Test Alarm Definition Updated', - } - - response = self.simulate_request( - path="/v2.0/alarm-definitions/", - headers={ - 'X-Roles': CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method="PUT", - 
body=json.dumps(alarm_def))
-
-        self.assertEqual(response.status, falcon.HTTP_400)
-
-    def test_alarm_definition_delete(self):
-
-        self.alarm_def_repo_mock.return_value.get_sub_alarm_definitions.return_value = [{
-            'alarm_definition_id': '123',
-            'dimensions': 'flavor_id=777',
-            'function': 'AVG',
-            'id': '111',
-            'metric_name': 'cpu.idle_perc',
-            'operator': 'GT',
-            'period': 60,
-            'periods': 1,
-            'is_deterministic': False,
-            'threshold': 10.0}]
-        self.alarm_def_repo_mock.return_value.get_alarm_metrics.return_value = [{
-            'alarm_id': '1',
-            'dimensions': 'flavor_id=777',
-            'name': 'cpu.idle_perc'}]
-        self.alarm_def_repo_mock.return_value.get_sub_alarms.return_value = [{
-            'alarm_definition_id': '1',
-            'alarm_id': '2',
-            'expression': 'avg(cpu.idle_perc{flavor_id=777}) > 10',
-            'sub_alarm_id': '43'}]
-        self.alarm_def_repo_mock.return_value.delete_alarm_definition.return_value = True
-
-        response = self.simulate_request(
-            path='/v2.0/alarm-definitions/00000001-0001-0001-0001-000000000001',
-            headers={
-                'X-Roles': CONF.security.default_authorized_roles[0],
-                'X-Tenant-Id': TENANT_ID},
-            method='DELETE')
-
-        self.assertEqual(response.status, falcon.HTTP_204)
-
-    def test_alarm_definition_delete_alarm_definition_not_exist(self):
-        self.alarm_def_repo_mock.return_value.get_sub_alarm_definitions.return_value = []
-        self.alarm_def_repo_mock.return_value.get_alarm_metrics.return_value = []
-        self.alarm_def_repo_mock.return_value.get_sub_alarms.return_value = []
-        self.alarm_def_repo_mock.return_value.delete_alarm_definition.return_value = False
-
-        response = self.simulate_request(
-            path='/v2.0/alarm-definitions/00000001-0001-0001-0001-000000000001',
-            headers={
-                'X-Roles': CONF.security.default_authorized_roles[0],
-                'X-Tenant-Id': TENANT_ID},
-            method='DELETE')
-
-        self.assertEqual(response.status, falcon.HTTP_404)
-
-    def test_alarm_definition_delete_no_id(self):
-
-        response = self.simulate_request(
-            path="/v2.0/alarm-definitions/",
-            headers={
-                'X-Roles': CONF.security.default_authorized_roles[0],
-                'X-Tenant-Id': TENANT_ID},
-            method="DELETE")
-
-        self.assertEqual(response.status, falcon.HTTP_400)
-
-    def test_alarm_definition_patch(self):
-        self.alarm_def_repo_mock.return_value.get_alarm_definitions.return_value = []
-        description = u'Non-ASCII character: \u2603'
-        new_name = u'Test Alarm Updated'
-        actions_enabled = True
-        alarm_def_id = u'00000001-0001-0001-0001-000000000001'
-        alarm_expression = u'max(test.metric{hostname=host}) gte 1'
-        severity = u'LOW'
-        match_by = u'hostname'
-        self.alarm_def_repo_mock.return_value.update_or_patch_alarm_definition.return_value = (
-            {u'alarm_actions': [],
-             u'ok_actions': [],
-             u'description': description,
-             u'match_by': match_by,
-             u'name': new_name,
-             u'actions_enabled': actions_enabled,
-             u'undetermined_actions': [],
-             u'is_deterministic': False,
-             u'expression': alarm_expression,
-             u'id': alarm_def_id,
-             u'severity': severity},
-            {'old': {'11111': sub_alarm_definition.SubAlarmDefinition(
-                row={'id': '11111',
-                     'alarm_definition_id': u'00000001-0001-0001-0001-000000000001',
-                     'function': 'max',
-                     'metric_name': 'test.metric',
-                     'dimensions': 'hostname=host',
-                     'operator': 'gte',
-                     'threshold': 1,
-                     'period': 60,
-                     'is_deterministic': False,
-                     'periods': 1})},
-             'changed': {},
-             'new': {},
-             'unchanged': {'11111': sub_alarm_definition.SubAlarmDefinition(
-                 row={'id': '11111',
-                      'alarm_definition_id': u'00000001-0001-0001-0001-000000000001',
-                      'function': 'max',
-                      'metric_name': 'test.metric',
-                      'dimensions': 'hostname=host',
-                      'operator': 'gte',
-                      'threshold': 1,
-                      'period': 60,
-                      'is_deterministic': False,
-                      'periods': 1})}})
-
-        expected_def = {
-            u'id': alarm_def_id,
-            u'alarm_actions': [],
-            u'ok_actions': [],
-            u'description': description,
-            u'links': [{u'href': u'http://falconframework.org/v2.0/alarm-definitions/'
-                                 u'00000001-0001-0001-0001-000000000001',
-                        u'rel': u'self'}],
-            u'match_by': [match_by],
-            u'name': new_name,
-            u'actions_enabled': actions_enabled,
-            u'undetermined_actions': [],
-            u'deterministic': False,
-            u'expression': alarm_expression,
-            u'severity': severity,
-        }
-
-        alarm_def = {
-            u'name': u'Test Alarm Updated',
-        }
-
-        result = self.simulate_request(path="/v2.0/alarm-definitions/%s" % expected_def[u'id'],
-                                       headers={'X-Roles':
-                                                CONF.security.default_authorized_roles[0],
-                                                'X-Tenant-Id': TENANT_ID},
-                                       method="PATCH",
-                                       body=json.dumps(alarm_def))
-
-        self.assertEqual(result.status, falcon.HTTP_200)
-        result_def = result.json
-        self.assertEqual(result_def, expected_def)
-        # If the alarm-definition-updated event does not have all of the
-        # fields set, the Threshold Engine will get confused. For example,
-        # if alarmActionsEnabled is None, thresh will read that as false
-        # and pass that value on to the Notification Engine, which will not
-        # create a notification even though actions_enabled is True in the
-        # database. So, ensure all fields are set correctly.
-        ((_, event), _) = self._send_event.call_args
-        expr = u'max(test.metric{hostname=host}, 60) gte 1 times 1'
-        sub_expression = {'11111': {u'expression': expr,
-                                    u'function': 'max',
-                                    u'metricDefinition': {
-                                        u'dimensions': {'hostname': 'host'},
-                                        u'name': 'test.metric'},
-                                    u'operator': 'gte',
-                                    u'period': 60,
-                                    u'periods': 1,
-                                    u'threshold': 1}}
-        fields = {u'alarmActionsEnabled': actions_enabled,
-                  u'alarmDefinitionId': alarm_def_id,
-                  u'alarmDescription': description,
-                  u'alarmExpression': alarm_expression,
-                  u'alarmName': new_name,
-                  u'changedSubExpressions': {},
-                  u'matchBy': [match_by],
-                  u'severity': severity,
-                  u'tenantId': u'fedcba9876543210fedcba9876543210',
-                  u'newAlarmSubExpressions': {},
-                  u'oldAlarmSubExpressions': sub_expression,
-                  u'unchangedSubExpressions': sub_expression}
-        reference = {u'alarm-definition-updated': fields}
-        self.assertEqual(reference, event)
-
-    def test_alarm_definition_update_missing_fields(self):
-        self.alarm_def_repo_mock.return_value.get_alarm_definitions.return_value = []
-        self.alarm_def_repo_mock.return_value.update_or_patch_alarm_definition.return_value = (
-            {u'alarm_actions': [],
-             u'ok_actions': [],
-             u'description': u'Non-ASCII character: \u2603',
-             u'match_by': u'hostname',
-             u'name': u'Test Alarm',
-             u'actions_enabled': True,
-             u'undetermined_actions': [],
-             u'expression': u'max(test.metric{hostname=host}) gte 1',
-             u'id': u'00000001-0001-0001-0001-000000000001',
-             u'is_deterministic': False,
-             u'severity': u'LOW'},
-            {'old': {'11111': sub_alarm_definition.SubAlarmDefinition(
-                row={'id': '11111',
-                     'alarm_definition_id': u'00000001-0001-0001-0001-000000000001',
-                     'function': 'max',
-                     'metric_name': 'test.metric',
-                     'dimensions': 'hostname=host',
-                     'operator': 'gte',
-                     'threshold': 1,
-                     'period': 60,
-                     'periods': 1,
-                     'is_deterministic': False})},
-             'changed': {},
-             'new': {},
-             'unchanged': {'11111': sub_alarm_definition.SubAlarmDefinition(
-                 row={'id': '11111',
-                      'alarm_definition_id': u'00000001-0001-0001-0001-000000000001',
-                      'function': 'max',
-                      'metric_name': 'test.metric',
-                      'dimensions': 'hostname=host',
-                      'operator': 'gte',
-                      'threshold': 1,
-                      'period': 60,
-                      'periods': 1,
-
'is_deterministic': False})}}) - - expected_def = { - u'id': u'00000001-0001-0001-0001-000000000001', - u'alarm_actions': [], - u'ok_actions': [], - u'description': u'Non-ASCII character: \u2603', - u'links': [{u'href': u'http://falconframework.org/v2.0/alarm-definitions/' - u'00000001-0001-0001-0001-000000000001', - u'rel': u'self'}], - u'match_by': [u'hostname'], - u'name': u'Test Alarm', - u'actions_enabled': True, - u'undetermined_actions': [], - u'expression': u'max(test.metric{hostname=host}) gte 1', - u'severity': u'LOW', - u'deterministic': False - } - - alarm_def = { - u'alarm_actions': [], - u'ok_actions': [], - u'description': u'', - u'match_by': [u'hostname'], - u'name': u'Test Alarm', - u'actions_enabled': True, - u'undetermined_actions': [], - u'expression': u'max(test.metric{hostname=host}) gte 1', - u'severity': u'LOW' - } - - result = self.simulate_request(path="/v2.0/alarm-definitions/%s" % expected_def[u'id'], - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method="PUT", - body=json.dumps(alarm_def)) - - self.assertEqual(result.status, falcon.HTTP_200) - result_def = result.json - self.assertEqual(result_def, expected_def) - - for key, value in list(alarm_def.items()): - del alarm_def[key] - - response = self.simulate_request( - path="/v2.0/alarm-definitions/%s" % expected_def[u'id'], - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method="PUT", - body=json.dumps(alarm_def)) - self.assertEqual(response.status, "422 Unprocessable Entity", - u"should have failed without key {}".format(key)) - alarm_def[key] = value - - def test_alarm_definition_get_specific_alarm(self): - - self.alarm_def_repo_mock.return_value.get_alarm_definition.return_value = { - 'alarm_actions': None, - 'ok_actions': None, - # The description field was decoded to unicode when the - # alarm_definition was created. 
- 'description': u'Non-ASCII character: \u2603', - 'match_by': u'hostname', - 'name': u'Test Alarm', - 'actions_enabled': 1, - 'undetermined_actions': None, - 'deterministic': False, - 'expression': u'max(test.metric{hostname=host}) gte 1', - 'id': u'00000001-0001-0001-0001-000000000001', - 'severity': u'LOW' - } - - expected_data = { - u'alarm_actions': [], - u'ok_actions': [], - u'description': u'Non-ASCII character: \u2603', - u'match_by': [u'hostname'], - u'name': u'Test Alarm', - u'actions_enabled': True, - u'undetermined_actions': [], - u'deterministic': False, - u'expression': u'max(test.metric{hostname=host}) gte 1', - u'id': u'00000001-0001-0001-0001-000000000001', - u'severity': u'LOW', - } - - response = self.simulate_request( - path='/v2.0/alarm-definitions/%s' % (expected_data[u'id']), - headers={ - 'X-Roles': CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID, - }) - - self.assertEqual(response.status, falcon.HTTP_200) - self.assertThat(response, RESTResponseEquals(expected_data)) - - def test_alarm_definition_get_specific_alarm_description_none(self): - - self.alarm_def_repo_mock.return_value.get_alarm_definition.return_value = { - 'alarm_actions': None, - 'ok_actions': None, - 'description': None, - 'match_by': u'hostname', - 'name': u'Test Alarm', - 'actions_enabled': 1, - 'undetermined_actions': None, - 'expression': u'max(test.metric{hostname=host}) gte 1', - 'id': u'00000001-0001-0001-0001-000000000001', - 'severity': u'LOW' - } - - expected_data = { - u'alarm_actions': [], - u'ok_actions': [], - u'description': None, - u'match_by': [u'hostname'], - u'name': u'Test Alarm', - u'actions_enabled': True, - u'undetermined_actions': [], - u'deterministic': False, - u'expression': u'max(test.metric{hostname=host}) gte 1', - u'id': u'00000001-0001-0001-0001-000000000001', - u'severity': u'LOW', - } - - response = self.simulate_request( - path='/v2.0/alarm-definitions/%s' % (expected_data[u'id']), - headers={ - 'X-Roles': CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID, - }) - - self.assertEqual(response.status, falcon.HTTP_200) - self.assertThat(response, RESTResponseEquals(expected_data)) - - def test_get_alarm_definitions_with_multibyte_character(self): - def_name = 'alarm_definition' - if six.PY2: - def_name = def_name.decode('utf8') - - expected_data = { - u'alarm_actions': [], u'ok_actions': [], - u'description': None, u'match_by': [u'hostname'], - u'actions_enabled': True, u'undetermined_actions': [], - u'deterministic': False, - u'expression': u'max(test.metric{hostname=host}) gte 1', - u'id': u'00000001-0001-0001-0001-000000000001', - u'severity': u'LOW', u'name': def_name - } - - self.alarm_def_repo_mock.return_value.get_alarm_definition.return_value = { - 'alarm_actions': None, - 'ok_actions': None, - 'description': None, - 'match_by': u'hostname', - 'name': def_name, - 'actions_enabled': 1, - 'undetermined_actions': None, - 'expression': u'max(test.metric{hostname=host}) gte 1', - 'id': u'00000001-0001-0001-0001-000000000001', - 'severity': u'LOW' - } - - response = self.simulate_request( - path='/v2.0/alarm-definitions/%s' % (expected_data[u'id']), - headers={ - 'X-Roles': CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID, - } - ) - self.assertEqual(response.status, falcon.HTTP_200) - self.assertThat(response, RESTResponseEquals(expected_data)) - - def test_alarm_definition_get_alarm_definition_list(self): - self.alarm_def_repo_mock.return_value.get_alarm_definitions.return_value = [{ - 'alarm_actions': 
None, - 'ok_actions': None, - 'description': None, - 'match_by': u'hostname', - 'name': u'Test Alarm', - 'actions_enabled': 1, - 'undetermined_actions': None, - 'expression': u'max(test.metric{hostname=host}) gte 1', - 'id': u'00000001-0001-0001-0001-000000000001', - 'severity': u'LOW' - }] - link = 'http://falconframework.org/v2.0/alarm-definitions/' \ - '00000001-0001-0001-0001-000000000001' - expected_data = { - u'elements': [{ - u'alarm_actions': [], - u'ok_actions': [], - u'description': '', - u'match_by': [u'hostname'], - u'name': u'Test Alarm', - u'actions_enabled': True, - u'undetermined_actions': [], - u'deterministic': False, - u'expression': u'max(test.metric{hostname=host}) gte 1', - u'id': u'00000001-0001-0001-0001-000000000001', - 'links': [{ - 'href': link, - 'rel': 'self'}], - u'severity': u'LOW'}] - } - - response = self.simulate_request( - path='/v2.0/alarm-definitions', - headers={ - 'X-Roles': CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID - }, - query_string='name=Test Alarm') - - self.assertEqual(response.status, falcon.HTTP_200) - self.assertThat(response, RESTResponseEquals(expected_data)) - - response = self.simulate_request( - path='/v2.0/alarm-definitions', - headers={ - 'X-Roles': CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID - }, - query_string='sort_by=name') - - self.assertEqual(response.status, falcon.HTTP_200) - self.assertThat(response, RESTResponseEquals(expected_data)) - - response = self.simulate_request( - path='/v2.0/alarm-definitions', - headers={ - 'X-Roles': CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID - }, - query_string='severity=LOW') - self.assertEqual(response.status, falcon.HTTP_200) - self.assertThat(response, RESTResponseEquals(expected_data)) - - response = self.simulate_request( - path='/v2.0/alarm-definitions', - headers={ - 'X-Roles': CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID - }, - query_string='offset=1') - self.assertEqual(response.status, falcon.HTTP_200) - self.assertThat(response, RESTResponseEquals(expected_data)) - - def test_alarm_definition_get_alarm_definition_list_incorrect(self): - response = self.simulate_request( - path='/v2.0/alarm-definitions', - headers={ - 'X-Roles': CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID - }, - query_string='offset=definitelyNotINT') - self.assertEqual(response.status, falcon.HTTP_422) - - def test_alarm_definition_get_query_alarm_definition_name(self): - alarm_def = { - u'alarm_actions': [], - u'ok_actions': [], - u'description': u'', - u'match_by': [u'hostname'], - u'name': u'Test Alarm', - u'actions_enabled': True, - u'undetermined_actions': [], - u'deterministic': False, - u'expression': u'max(test.metric{hostname=host}) gte 1', - u'severity': u'LOW', - } - name = alarm_definitions.get_query_alarm_definition_name(alarm_def) - self.assertEqual(alarm_def['name'], name) - - alarm_def.pop('name') - - self.assertRaises(HTTPUnprocessableEntityError, - alarm_definitions.get_query_alarm_definition_name, - alarm_def) - - name = alarm_definitions.get_query_alarm_definition_name(alarm_def, return_none=True) - self.assertIsNone(name) - - def test_alarm_definition_get_query_alarm_definition_expression(self): - alarm_def = { - u'alarm_actions': [], - u'ok_actions': [], - u'description': u'', - u'match_by': [u'hostname'], - u'name': u'Test Alarm', - u'actions_enabled': True, - u'undetermined_actions': [], - u'deterministic': False, - u'expression': u'max(test.metric{hostname=host}) 
gte 1', - u'severity': u'LOW', - } - expression = alarm_definitions.get_query_alarm_definition_expression(alarm_def) - self.assertEqual(alarm_def['expression'], expression) - - alarm_def.pop('expression') - - self.assertRaises(HTTPUnprocessableEntityError, - alarm_definitions.get_query_alarm_definition_expression, - alarm_def) - - expression = alarm_definitions.get_query_alarm_definition_expression(alarm_def, - return_none=True) - self.assertIsNone(expression) - - def test_alarm_definition_get_query_alarm_definition_description(self): - alarm_def = { - u'alarm_actions': [], - u'ok_actions': [], - u'description': u'Short description', - u'match_by': [u'hostname'], - u'name': u'Test Alarm', - u'actions_enabled': True, - u'undetermined_actions': [], - u'deterministic': False, - u'expression': u'max(test.metric{hostname=host}) gte 1', - u'severity': u'LOW', - } - description = alarm_definitions.get_query_alarm_definition_description(alarm_def) - self.assertEqual(alarm_def['description'], description) - - alarm_def.pop('description') - - description = alarm_definitions.get_query_alarm_definition_description(alarm_def) - self.assertEqual('', description) - - description = alarm_definitions.get_query_alarm_definition_description(alarm_def, - return_none=True) - self.assertIsNone(description) - - def test_alarm_definition_get_query_alarm_definition_severity(self): - alarm_def = { - u'alarm_actions': [], - u'ok_actions': [], - u'description': u'', - u'match_by': [u'hostname'], - u'name': u'Test Alarm', - u'actions_enabled': True, - u'undetermined_actions': [], - u'deterministic': False, - u'expression': u'max(test.metric{hostname=host}) gte 1', - u'severity': u'CRITICAL', - } - severity = alarm_definitions.get_query_alarm_definition_severity(alarm_def) - self.assertEqual(alarm_def['severity'], severity) - - alarm_def['severity'] = u'Why so serious' - self.assertRaises(HTTPUnprocessableEntityError, - alarm_definitions.get_query_alarm_definition_severity, - alarm_def) - - alarm_def.pop('severity') - severity = alarm_definitions.get_query_alarm_definition_severity(alarm_def) - self.assertEqual('LOW', severity) - - severity = alarm_definitions.get_query_alarm_definition_severity(alarm_def, - return_none=True) - self.assertIsNone(severity) - - def test_alarm_definition_get_query_alarm_definition_match_by(self): - alarm_def = { - u'alarm_actions': [], - u'ok_actions': [], - u'description': u'', - u'match_by': [u'hostname'], - u'name': u'Test Alarm', - u'actions_enabled': True, - u'undetermined_actions': [], - u'deterministic': False, - u'expression': u'max(test.metric{hostname=host}) gte 1', - u'severity': u'LOW', - } - match_by = alarm_definitions.get_query_alarm_definition_match_by(alarm_def) - self.assertEqual(alarm_def['match_by'], match_by) - - alarm_def.pop('match_by') - - match_by = alarm_definitions.get_query_alarm_definition_match_by(alarm_def) - self.assertEqual([], match_by) - - expression = alarm_definitions.get_query_alarm_definition_match_by(alarm_def, - return_none=True) - self.assertIsNone(expression) - - def test_alarm_definition_get_query_alarm_definition_alarm_actions(self): - alarm_def = { - u'alarm_actions': 'c60ec47e-5038-4bf1-9f95-4046c6e9a759', - u'ok_actions': [], - u'description': u'', - u'match_by': [u'hostname'], - u'name': u'Test Alarm', - u'actions_enabled': True, - u'undetermined_actions': [], - u'deterministic': False, - u'expression': u'max(test.metric{hostname=host}) gte 1', - u'severity': u'LOW', - } - alarm_actions = 
alarm_definitions.get_query_alarm_definition_alarm_actions(alarm_def) - self.assertEqual(alarm_def['alarm_actions'], alarm_actions) - - alarm_def.pop('alarm_actions') - - alarm_actions = alarm_definitions.get_query_alarm_definition_alarm_actions(alarm_def) - self.assertEqual([], alarm_actions) - - alarm_actions = alarm_definitions.get_query_alarm_definition_alarm_actions(alarm_def, - return_none=True) - self.assertIsNone(alarm_actions) - - def test_alarm_definition_get_query_alarm_definition_undetermined_actions(self): - alarm_def = { - u'alarm_actions': 'c60ec47e-5038-4bf1-9f95-4046c6e9a759', - u'ok_actions': [], - u'description': u'', - u'match_by': [u'hostname'], - u'name': u'Test Alarm', - u'actions_enabled': True, - u'undetermined_actions': 'c60ec47e-5038-4bf1-9f95-4046c6e9a759', - u'deterministic': False, - u'expression': u'max(test.metric{hostname=host}) gte 1', - u'severity': u'LOW', - } - undetermined_actions = \ - alarm_definitions.get_query_alarm_definition_undetermined_actions(alarm_def) - self.assertEqual(alarm_def['undetermined_actions'], undetermined_actions) - - alarm_def.pop('undetermined_actions') - - undetermined_actions = \ - alarm_definitions.get_query_alarm_definition_undetermined_actions(alarm_def) - self.assertEqual([], undetermined_actions) - - undetermined_actions = \ - alarm_definitions.get_query_alarm_definition_undetermined_actions(alarm_def, - return_none=True) - self.assertIsNone(undetermined_actions) - - def test_alarm_definition_get_query_alarm_definition_ok_actions(self): - alarm_def = { - u'ok_actions': 'c60ec47e-5038-4bf1-9f95-4046c6e9a759', - u'description': u'', - u'match_by': [u'hostname'], - u'name': u'Test Alarm', - u'actions_enabled': True, - u'undetermined_actions': [], - u'deterministic': False, - u'expression': u'max(test.metric{hostname=host}) gte 1', - u'severity': u'LOW', - } - ok_actions = alarm_definitions.get_query_ok_actions(alarm_def) - self.assertEqual(alarm_def['ok_actions'], ok_actions) - - alarm_def.pop('ok_actions') - - ok_actions = alarm_definitions.get_query_ok_actions(alarm_def) - self.assertEqual([], ok_actions) - - ok_actions = alarm_definitions.get_query_ok_actions(alarm_def, return_none=True) - self.assertIsNone(ok_actions) - - def test_alarm_definition_get_query_alarm_definition_actions_enabled(self): - alarm_def = { - u'alarm_actions': 'c60ec47e-5038-4bf1-9f95-4046c6e9a759', - u'ok_actions': [], - u'description': u'', - u'match_by': [u'hostname'], - u'name': u'Test Alarm', - u'actions_enabled': True, - u'undetermined_actions': [], - u'deterministic': False, - u'expression': u'max(test.metric{hostname=host}) gte 1', - u'severity': u'LOW', - } - actions_enabled = alarm_definitions.get_query_alarm_definition_actions_enabled(alarm_def) - self.assertEqual(alarm_def['actions_enabled'], actions_enabled) - - alarm_def.pop('actions_enabled') - - actions_enabled = alarm_definitions.get_query_alarm_definition_actions_enabled(alarm_def) - self.assertEqual('', actions_enabled) - - actions_enabled = alarm_definitions.\ - get_query_alarm_definition_actions_enabled(alarm_def, - return_none=True) - self.assertIsNone(actions_enabled) - - actions_enabled = alarm_definitions. 
\ - get_query_alarm_definition_actions_enabled(alarm_def, - required=True, - return_none=True) - self.assertIsNone(actions_enabled) - - self.assertRaises(HTTPUnprocessableEntityError, - alarm_definitions.get_query_alarm_definition_actions_enabled, - alarm_def, - required=True, - return_none=False) - - def test_alarm_definition_get_query_alarm_definition_is_definition_deterministic(self): - expression = u'max(test.metric{hostname=host}) gte 1' - is_deterministic = alarm_definitions.is_definition_deterministic(expression) - self.assertEqual(False, is_deterministic) - - expression = u'max(test.metric{hostname=host}, deterministic) gte 1' - is_deterministic = alarm_definitions.is_definition_deterministic(expression) - self.assertEqual(True, is_deterministic) diff --git a/monasca_api/tests/test_alarms_db_health_check.py b/monasca_api/tests/test_alarms_db_health_check.py deleted file mode 100644 index e56ec577a..000000000 --- a/monasca_api/tests/test_alarms_db_health_check.py +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright 2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from unittest import mock - -from monasca_api import config -from monasca_api.healthcheck import alarms_db_check as rdc -from monasca_api.tests import base - -CONF = config.CONF - - -class TestMetricsDbHealthCheckLogic(base.BaseTestCase): - - db_connection = "mysql+pymysql://test:test@localhost/mon?charset=utf8mb4" - mocked_config = { - 'connection': db_connection - } - - def setUp(self): - super(TestMetricsDbHealthCheckLogic, self).setUp() - self.conf_default(group='database', **self.mocked_config) - - @classmethod - def tearDownClass(cls): - if hasattr(CONF, 'sql_engine'): - delattr(CONF, 'sql_engine') - - @mock.patch('monasca_api.healthcheck.alarms_db_check.' - 'sql_repository.get_engine') - def test_should_pass_db_ok(self, _): - - db_health = rdc.AlarmsDbHealthCheck() - db_health.check_db_status = mock.Mock(return_value=(True, 'OK')) - result = db_health.health_check() - - self.assertTrue(result.healthy) - self.assertEqual('OK', result.message) - - @mock.patch('monasca_api.healthcheck.alarms_db_check.' - 'sql_repository.get_engine') - def test_should_fail_db_unavailable(self, _): - - db_health = rdc.AlarmsDbHealthCheck() - db_health.check_db_status = mock.Mock(return_value=(False, 'bar')) - result = db_health.health_check() - - self.assertFalse(result.healthy) - self.assertEqual('bar', result.message) diff --git a/monasca_api/tests/test_config_types.py b/monasca_api/tests/test_config_types.py deleted file mode 100644 index abb839869..000000000 --- a/monasca_api/tests/test_config_types.py +++ /dev/null @@ -1,55 +0,0 @@ -# (C) Copyright 2016-2017 Hewlett Packard Enterprise Development LP -# Copyright 2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -from monasca_api.conf import types -from monasca_api.tests import base - - -class TestHostAddressPortType(base.BaseTestCase): - def setUp(self): - super(TestHostAddressPortType, self).setUp() - self.types = types.HostAddressPortType() - - def test_ip_address(self): - self.assertEqual('127.0.0.1:2121', self.types('127.0.0.1:2121')) - - def test_hostname(self): - self.assertEqual('localhost:2121', self.types('localhost:2121')) - - def test_ipv6_address(self): - self.assertEqual('2001:db8:85a3::8a2e:370:2121', - self.types('[2001:db8:85a3::8a2e:370]:2121')) - - def test_ipv6_hostname(self): - self.assertEqual('::1:2121', self.types('[::1]:2121')) - - # failure scenario - def test_missing_port(self): - self.assertRaises(ValueError, self.types, '127.0.0.1') - - def test_missing_address(self): - self.assertRaises(ValueError, self.types, ':123') - - def test_incorrect_ip(self): - self.assertRaises(ValueError, self.types, '127.surprise.0.1:2121') - - def test_incorrect_ipv6(self): - self.assertRaises(ValueError, self.types, '[2001:db8:8a2e:370]:2121') - - def test_incorrect_port(self): - self.assertRaises(ValueError, self.types, '127.0.0.1:65536') - self.assertRaises(ValueError, self.types, '127.0.0.1:sample') diff --git a/monasca_api/tests/test_healthchecks.py b/monasca_api/tests/test_healthchecks.py deleted file mode 100644 index 92de83de1..000000000 --- a/monasca_api/tests/test_healthchecks.py +++ /dev/null @@ -1,137 +0,0 @@ -# Copyright 2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import falcon -from unittest import mock - -from monasca_api import config -from monasca_api.healthcheck import base -from monasca_api import healthchecks -from monasca_api.tests import base as test_base - - -CONF = config.CONF -ENDPOINT = '/healthcheck' - - -class TestHealthChecks(test_base.BaseApiTestCase): - - @classmethod - def tearDownClass(cls): - if hasattr(CONF, 'sql_engine'): - delattr(CONF, 'sql_engine') - - def setUp(self): - super(TestHealthChecks, self).setUp() - self.resources = healthchecks.HealthChecks() - self.app.add_route( - ENDPOINT, - self.resources - ) - - @mock.patch('monasca_api.healthcheck.alarms_db_check.sql_repository.get_engine') - @mock.patch( - 'monasca_api.healthcheck.metrics_db_check.MetricsDbCheck') - def test_should_return_200_for_head(self, metrics_db_check, _): - result = self.simulate_request(path=ENDPOINT, method='HEAD') - self.assertEqual(falcon.HTTP_NO_CONTENT, result.status) - - @mock.patch('monasca_api.healthcheck.kafka_check.KafkaHealthCheck') - @mock.patch( - 'monasca_api.healthcheck.alarms_db_check.AlarmsDbHealthCheck') - @mock.patch( - 'monasca_api.healthcheck.metrics_db_check.MetricsDbCheck') - @mock.patch( - 'monasca_api.healthcheck.alarms_db_check.sql_repository.SQLRepository') - def test_should_report_healthy_if_all_services_healthy(self, kafka_check, - alarms_db_check, - metrics_db_check, - _): - kafka_check.health_check.return_value = base.CheckResult(True, 'OK') - alarms_db_check.health_check.return_value = base.CheckResult(True, - 'OK') - metrics_db_check.health_check.return_value = base.CheckResult(True, - 'OK') - self.resources._kafka_check = kafka_check - self.resources._alarm_db_check = alarms_db_check - self.resources._metrics_db_check = metrics_db_check - - response = self.simulate_request(path=ENDPOINT, - headers={ - 'Content-Type': 'application/json' - }, - method='GET') - self.assertEqual(falcon.HTTP_OK, response.status) - - response = response.json - self.assertIn('kafka', response) - self.assertIn('alarms_database', response) - self.assertIn('metrics_database', response) - self.assertEqual('OK', response.get('kafka')) - self.assertEqual('OK', response.get('alarms_database')) - self.assertEqual('OK', response.get('metrics_database')) - - @mock.patch('monasca_api.healthcheck.kafka_check.KafkaHealthCheck') - @mock.patch( - 'monasca_api.healthcheck.alarms_db_check.AlarmsDbHealthCheck') - @mock.patch( - 'monasca_api.healthcheck.metrics_db_check.MetricsDbCheck') - @mock.patch( - 'monasca_api.healthcheck.alarms_db_check.sql_repository.SQLRepository') - def test_should_report_not_healthy_if_one_service_not_healthy(self, - kafka_check, - alarms_db_check, - metrics_db_check, - _): - test_list = [ - {'kafka': {'healthy': False, 'message': 'Unavailable'}, - 'alarms_db': {'healthy': True, 'message': 'OK'}, - 'metrics_db': {'healthy': True, 'message': 'OK'} - }, - {'kafka': {'healthy': True, 'message': 'OK'}, - 'alarms_db': {'healthy': False, 'message': 'Connection Error'}, - 'metrics_db': {'healthy': True, 'message': 'OK'} - }, - {'kafka': {'healthy': True, 'message': 'OK'}, - 'alarms_db': {'healthy': True, 'message': 'OK'}, - 'metrics_db': {'healthy': False, 'message': 'Error'} - }, - ] - - for service in test_list: - kafka_check.health_check.return_value = base.CheckResult(service['kafka']['healthy'], - service['kafka']['message']) - alarms_db_check.health_check.return_value = base.CheckResult( - service['alarms_db']['healthy'], service['alarms_db']['message']) - metrics_db_check.health_check.return_value = base.CheckResult( - 
service['metrics_db']['healthy'], service['metrics_db']['message']) - self.resources._kafka_check = kafka_check - self.resources._alarm_db_check = alarms_db_check - self.resources._metrics_db_check = metrics_db_check - - response = self.simulate_request(path=ENDPOINT, - headers={ - 'Content-Type': 'application/json' - }, - method='GET') - self.assertEqual(falcon.HTTP_SERVICE_UNAVAILABLE, - response.status) - - response = response.json - self.assertIn('kafka', response) - self.assertIn('alarms_database', response) - self.assertIn('metrics_database', response) - self.assertEqual(service['kafka']['message'], response.get('kafka')) - self.assertEqual(service['alarms_db']['message'], response.get('alarms_database')) - self.assertEqual(service['metrics_db']['message'], response.get('metrics_database')) diff --git a/monasca_api/tests/test_helpers.py b/monasca_api/tests/test_helpers.py deleted file mode 100644 index 231477eca..000000000 --- a/monasca_api/tests/test_helpers.py +++ /dev/null @@ -1,148 +0,0 @@ -# Copyright 2018 OP5 AB -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from falcon import errors -from falcon import testing - -from oslo_policy import policy as os_policy - -from monasca_api.api.core import request -from monasca_api.common.policy import policy_engine as policy -from monasca_api.tests import base -import monasca_api.v2.reference.helpers as helpers - -from monasca_api.common.rest import utils as rest_utils - - -class TestHelpersFunction(base.BaseTestCase): - - def test_from_json(self): - body_json = {'test_body': 'test'} - req = request.Request( - testing.create_environ( - body=rest_utils.as_json(body_json), - ) - ) - response = helpers.from_json(req) - self.assertEqual(body_json, response) - - def test_from_json_incorrect_message(self): - req = request.Request( - testing.create_environ( - body='incorrect message', - ) - ) - self.assertRaises(errors.HTTPBadRequest, helpers.from_json, req) - - def test_to_json(self): - test_dict = {'test_body': 'test'} - expected_json = '{"test_body": "test"}' - response = helpers.to_json(test_dict) - self.assertEqual(expected_json, response) - - def test_validate_json_content_type(self): - req = request.Request( - testing.create_environ( - headers={'Content-Type': 'application/json'} - ) - ) - helpers.validate_json_content_type(req) - - def test_validate_json_content_type_incorrect_content_type(self): - req = request.Request( - testing.create_environ( - headers={'Content-Type': 'multipart/form-data'} - ) - ) - self.assertRaises(errors.HTTPBadRequest, helpers.validate_json_content_type, req) - - def test_validate_json_content_type_missing_content_type(self): - req = request.Request(testing.create_environ()) - self.assertRaises(errors.HTTPBadRequest, helpers.validate_json_content_type, req) - - -class TestGetXTenantOrTenantId(base.BaseApiTestCase): - def setUp(self): - super(TestGetXTenantOrTenantId, self).setUp() - rules = [ - os_policy.RuleDefault("example:allowed", "@"), - os_policy.RuleDefault("example:denied", "!"), - 
os_policy.RuleDefault("example:authorized", - "role:role_1 or role:role_2") - ] - policy.reset() - policy.init() - policy._ENFORCER.register_defaults(rules) - - def test_return_tenant_id_on_authorized_roles(self): - - for role in ['role_1', 'role_2']: - req_context = self._get_request_context(role) - self.assertEqual( - 'fake_tenant_id', - helpers.get_x_tenant_or_tenant_id( - req_context, ['example:authorized'] - ) - ) - - def test_return_tenant_id_on_allowed_rules(self): - req_context = self._get_request_context() - self.assertEqual( - 'fake_tenant_id', - helpers.get_x_tenant_or_tenant_id( - req_context, - ['example:allowed'] - ) - ) - - def test_return_project_id_on_unauthorized_role(self): - req_context = self._get_request_context() - self.assertEqual('fake_project_id', - helpers.get_x_tenant_or_tenant_id( - req_context, - ['example:authorized'])) - - def test_return_project_id_on_denied_rules(self): - req_context = self._get_request_context() - self.assertEqual( - 'fake_project_id', - helpers.get_x_tenant_or_tenant_id( - req_context, - ['example:denied'] - ) - ) - - def test_return_project_id_on_unavailable_tenant_id(self): - req_context = self._get_request_context() - req_context.query_string = '' - self.assertEqual( - 'fake_project_id', - helpers.get_x_tenant_or_tenant_id( - req_context, - ['example:allowed'] - ) - ) - - @staticmethod - def _get_request_context(role='fake_role'): - return request.Request( - testing.create_environ( - path="/", - query_string="tenant_id=fake_tenant_id", - headers={ - "X_PROJECT_ID": "fake_project_id", - "X_ROLES": role - } - ) - ) diff --git a/monasca_api/tests/test_kafka_health_check.py b/monasca_api/tests/test_kafka_health_check.py deleted file mode 100644 index dc004ddb5..000000000 --- a/monasca_api/tests/test_kafka_health_check.py +++ /dev/null @@ -1,86 +0,0 @@ -# Copyright 2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from unittest import mock - -from six import PY3 - -from monasca_common.kafka_lib import client - -from monasca_api import config -from monasca_api.healthcheck import kafka_check as kc -from monasca_api.tests import base - -CONF = config.CONF - - -class TestKafkaHealthCheckLogic(base.BaseTestCase): - - mock_kafka_url = 'localhost:1234' - mocked_topics = b'test1' - mocked_event_topic = b'test2' - mocked_alarm_state_topic = b'test3' - mocked_config = { - 'uri': mock_kafka_url, - 'metrics_topic': mocked_topics.decode('utf-8') if PY3 else mocked_topics, - 'events_topic': mocked_event_topic.decode('utf-8') if PY3 else mocked_event_topic, - 'alarm_state_transitions_topic': - mocked_alarm_state_topic.decode('utf-8') if PY3 else mocked_alarm_state_topic - } - - def __init__(self, *args, **kwargs): - super(TestKafkaHealthCheckLogic, self).__init__(*args, **kwargs) - self._conf = None - - def setUp(self): - super(TestKafkaHealthCheckLogic, self).setUp() - self.conf_default(group='kafka', **self.mocked_config) - - @mock.patch('monasca_api.healthcheck.kafka_check.client.KafkaClient') - def test_should_fail_kafka_unavailable(self, kafka_client): - kafka = mock.Mock() - kafka_client.side_effect = client.KafkaUnavailableError() - kafka_client.return_value = kafka - - kafka_health = kc.KafkaHealthCheck() - result = kafka_health.health_check() - - self.assertFalse(result.healthy) - kafka.close.assert_not_called() - - @mock.patch('monasca_api.healthcheck.kafka_check.client.KafkaClient') - def test_should_fail_missing_topic(self, kafka_client): - kafka = mock.Mock() - kafka.topics = ['topic1'] - kafka_client.return_value = kafka - - kafka_health = kc.KafkaHealthCheck() - result = kafka_health.health_check() - - self.assertFalse(result.healthy) - kafka.close.assert_called_once() - - @mock.patch('monasca_api.healthcheck.kafka_check.client.KafkaClient') - def test_should_pass(self, kafka_client): - kafka = mock.Mock() - kafka.topics = [self.mocked_topics, - self.mocked_event_topic, - self.mocked_alarm_state_topic] - kafka_client.return_value = kafka - - kafka_health = kc.KafkaHealthCheck() - result = kafka_health.health_check() - - self.assertTrue(result.healthy) - kafka.close.assert_called_once() diff --git a/monasca_api/tests/test_keystone_protocol.py b/monasca_api/tests/test_keystone_protocol.py deleted file mode 100644 index 1e99b727a..000000000 --- a/monasca_api/tests/test_keystone_protocol.py +++ /dev/null @@ -1,44 +0,0 @@ -# Copyright 2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from unittest import mock - -from monasca_api.healthcheck import keystone_protocol -from monasca_api.tests import base - -_CONF = {} - - -class TestKeystoneProtocol(base.BaseTestCase): - - def test_should_return_none_if_healthcheck(self): - mocked_api = mock.Mock() - instance = keystone_protocol.SkippingAuthProtocol(mocked_api, _CONF) - request = mock.Mock() - request.path = '/healthcheck' - - ret_val = instance.process_request(request) - - self.assertIsNone(ret_val) - - @mock.patch('keystonemiddleware.auth_token.AuthProtocol.process_request') - def test_should_enter_keystone_auth_if_not_healthcheck(self, proc_request): - mocked_api = mock.Mock() - instance = keystone_protocol.SkippingAuthProtocol(mocked_api, _CONF) - request = mock.Mock() - request.path = '/v2.0/logs/single' - - instance.process_request(request) - - self.assertTrue(proc_request.called) diff --git a/monasca_api/tests/test_log_publisher.py b/monasca_api/tests/test_log_publisher.py deleted file mode 100644 index 42e5494f1..000000000 --- a/monasca_api/tests/test_log_publisher.py +++ /dev/null @@ -1,291 +0,0 @@ -# Copyright 2015 kornicameister@gmail.com -# Copyright 2016-2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import copy -import datetime -import random -from unittest import mock - -from oslo_config import cfg -from oslo_log import log -import simplejson as json -import six -import unittest - -from monasca_api.api.core.log import log_publisher -from monasca_api.api.core.log import model -from monasca_api.tests import base - - -LOG = log.getLogger(__name__) -EPOCH_START = datetime.datetime(1970, 1, 1) - - -class TestSendMessage(base.BaseTestCase): - - @mock.patch('monasca_api.api.core.log.log_publisher.client_factory' - '.get_kafka_producer') - def test_should_not_send_empty_message(self, _): - instance = log_publisher.LogPublisher() - - instance._kafka_publisher = mock.Mock() - instance.send_message({}) - - self.assertFalse(instance._kafka_publisher.publish.called) - - @unittest.expectedFailure - def test_should_not_send_message_not_dict(self): - instance = log_publisher.LogPublisher() - not_dict_value = 123 - instance.send_message(not_dict_value) - - @mock.patch('monasca_api.api.core.log.log_publisher.client_factory' - '.get_kafka_producer') - def test_should_not_send_message_missing_keys(self, _): - # checks every combination of missing keys - # test does not rely on those keys having a value or not, - # it simply assumes that values are set but important - # message (i.e. 
envelope) properties are missing entirely - that's why there are two loops instead of three - - instance = log_publisher.LogPublisher() - keys = ['log', 'creation_time', 'meta'] - - for key_1 in keys: - diff = keys[:] - diff.remove(key_1) - for key_2 in diff: - message = { - key_1: random.randint(10, 20), - key_2: random.randint(30, 50) - } - self.assertRaises(log_publisher.InvalidMessageException, - instance.send_message, - message) - - @mock.patch('monasca_api.api.core.log.log_publisher.client_factory' - '.get_kafka_producer') - def test_should_not_send_message_missing_values(self, _): - # the original message assumes that every property has a value; - # the test modifies each property one by one by removing its value - # (i.e. replacing it with a falsy value) - instance = log_publisher.LogPublisher() - message = { - 'log': { - 'message': '11' - }, - 'creation_time': 123456, - 'meta': { - 'region': 'pl' - } - } - - for key in message: - # copy the message so a value removed in an earlier iteration - # does not leak into later ones - tmp_message = copy.deepcopy(message) - tmp_message[key] = None - self.assertRaises(log_publisher.InvalidMessageException, - instance.send_message, - tmp_message) - - @mock.patch('monasca_api.api.core.log.log_publisher.client_factory' - '.get_kafka_producer') - def test_should_send_message(self, kafka_producer): - instance = log_publisher.LogPublisher() - instance._kafka_publisher = kafka_producer - instance.send_message({}) - - creation_time = ((datetime.datetime.utcnow() - EPOCH_START) - .total_seconds()) - application_type = 'monasca-log-api' - dimension_1_name = 'disk_usage' - dimension_1_value = '50' - dimension_2_name = 'cpu_time' - dimension_2_value = '60' - - msg = model.Envelope( - log={ - 'message': '1', - 'application_type': application_type, - 'dimensions': { - dimension_1_name: dimension_1_value, - dimension_2_name: dimension_2_value - } - }, - meta={ - 'tenantId': '1' - } - ) - msg['creation_time'] = creation_time - instance.send_message(msg) - - instance._kafka_publisher.publish.assert_called_once_with( - cfg.CONF.kafka.logs_topics[0], - [json.dumps(msg, ensure_ascii=False).encode('utf-8')]) - - @mock.patch('monasca_api.api.core.log.log_publisher.client_factory' - '.get_kafka_producer') - def test_should_send_message_multiple_topics(self, _): - topics = ['logs_topics', 'analyzer', 'tester'] - self.conf_override(logs_topics=topics, - group='kafka') - self.conf_override(max_message_size=5000, - group='log_publisher') - - instance = log_publisher.LogPublisher() - instance._kafka_publisher = mock.Mock() - instance.send_message({}) - - creation_time = ((datetime.datetime.utcnow() - EPOCH_START) - .total_seconds()) - dimension_1_name = 'disk_usage' - dimension_1_value = '50' - dimension_2_name = 'cpu_time' - dimension_2_value = '60' - application_type = 'monasca-log-api' - msg = model.Envelope( - log={ - 'message': '1', - 'application_type': application_type, - 'dimensions': { - dimension_1_name: dimension_1_value, - dimension_2_name: dimension_2_value - } - }, - meta={ - 'tenantId': '1' - } - ) - msg['creation_time'] = creation_time - json_msg = json.dumps(msg, ensure_ascii=False) - - instance.send_message(msg) - - self.assertEqual(len(topics), - instance._kafka_publisher.publish.call_count) - for topic in topics: - instance._kafka_publisher.publish.assert_any_call( - topic, - [json_msg.encode('utf-8')]) - - @mock.patch('monasca_api.api.core.log.log_publisher.client_factory' - '.get_kafka_producer') - def test_should_send_unicode_message(self, kp): - instance = log_publisher.LogPublisher() - instance._kafka_publisher = kp - - for um in base.UNICODE_MESSAGES: - case, msg = 
um.values() - try: - envelope = model.Envelope( - log={ - 'message': msg, - 'application_type': 'test', - 'dimensions': { - 'test': 'test_log_publisher', - 'case': 'test_should_send_unicode_message' - } - }, - meta={ - 'tenantId': 1 - } - ) - instance.send_message(envelope) - - expected_message = json.dumps(envelope, ensure_ascii=False) - - if six.PY3: - expected_message = expected_message.encode('utf-8') - - instance._kafka_publisher.publish.assert_called_with( - cfg.CONF.kafka.logs_topics[0], - [expected_message] - ) - except Exception: - LOG.exception('Failed to evaluate unicode case %s', case) - raise - - -@mock.patch('monasca_api.api.core.log.log_publisher.client_factory' - '.get_kafka_producer') -class TestTruncation(base.BaseTestCase): - EXTRA_CHARS_SIZE = len(bytearray(json.dumps({ - 'log': { - 'message': None - } - }), 'utf8')) - 2 - - def test_should_not_truncate_message_if_size_is_smaller(self, _): - diff_size = random.randint(1, 100) - self._run_truncate_test(log_size_factor=-diff_size, - truncate_by=0) - - def test_should_not_truncate_message_if_size_equal_to_max(self, _): - self._run_truncate_test(log_size_factor=0, - truncate_by=0) - - def test_should_truncate_too_big_message(self, _): - diff_size = random.randint(1, 100) - max_size = 1000 - truncate_by = ((max_size - - (max_size - log_publisher._TRUNCATED_PROPERTY_SIZE)) + - log_publisher._TRUNCATION_SAFE_OFFSET + diff_size) - self._run_truncate_test(max_message_size=1000, - log_size_factor=diff_size, - truncate_by=truncate_by) - - def _run_truncate_test(self, - max_message_size=1000, - log_size_factor=0, - truncate_by=0, - gen_fn=base.generate_unique_message): - - log_size = (max_message_size - - TestTruncation.EXTRA_CHARS_SIZE - - log_publisher._KAFKA_META_DATA_SIZE - - log_publisher._TIMESTAMP_KEY_SIZE + - log_size_factor) - - expected_log_message_size = log_size - truncate_by - - self.conf_override( - group='log_publisher', - max_message_size=max_message_size - ) - - log_msg = gen_fn(log_size) - envelope = { - 'log': { - 'message': log_msg - } - } - - instance = log_publisher.LogPublisher() - - envelope_copy = copy.deepcopy(envelope) - json_envelope = instance._truncate(envelope_copy) - - parsed_envelope = json.loads(json_envelope) - - parsed_log_message = parsed_envelope['log']['message'] - parsed_log_message_len = len(parsed_log_message) - - if truncate_by > 0: - self.assertNotEqual(envelope['log']['message'], - parsed_log_message) - else: - self.assertEqual(envelope['log']['message'], - parsed_log_message) - - self.assertEqual(expected_log_message_size, parsed_log_message_len) diff --git a/monasca_api/tests/test_logs.py b/monasca_api/tests/test_logs.py deleted file mode 100644 index aca9cf48e..000000000 --- a/monasca_api/tests/test_logs.py +++ /dev/null @@ -1,177 +0,0 @@ -# Copyright 2016-2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import falcon -import simplejson as json -from unittest import mock - -from monasca_api.tests import base -from monasca_api.v2.reference import logs - -ENDPOINT = '/logs' -TENANT_ID = 'bob' -ROLES = 'admin' - - -def _init_resource(test): - resource = logs.Logs() - test.app.add_route(ENDPOINT, resource) - return resource - - -def _generate_payload(log_count=None, messages=None): - if not log_count and messages: - log_count = len(messages) - logs = [{ - 'message': messages[it], - 'dimensions': { - 'hostname': 'host_%d' % it, - 'component': 'component_%d' % it, - 'service': 'service_%d' % it - } - } for it in range(log_count)] - else: - logs = [{ - 'message': base.generate_unique_message(100), - 'dimensions': { - 'hostname': 'host_%d' % it, - 'component': 'component_%d' % it, - 'service': 'service_%d' % it - } - } for it in range(log_count)] - body = { - 'dimensions': { - 'origin': __name__ - }, - 'logs': logs - } - - return body, logs - - -class TestApiLogsVersion(base.BaseApiTestCase): - - @mock.patch('monasca_api.v2.common.bulk_processor.BulkProcessor') - def test_should_return_as_version(self, _): - logs_resource = logs.Logs() - self.assertEqual('v2.0', logs_resource.version) - - -class TestApiLogs(base.BaseApiTestCase): - - @mock.patch('monasca_api.v2.common.bulk_processor.BulkProcessor') - def test_should_pass_cross_tenant_id(self, bulk_processor): - logs_resource = _init_resource(self) - logs_resource._processor = bulk_processor - - body, logs = _generate_payload(1) - payload = json.dumps(body) - content_length = len(payload) - response = self.simulate_request( - path='/logs', - method='POST', - query_string='project_id=1', - headers={ - 'X_ROLES': ROLES, - 'Content-Type': 'application/json', - 'Content-Length': str(content_length) - }, - body=payload - ) - self.assertEqual(falcon.HTTP_204, response.status) - logs_resource._processor.send_message.assert_called_with( - logs=logs, - global_dimensions=body['dimensions'], - log_tenant_id='1') - - @mock.patch('monasca_api.v2.common.bulk_processor.BulkProcessor') - def test_should_fail_not_delegate_ok_cross_tenant_id(self, _): - _init_resource(self) - response = self.simulate_request( - path='/logs', - method='POST', - query_string='project_id=1', - headers={ - 'X-Roles': ROLES, - 'Content-Type': 'application/json', - 'Content-Length': '0' - } - ) - self.assertEqual(falcon.HTTP_400, response.status) - - @mock.patch('monasca_api.v2.common.bulk_processor.BulkProcessor') - def test_should_pass_empty_cross_tenant_id_wrong_role(self, - bulk_processor): - logs_resource = _init_resource(self) - logs_resource._processor = bulk_processor - - body, _ = _generate_payload(1) - payload = json.dumps(body) - content_length = len(payload) - response = self.simulate_request( - path='/logs', - method='POST', - headers={ - 'X-Roles': ROLES, - 'Content-Type': 'application/json', - 'Content-Length': str(content_length) - }, - body=payload - ) - self.assertEqual(falcon.HTTP_204, response.status) - self.assertEqual(1, bulk_processor.send_message.call_count) - - @mock.patch('monasca_api.v2.common.bulk_processor.BulkProcessor') - def test_should_pass_empty_cross_tenant_id_ok_role(self, - bulk_processor): - logs_resource = _init_resource(self) - logs_resource._processor = bulk_processor - - body, _ = _generate_payload(1) - payload = json.dumps(body) - content_length = len(payload) - response = self.simulate_request( - path='/logs', - method='POST', - headers={ - 'X-Roles': ROLES, - 'Content-Type': 'application/json', - 'Content-Length': str(content_length) - }, - 
body=payload - ) - self.assertEqual(falcon.HTTP_204, response.status) - self.assertEqual(1, bulk_processor.send_message.call_count) - - -class TestUnicodeLogs(base.BaseApiTestCase): - - @mock.patch('monasca_api.api.core.log.log_publisher.client_factory' - '.get_kafka_producer') - def test_should_send_unicode_messages(self, _): - _init_resource(self) - - messages = [m['input'] for m in base.UNICODE_MESSAGES] - body, _ = _generate_payload(messages=messages) - payload = json.dumps(body, ensure_ascii=False) - response = self.simulate_request( - path='/logs', - method='POST', - headers={ - 'X-Roles': ROLES, - 'Content-Type': 'application/json' - }, - body=payload - ) - self.assertEqual(falcon.HTTP_204, response.status) diff --git a/monasca_api/tests/test_message_formats.py b/monasca_api/tests/test_message_formats.py deleted file mode 100644 index d557f0163..000000000 --- a/monasca_api/tests/test_message_formats.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright 2018 Fujitsu LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -import json -from unittest import mock - -from monasca_api.common.messaging.message_formats.metrics import transform -from monasca_api.tests import base - - -class TestMessageFormats(base.BaseTestCase): - @mock.patch('oslo_utils.timeutils.utcnow_ts') - def test_single_metrics(self, time): - time.return_value = 514862580 - tenant_id = 222 - region = 'default' - metrics = 'example.test' - expected_metrics = {'metric': 'example.test', - 'creation_time': 514862580, - 'meta': - {'region': 'default', - 'tenantId': 222}} - transformed_metric = transform(metrics, tenant_id, region) - self.assertIsInstance(transformed_metric, list) - self.assertEqual(len(transformed_metric), 1) - self.assertEqual(expected_metrics, json.loads(transformed_metric[0])) - - @mock.patch('oslo_utils.timeutils.utcnow_ts') - def test_multiple_metrics(self, time): - time.return_value = 514862580 - tenant_id = 222 - region = 'default' - metrics = ['example.test1', 'example.test2'] - expected_metrics = [] - for metric in metrics: - expected_metrics.append({'metric': metric, - 'creation_time': 514862580, - 'meta': - {'region': 'default', - 'tenantId': 222}}) - transformed_metrics = transform(metrics, tenant_id, region) - self.assertIsInstance(transformed_metrics, list) - self.assertEqual(len(transformed_metrics), len(metrics)) - for transformed_metric in transformed_metrics: - self.assertIn(json.loads(transformed_metric), expected_metrics) diff --git a/monasca_api/tests/test_metrics.py b/monasca_api/tests/test_metrics.py deleted file mode 100644 index 5b20237f9..000000000 --- a/monasca_api/tests/test_metrics.py +++ /dev/null @@ -1,325 +0,0 @@ -# Copyright 2019 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import json - -import falcon -import fixtures -from oslo_config import cfg - -from monasca_api.tests import base -from monasca_api.v2.reference import metrics - - -CONF = cfg.CONF - -TENANT_ID = u"fedcba9876543210fedcba9876543210" - - -class TestMetrics(base.BaseApiTestCase): - def setUp(self): - super(TestMetrics, self).setUp() - - self.useFixture(fixtures.MockPatch( - 'monasca_api.common.messaging.kafka_publisher.KafkaPublisher' - )) - self.metrics_repo_mock = self.useFixture(fixtures.MockPatch( - 'monasca_api.common.repositories.influxdb.metrics_repository.MetricsRepository' - )).mock - - # [messaging] - self.conf_override( - driver='monasca_api.common.messaging.' - 'kafka_publisher:KafkaPublisher', - group='messaging') - - self.metrics_resource = metrics.Metrics() - - self.app.add_route('/v2.0/metrics', - self.metrics_resource) - - def test_list_metrics(self): - expected_elements = \ - {'elements': [{'id': '0', - 'name': 'mon.fake_metric', - 'dimensions': - {'hostname': 'host0', - 'db': 'vegeta'}}, - {'id': '1', - 'name': 'cpu.idle_perc', - 'dimensions': - {'hostname': 'host0', - 'db': 'vegeta'}} - ]} - - return_value = self.metrics_repo_mock.return_value - return_value.list_metrics.return_value = expected_elements['elements'] - - response = self.simulate_request(path='/v2.0/metrics', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET') - self.assertEqual(falcon.HTTP_200, response.status) - self.assertThat(response, base.RESTResponseEquals(expected_elements)) - - def test_send_metrics(self): - request_body = { - "name": "mon.fake_metric", - "dimensions": { - "hostname": "host0", - "db": "vegeta" - }, - "timestamp": 1405630174123, - "value": 1.0, - "value_meta": { - "key1": "value1", - "key2": "value2" - }} - response = self.simulate_request(path='/v2.0/metrics', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID, - 'Content-Type': 'application/json'}, - body=json.dumps(request_body), - method='POST') - self.assertEqual(falcon.HTTP_204, response.status) - - def test_send_incorrect_metric(self): - request_body = { - "name": "mon.fake_metric", - "dimensions": 'oh no', - "timestamp": 1405630174123, - "value": 1.0} - response = self.simulate_request(path='/v2.0/metrics', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID, - 'Content-Type': 'application/json'}, - body=json.dumps(request_body), - method='POST') - self.assertEqual(falcon.HTTP_422, response.status) - - -class TestMeasurements(base.BaseApiTestCase): - def setUp(self): - super(TestMeasurements, self).setUp() - - self.metrics_repo_mock = self.useFixture(fixtures.MockPatch( - 'monasca_api.common.repositories.influxdb.metrics_repository.MetricsRepository' - )).mock - - self.metrics_resource = metrics.MetricsMeasurements() - - self.app.add_route('/v2.0/metrics/measurements', - self.metrics_resource) - - def test_get_measurements(self): - expected_measurements = \ - {'elements': [ - {u'name': u'mon.fake_metric', - u'columns': [u'timestamp', - u'value', - u'value_meta'], - u'id': 
'0', - u'dimensions': { - u'hostname': u'devstack', - u'service': u'monitoring'}, - u'measurements': - [[u'2019-03-12T12:37:10.106Z', 98.5, {}], - [u'2019-03-12T12:37:23.012Z', 98.8, {}], - [u'2019-03-12T12:37:38.031Z', 68.7, {}], - [u'2019-03-12T12:37:53.046Z', 55.3, {}], - [u'2019-03-12T12:38:08.048Z', 52.8, {}]]}]} - - return_value = self.metrics_repo_mock.return_value - return_value.measurement_list.return_value = expected_measurements['elements'] - response = self.simulate_request(path='/v2.0/metrics/measurements', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET', - query_string='name=mon.fake_metric&' - 'start_time=2015-03-01T00:00:01.000Z') - self.assertEqual(falcon.HTTP_200, response.status) - self.assertThat(response, base.RESTResponseEquals(expected_measurements)) - - def test_get_measurements_invalid_metric_name(self): - response = self.simulate_request(path='/v2.0/metrics/measurements', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET', - query_string='name=z' - 'start_time=2015-03-01T00:00:01.000Z') - self.assertEqual(falcon.HTTP_422, response.status) - - def test_get_measurements_missing_start_time(self): - response = self.simulate_request(path='/v2.0/metrics/measurements', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET', - query_string='name=mon.fake_metric') - self.assertEqual(falcon.HTTP_422, response.status) - - def test_get_measurements_missing_name(self): - response = self.simulate_request(path='/v2.0/metrics/measurements', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET', - query_string='start_time=2015-03-01T00:00:01.000Z') - self.assertEqual(falcon.HTTP_422, response.status) - - -class TestStatistics(base.BaseApiTestCase): - def setUp(self): - super(TestStatistics, self).setUp() - - self.metrics_repo_mock = self.useFixture(fixtures.MockPatch( - 'monasca_api.common.repositories.influxdb.metrics_repository.MetricsRepository' - )).mock - - self.metrics_resource = metrics.MetricsStatistics() - - self.app.add_route('/v2.0/metrics/statistics', - self.metrics_resource) - - def test_get_statistics(self): - expected_statistics = \ - {u'elements': [{u'name': u'mon.fake_metric', - u'columns': - [u'timestamp', - u'avg'], - u'id': '0', - u'dimensions': - {u'hostname': u'devstack', - u'service': u'monitoring'}, - u'statistics': - [[u'2019-03-12T12:35:00Z', 49.25], - [u'2019-03-12T12:40:00Z', 28.25], - [u'2019-03-12T12:45:00Z', 27.5], - [u'2019-03-12T12:50:00Z', 27], - [u'2019-03-12T12:55:00Z', 28]]}]} - return_value = self.metrics_repo_mock.return_value - return_value.metrics_statistics.return_value = expected_statistics['elements'] - response = self.simulate_request(path='/v2.0/metrics/statistics', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET', - query_string='name=mon.fake_metric&' - 'start_time=2015-03-01T00:00:01.000Z&' - 'statistics=avg') - self.assertEqual(falcon.HTTP_200, response.status) - self.assertThat(response, base.RESTResponseEquals(expected_statistics)) - - -class TestMetricsNames(base.BaseApiTestCase): - def setUp(self): - super(TestMetricsNames, self).setUp() - - self.metrics_repo_mock = self.useFixture(fixtures.MockPatch( - 'monasca_api.common.repositories.influxdb.metrics_repository.MetricsRepository' - )).mock - - self.metrics_resource = 
metrics.MetricsNames() - - self.app.add_route('/v2.0/metrics/names', - self.metrics_resource) - - def test_get_metrics_names(self): - expected_metrics_names = \ - {u'elements': [ - {u'name': u'cpu.frequency_mhz'}, - {u'name': u'cpu.idle_perc'}, - {u'name': u'cpu.idle_time'}, - {u'name': u'cpu.percent'}, - {u'name': u'cpu.stolen_perc'}, - {u'name': u'cpu.system_perc'}, - {u'name': u'cpu.system_time'}]} - - return_value = self.metrics_repo_mock.return_value - return_value.list_metric_names.return_value = expected_metrics_names['elements'] - response = self.simulate_request(path='/v2.0/metrics/names', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET') - self.assertEqual(falcon.HTTP_200, response.status) - self.assertThat(response, base.RESTResponseEquals(expected_metrics_names)) - - -class TestDimensionNames(base.BaseApiTestCase): - def setUp(self): - super(TestDimensionNames, self).setUp() - - self.metrics_repo_mock = self.useFixture(fixtures.MockPatch( - 'monasca_api.common.repositories.influxdb.metrics_repository.MetricsRepository' - )).mock - - self.metrics_resource = metrics.DimensionNames() - - self.app.add_route('/v2.0/metrics/dimensions/names', - self.metrics_resource) - - def test_get_dimension_names(self): - expected_dimension_names = \ - {u'elements': [ - {u'dimension_name': u'component'}, - {u'dimension_name': u'consumer_group'}, - {u'dimension_name': u'device'}, - {u'dimension_name': u'hostname'}, - {u'dimension_name': u'mode'}]} - - return_value = self.metrics_repo_mock.return_value - return_value.list_dimension_names.return_value = expected_dimension_names['elements'] - response = self.simulate_request(path='/v2.0/metrics/dimensions/names', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET') - self.assertEqual(falcon.HTTP_200, response.status) - self.assertThat(response, base.RESTResponseEquals(expected_dimension_names)) - - -class TestDimensionValues(base.BaseApiTestCase): - def setUp(self): - super(TestDimensionValues, self).setUp() - - self.metrics_repo_mock = self.useFixture(fixtures.MockPatch( - 'monasca_api.common.repositories.influxdb.metrics_repository.MetricsRepository' - )).mock - - self.metrics_resource = metrics.DimensionValues() - - self.app.add_route('/v2.0/metrics/dimensions/values', - self.metrics_resource) - - def test_get_dimension_values(self): - expected_dimension_names = \ - {u'elements': [ - {u'dimension_value': u'dummy_dimension_value'}]} - - return_value = self.metrics_repo_mock.return_value - return_value.list_dimension_values.return_value = expected_dimension_names['elements'] - response = self.simulate_request(path='/v2.0/metrics/dimensions/values', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET', - query_string='dimension_name=dummy_dimension_name') - self.assertEqual(falcon.HTTP_200, response.status) - self.assertThat(response, base.RESTResponseEquals(expected_dimension_names)) diff --git a/monasca_api/tests/test_metrics_db_health_check.py b/monasca_api/tests/test_metrics_db_health_check.py deleted file mode 100644 index 93423d3fa..000000000 --- a/monasca_api/tests/test_metrics_db_health_check.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright 2017 FUJITSU LIMITED -# (C) Copyright 2017 Hewlett Packard Enterprise Development LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from unittest import mock - -from monasca_common.simport import simport - -from monasca_api import config -from monasca_api.healthcheck import metrics_db_check as tdc -from monasca_api.tests import base - -CONF = config.CONF - - -class TestMetricsDbHealthCheck(base.BaseTestCase): - - @mock.patch("monasca_api.healthcheck.metrics_db_check.simport") - def test_health_check(self, simport_mock): - metrics_repo_mock = simport_mock.load.return_value - metrics_repo_mock.check_status.return_value = (True, 'OK') - db_health = tdc.MetricsDbCheck() - - result = db_health.health_check() - - self.assertTrue(result.healthy) - - self.assertEqual(result.message, 'OK') - - @mock.patch("monasca_api.healthcheck.metrics_db_check.simport") - def test_health_check_failed(self, simport_mock): - metrics_repo_mock = simport_mock.load.return_value - metrics_repo_mock.check_status.return_value = (False, 'Error') - db_health = tdc.MetricsDbCheck() - - result = db_health.health_check() - - self.assertFalse(result.healthy) - self.assertEqual(result.message, 'Error') - - @mock.patch("monasca_api.healthcheck.metrics_db_check.simport") - def test_health_check_load_failed(self, simport_mock): - simport_mock.load.side_effect = simport.ImportFailed( - "Failed to import 'foo'. Error: bar") - self.assertRaises(simport.ImportFailed, tdc.MetricsDbCheck) diff --git a/monasca_api/tests/test_models_repository.py b/monasca_api/tests/test_models_repository.py deleted file mode 100644 index 55ef038fc..000000000 --- a/monasca_api/tests/test_models_repository.py +++ /dev/null @@ -1,164 +0,0 @@ -# Copyright 2015 Cray -# Copyright 2016-2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from sqlalchemy import select, MetaData, text, asc - -from monasca_api.common.repositories.sqla import models -from monasca_api.tests import base - - -class TestModelsDB(base.BaseTestCase): - - def setUp(self): - super(TestModelsDB, self).setUp() - metadata = MetaData() - - md = models.create_md_model(metadata) - gc_columns = [md.c.name + text("'='") + md.c.value] - self.group_concat_md = ( - select(md.c.dimension_set_id, - models.group_concat(gc_columns).label('dimensions')) - .select_from(md) - .group_by(md.c.dimension_set_id)) - - self.group_concat_md_order = ( - select(md.c.dimension_set_id, - models.group_concat(gc_columns, - order_by=[md.c.name.asc()]).label('dimensions')) - .select_from(md) - .group_by(md.c.dimension_set_id)) - - self.order_by_field = (select(md.c.dimension_set_id) .select_from(md) .order_by( - asc(models.field_sort(md.c.dimension_set_id, map(text, ["'A'", "'B'", "'C'"]))))) - - def test_oracle(self): - from sqlalchemy.dialects import oracle - dialect = oracle.dialect() - query = str(self.group_concat_md.compile(dialect=dialect)) - - expected = ('''SELECT metric_dimension.dimension_set_id, LISTAGG(metric_dimension.name ''' - '''|| '=' || metric_dimension.value, ',') WITHIN GROUP (ORDER BY ''' - '''metric_dimension.name || '=' || metric_dimension.value) AS dimensions ''' - ''' -FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''') - self.assertEqual(expected, query) - - query = str(self.group_concat_md_order.compile(dialect=dialect)) - - expected = ('''SELECT metric_dimension.dimension_set_id, LISTAGG(metric_dimension.name ''' - '''|| '=' || metric_dimension.value, ',') WITHIN GROUP (ORDER BY ''' - '''metric_dimension.name ASC) AS dimensions ''' - ''' -FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''') - self.assertEqual(expected, query) - - expected = ( - """SELECT metric_dimension.dimension_set_id \n""" - """FROM metric_dimension ORDER BY CASE WHEN metric_dimension.dimension_set_id='A'""" - """ THEN 0 WHEN metric_dimension.dimension_set_id='B' THEN 1 WHEN""" - """ metric_dimension.dimension_set_id='C' THEN 2 ELSE 3 END ASC""") - query = str(self.order_by_field.compile(dialect=dialect)) - self.assertEqual(expected, query) - - def test_postgres(self): - from sqlalchemy.dialects import postgresql as diale_ - dialect = diale_.dialect() - query = str(self.group_concat_md.compile(dialect=dialect)) - - expected = ( - '''SELECT metric_dimension.dimension_set_id, STRING_AGG(metric_dimension.name ''' - '''|| '=' || metric_dimension.value, ',' ) AS dimensions ''' - ''' -FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''') - self.assertEqual(expected, query) - - query = str(self.group_concat_md_order.compile(dialect=dialect)) - - expected = ( - '''SELECT metric_dimension.dimension_set_id, STRING_AGG(metric_dimension.name ''' - '''|| '=' || metric_dimension.value, ',' ORDER BY metric_dimension.name ASC) ''' - '''AS dimensions ''' - ''' -FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''') - self.assertEqual(expected, query) - - expected = ( - """SELECT metric_dimension.dimension_set_id \n""" - """FROM metric_dimension ORDER BY CASE WHEN metric_dimension.dimension_set_id='A'""" - """ THEN 0 WHEN metric_dimension.dimension_set_id='B' THEN 1 WHEN""" - """ metric_dimension.dimension_set_id='C' THEN 2 ELSE 3 END ASC""") - query = str(self.order_by_field.compile(dialect=dialect)) - self.assertEqual(expected, query) - -# def test_sybase(self): -# from sqlalchemy.dialects import sybase as diale_ -# dialect = 
diale_.dialect() -# query = str(self.group_concat_md.compile(dialect=dialect)) - -# expected = ( -# '''SELECT metric_dimension.dimension_set_id, LIST(metric_dimension.name || '=' ''' -# '''|| metric_dimension.value, ',') AS dimensions ''' -# ''' -# FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''') -# self.assertEqual(expected, query) - -# query = str(self.group_concat_md_order.compile(dialect=dialect)) - -# expected = ( -# '''SELECT metric_dimension.dimension_set_id, LIST(metric_dimension.name || '=' ''' -# '''|| metric_dimension.value, ',') AS dimensions ''' -# ''' -# FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''') -# self.assertEqual(expected, query) - -# expected = ( -# """SELECT metric_dimension.dimension_set_id \n""" -# """FROM metric_dimension ORDER BY CASE WHEN metric_dimension.dimension_set_id='A'""" -# """ THEN 0 WHEN metric_dimension.dimension_set_id='B' THEN 1 WHEN""" -# """ metric_dimension.dimension_set_id='C' THEN 2 ELSE 3 END ASC""") -# query = str(self.order_by_field.compile(dialect=dialect)) -# self.assertEqual(expected, query) - - def test_mysql(self): - from sqlalchemy.dialects import mysql as diale_ - dialect = diale_.dialect() - query = str(self.group_concat_md.compile(dialect=dialect)) - - expected = ( - '''SELECT metric_dimension.dimension_set_id, ''' - '''GROUP_CONCAT(concat(metric_dimension.name, ''' - ''''=', metric_dimension.value) SEPARATOR ',') AS dimensions ''' - ''' -FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''') - self.assertEqual(expected, query) - - query = str(self.group_concat_md_order.compile(dialect=dialect)) - - expected = ( - '''SELECT metric_dimension.dimension_set_id, ''' - '''GROUP_CONCAT(concat(metric_dimension.name, ''' - ''''=', metric_dimension.value) ORDER BY metric_dimension.name ASC ''' - '''SEPARATOR ',') AS dimensions ''' - ''' -FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''') - self.assertEqual(expected, query) - - expected = ( - '''SELECT metric_dimension.dimension_set_id \n''' - '''FROM metric_dimension ''' - '''ORDER BY FIELD(metric_dimension.dimension_set_id, 'A', 'B', 'C') ASC''') - - query = str(self.order_by_field.compile(dialect=dialect)) - self.assertEqual(expected, query) diff --git a/monasca_api/tests/test_nm_repository.py b/monasca_api/tests/test_nm_repository.py deleted file mode 100644 index 203ef2ff2..000000000 --- a/monasca_api/tests/test_nm_repository.py +++ /dev/null @@ -1,242 +0,0 @@ -# Copyright 2015 Cray -# Copyright 2016-2017 FUJITSU LIMITED -# (C) Copyright 2016 Hewlett Packard Enterprise Development Company LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import datetime - -import fixtures -from oslo_config import cfg -from oslo_config import fixture as fixture_config -from oslo_db.sqlalchemy.engines import create_engine -from sqlalchemy import delete, MetaData, insert, inspect, bindparam - -from monasca_api.common.repositories.sqla import models -from monasca_api.tests import base - -CONF = cfg.CONF - - -class TestNotificationMethodRepoDB(base.BaseTestCase): - @classmethod - def setUpClass(cls): - engine = create_engine('sqlite://') - - qry = open('monasca_api/tests/sqlite_alarm.sql', 'r').read() - sconn = engine.raw_connection() - c = sconn.cursor() - c.executescript(qry) - sconn.commit() - c.close() - cls.engine = engine - - def _fake_engine_from_config(*args, **kw): - return cls.engine - cls.fixture = fixtures.MonkeyPatch( - 'sqlalchemy.create_engine', _fake_engine_from_config) - cls.fixture.setUp() - - metadata = MetaData() - cls.nm = models.create_nm_model(metadata) - cls._delete_nm_query = delete(cls.nm) - cls._insert_nm_query = (insert(cls.nm) - .values( - id=bindparam('id'), - tenant_id=bindparam('tenant_id'), - name=bindparam('name'), - type=bindparam('type'), - address=bindparam('address'), - period=bindparam('period'), - created_at=bindparam('created_at'), - updated_at=bindparam('updated_at'))) - - @classmethod - def tearDownClass(cls): - cls.fixture.cleanUp() - if hasattr(CONF, 'sql_engine'): - delattr(CONF, 'sql_engine') - - def setUp(self): - super(TestNotificationMethodRepoDB, self).setUp() - - self._fixture_config = self.useFixture( - fixture_config.Config(cfg.CONF)) - self._fixture_config.config(connection='sqlite://', - group='database') - - from monasca_api.common.repositories.sqla import notifications_repository as nr - - self.repo = nr.NotificationsRepository() - self.created_at = datetime.datetime.now() - self.updated_at = datetime.datetime.now() - self.default_nms = [{'id': '123', - 'tenant_id': '444', - 'name': 'MyEmail', - 'type': 'EMAIL', - 'address': 'a@b', - 'period': 0, - 'created_at': self.created_at, - 'updated_at': self.updated_at}, - {'id': '124', - 'tenant_id': '444', - 'name': 'OtherEmail', - 'type': 'EMAIL', - 'address': 'a@b', - 'period': 0, - 'created_at': self.created_at, - 'updated_at': self.updated_at}, - {'id': '125', - 'tenant_id': '444', - 'name': 'AEmail', - 'type': 'EMAIL', - 'address': 'a@b', - 'period': 0, - 'created_at': self.created_at, - 'updated_at': self.updated_at} - ] - - with self.engine.connect() as conn: - conn.execute(self._delete_nm_query) - conn.execute(self._insert_nm_query, self.default_nms) - # TODO(thuvh) find better solution - conn.commit() - - def test_fixture_and_setup(self): - class A(object): - def __init__(self): - from sqlalchemy import create_engine - self.engine = create_engine(None) - - a = A() - expected_list_tables = ['alarm', - 'alarm_action', - 'alarm_definition', - 'alarm_definition_severity', - 'alarm_metric', - 'alarm_state', - 'metric_definition', - 'metric_definition_dimensions', - 'metric_dimension', - 'notification_method', - 'notification_method_type', - 'sub_alarm', - 'sub_alarm_definition', - 'sub_alarm_definition_dimension'] - - self.assertEqual(self.engine, a.engine) - inspection = inspect(self.engine) - self.assertEqual(inspection.get_table_names(), expected_list_tables) - - def test_should_create(self): - from monasca_api.common.repositories import exceptions - nmA = self.repo.create_notification('555', - 'MyEmail', - 'EMAIL', - 'a@b', - 0) - nmB = self.repo.list_notification('555', nmA) - - self.assertEqual(nmA, nmB['id']) - - 
self.assertRaises(exceptions.AlreadyExistsException, - self.repo.create_notification, - '555', - 'MyEmail', - 'EMAIL', - 'a@b', - 0) - - def test_should_exists(self): - from monasca_api.common.repositories import exceptions - self.assertTrue(self.repo.list_notification("444", "123")) - self.assertRaises(exceptions.DoesNotExistException, - self.repo.list_notification, "444", "1234") - self.assertRaises(exceptions.DoesNotExistException, - self.repo.list_notification, "333", "123") - - def test_should_find_by_id(self): - nm = self.repo.list_notification('444', '123') - self.assertEqual(nm['id'], '123') - self.assertEqual(nm['type'], 'EMAIL') - self.assertEqual(nm['address'], 'a@b') - - def test_should_find_by_name(self): - nms = self.repo.find_notification_by_name('444', 'MyEmail') - self.assertEqual(nms['id'], '123') - self.assertEqual(nms['tenant_id'], '444') - self.assertEqual(nms['type'], 'EMAIL') - self.assertEqual(nms['address'], 'a@b') - self.assertEqual(nms['period'], 0) - self.assertEqual(nms['created_at'], self.created_at) - self.assertEqual(nms['updated_at'], self.updated_at) - - from monasca_api.common.repositories import exceptions - self.assertRaises(exceptions.DoesNotExistException, - self.repo.find_notification_by_name, '444', '222222MyEmail22222') - - def test_should_find(self): - nms = self.repo.list_notifications('444', None, None, 2) - self.assertEqual(nms, self.default_nms) - nms = self.repo.list_notifications('444', None, 3, 1) - self.assertEqual(nms, []) - - def test_should_find_and_sort(self): - nms = self.repo.list_notifications('444', ['id'], None, 3) - self.assertEqual(self.default_nms, nms) - - nms = self.repo.list_notifications('444', ['name'], None, 3) - expected = sorted(self.default_nms, key=lambda k: k['name']) - self.assertEqual(expected, nms) - - def test_update(self): - import copy - self.repo.update_notification('123', '444', 'Foo', 'EMAIL', 'abc', 0) - nm = self.repo.list_notification('444', '123') - new_nm = copy.deepcopy(self.default_nms[0]) - new_nm['name'] = b'Foo' - new_nm['type'] = b'EMAIL' - new_nm['address'] = b'abc' - new_nm['created_at'] = nm['created_at'] - new_nm['updated_at'] = nm['updated_at'] - self.assertEqual(nm, new_nm) - from monasca_api.common.repositories import exceptions - self.assertRaises(exceptions.DoesNotExistException, - self.repo.update_notification, - 'no really id', - 'no really tenant', - '', - '', - '', - 0) - - def test_should_delete(self): - from monasca_api.common.repositories import exceptions - self.repo.delete_notification('444', '123') - self.assertRaises(exceptions.DoesNotExistException, - self.repo.list_notification, '444', '123') - self.assertRaises(exceptions.DoesNotExistException, - self.repo.delete_notification, 'no really tenant', '123') - - def test_should_update_duplicate_with_same_values(self): - import copy - self.repo.update_notification('123', '444', 'Foo', 'EMAIL', 'abc', 0) - self.repo.update_notification('123', '444', 'Foo', 'EMAIL', 'abc', 0) - nm = self.repo.list_notification('444', '123') - new_nm = copy.deepcopy(self.default_nms[0]) - new_nm['name'] = b'Foo' - new_nm['type'] = b'EMAIL' - new_nm['address'] = b'abc' - new_nm['created_at'] = nm['created_at'] - new_nm['updated_at'] = nm['updated_at'] - self.assertEqual(nm, new_nm) diff --git a/monasca_api/tests/test_notifications.py b/monasca_api/tests/test_notifications.py deleted file mode 100644 index a5e283c42..000000000 --- a/monasca_api/tests/test_notifications.py +++ /dev/null @@ -1,604 +0,0 @@ -# Copyright 2019 FUJITSU LIMITED -# -# Licensed 
under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import datetime -import json - -import falcon.testing -import fixtures -from oslo_config import cfg - -from monasca_api.tests import base -from monasca_api.v2.reference import notifications -from monasca_api.v2.reference import notificationstype - - -CONF = cfg.CONF - -TENANT_ID = u"fedcba9876543210fedcba9876543210" - - -class TestNotifications(base.BaseApiTestCase): - def setUp(self): - super(TestNotifications, self).setUp() - - self.conf_override( - notifications_driver='monasca_api.common.repositories.sqla.' - 'notifications_repository:NotificationsRepository', - group='repositories') - - self.notifications_repo_mock = self.useFixture(fixtures.MockPatch( - 'monasca_api.common.repositories.sqla.notifications_repository.NotificationsRepository' - )).mock - - self.notifications_type_repo_mock = self.useFixture(fixtures.MockPatch( - 'monasca_api.common.repositories.sqla.' - 'notification_method_type_repository.NotificationMethodTypeRepository' - )).mock - - self.notification_resource = notifications.Notifications() - self.app.add_route( - '/v2.0/notification-methods', self.notification_resource) - self.app.add_route( - '/v2.0/notification-methods/{notification_method_id}', self.notification_resource) - - def test_create_notifications(self): - request_body = \ - { - "name": "Name", - "type": "EMAIL", - "address": "john@doe.com" - } - return_value = self.notifications_repo_mock.return_value - return_value.create_notification.return_value = 'a9362cc5-c78e-4674-bd39-4639fc274a20' - return_value.find_notification_by_name.return_value = {} - - return_value = self.notifications_type_repo_mock.return_value - return_value.list_notification_method_types.return_value = \ - [u'EMAIL', - u'PAGERDUTY', - u'WEBHOOK'] - - response = self.simulate_request(path='/v2.0/notification-methods', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID, - 'Content-Type': 'application/json'}, - method='POST', - body=json.dumps(request_body)) - self.assertEqual(falcon.HTTP_201, response.status) - - def test_create_notifications_with_incorrect_type(self): - request_body = \ - { - "name": "Name", - "type": "MagicTYPE", - "address": "john@doe.com" - } - return_value = self.notifications_repo_mock.return_value - return_value.find_notification_by_name.return_value = {} - - return_value = self.notifications_type_repo_mock.return_value - return_value.list_notification_method_types.return_value = \ - [u'EMAIL', - u'PAGERDUTY', - u'WEBHOOK'] - - response = self.simulate_request(path='/v2.0/notification-methods', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID, - 'Content-Type': 'application/json'}, - method='POST', - body=json.dumps(request_body)) - self.assertEqual(falcon.HTTP_400, response.status) - - def test_create_notifications_when_name_is_taken(self): - request_body = \ - { - "name": "Name", - "type": "EMAIL", - "address": "john@doe.com" - } - return_value = self.notifications_repo_mock.return_value - 
return_value.find_notification_by_name.return_value = \ - {'name': u'Name', - 'id': u'1', - 'tenant_id': u'444', - 'type': u'EMAIL', - 'period': 0, - 'address': u'a@b.com', - 'created_at': datetime.datetime(2019, 3, 22, 9, 35, 25), - 'updated_at': datetime.datetime(2019, 3, 22, 9, 35, 25)} - - return_value = self.notifications_type_repo_mock.return_value - return_value.list_notification_method_types.return_value = \ - [u'EMAIL', - u'PAGERDUTY', - u'WEBHOOK'] - - response = self.simulate_request(path='/v2.0/notification-methods', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID, - 'Content-Type': 'application/json'}, - method='POST', - body=json.dumps(request_body)) - self.assertEqual(falcon.HTTP_409, response.status) - - def test_list_notifications(self): - expected_elements = \ - {'elements': [ - {'name': u'notification', - 'id': u'1', - 'type': u'EMAIL', - 'period': 0, - 'address': u'a@b.com', - 'links': [{ - 'href': 'http://falconframework.org/v2.0/notification-methods/1', - 'rel': 'self'}]}]} - - return_value = self.notifications_repo_mock.return_value - return_value.list_notifications.return_value = \ - [{'name': u'notification', - 'id': u'1', - 'tenant_id': u'4199b031d5fa401abf9afaf7e58890b7', - 'type': u'EMAIL', - 'period': 0, - 'address': u'a@b.com', - 'created_at': datetime.datetime(2019, 3, 22, 9, 35, 25), - 'updated_at': datetime.datetime(2019, 3, 22, 9, 35, 25)}] - response = self.simulate_request(path='/v2.0/notification-methods', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET') - self.assertEqual(falcon.HTTP_200, response.status) - self.assertThat(response, base.RESTResponseEquals(expected_elements)) - - def test_list_notifications_with_sort_by(self): - expected_elements = \ - {'elements': [ - {'name': u'notification', - 'id': u'1', - 'type': u'EMAIL', - 'period': 0, - 'address': u'a@b.com', - 'links': [{ - 'href': 'http://falconframework.org/v2.0/notification-methods/1', - 'rel': 'self'}]}]} - - return_value = self.notifications_repo_mock.return_value - return_value.list_notifications.return_value = \ - [{'name': u'notification', - 'id': u'1', - 'tenant_id': u'4199b031d5fa401abf9afaf7e58890b7', - 'type': u'EMAIL', - 'period': 0, - 'address': u'a@b.com', - 'created_at': datetime.datetime(2019, 3, 22, 9, 35, 25), - 'updated_at': datetime.datetime(2019, 3, 22, 9, 35, 25)}] - response = self.simulate_request(path='/v2.0/notification-methods', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - query_string='sort_by=name', - method='GET') - self.assertEqual(falcon.HTTP_200, response.status) - self.assertThat(response, base.RESTResponseEquals(expected_elements)) - - def test_list_notifications_with_incorrect_sort_by(self): - response = self.simulate_request(path='/v2.0/notification-methods', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - query_string='sort_by=random_string', - method='GET') - self.assertEqual(falcon.HTTP_422, response.status) - - def test_list_notifications_with_offset(self): - expected_elements = \ - {'elements': [ - {'name': u'notification', - 'id': u'1', - 'type': u'EMAIL', - 'period': 0, - 'address': u'a@b.com', - 'links': [{ - 'href': 'http://falconframework.org/v2.0/notification-methods/1', - 'rel': 'self'}]}]} - - return_value = self.notifications_repo_mock.return_value - return_value.list_notifications.return_value = \ - [{'name': u'notification', - 'id': 
u'1', - 'tenant_id': u'4199b031d5fa401abf9afaf7e58890b7', - 'type': u'EMAIL', - 'period': 0, - 'address': u'a@b.com', - 'created_at': datetime.datetime(2019, 3, 22, 9, 35, 25), - 'updated_at': datetime.datetime(2019, 3, 22, 9, 35, 25)}] - response = self.simulate_request(path='/v2.0/notification-methods', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - query_string='offset=10', - method='GET') - self.assertEqual(falcon.HTTP_200, response.status) - self.assertThat(response, base.RESTResponseEquals(expected_elements)) - - def test_list_notifications_with_incorrect_offset(self): - return_value = self.notifications_repo_mock.return_value - return_value.list_notifications.return_value = \ - [{'name': u'notification', - 'id': u'1', - 'tenant_id': u'4199b031d5fa401abf9afaf7e58890b7', - 'type': u'EMAIL', - 'period': 0, - 'address': u'a@b.com', - 'created_at': datetime.datetime(2019, 3, 22, 9, 35, 25), - 'updated_at': datetime.datetime(2019, 3, 22, 9, 35, 25)}] - result = self.simulate_request(path='/v2.0/notification-methods', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - query_string='offset=ten', - method='GET') - self.assertEqual(falcon.HTTP_422, result.status) - - def test_get_notification_with_id(self): - expected_elements = \ - {'name': u'notification', - 'id': u'1', - 'type': u'EMAIL', - 'period': 0, - 'address': u'a@b.com'} - - return_value = self.notifications_repo_mock.return_value - return_value.list_notification.return_value = \ - {'name': u'notification', - 'id': u'1', - 'tenant_id': u'4199b031d5fa401abf9afaf7e58890b7', - 'type': u'EMAIL', - 'period': 0, - 'address': u'a@b.com', - 'created_at': datetime.datetime(2019, 3, 22, 9, 35, 25), - 'updated_at': datetime.datetime(2019, 3, 22, 9, 35, 25)} - response = self.simulate_request(path='/v2.0/notification-methods/1', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='GET') - self.assertEqual(falcon.HTTP_200, response.status) - self.assertThat(response, base.RESTResponseEquals(expected_elements)) - - def test_delete_notification(self): - response = self.simulate_request(path='/v2.0/notification-methods/1', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID}, - method='DELETE') - self.assertEqual(falcon.HTTP_204, response.status) - - def test_put_notification(self): - expected_elements = \ - {"id": "1", - "name": "shy_name", - "type": "EMAIL", - "address": "james@bond.com", - "period": 0} - - return_value = self.notifications_type_repo_mock.return_value - return_value.list_notification_method_types.return_value = \ - [u'EMAIL', - u'PAGERDUTY', - u'WEBHOOK'] - - return_value = self.notifications_repo_mock.return_value - return_value.find_notification_by_name.return_value = \ - {'name': u'notification', - 'id': u'1', - 'tenant_id': u'444', - 'type': u'EMAIL', - 'period': 0, - 'address': u'a@b.com', - 'created_at': datetime.datetime(2019, 3, 22, 9, 35, 25), - 'updated_at': datetime.datetime(2019, 3, 22, 9, 35, 25)} - - request_body = \ - {"name": "shy_name", - "type": "EMAIL", - "address": "james@bond.com", - "period": 0} - - response = self.simulate_request(path='/v2.0/notification-methods/1', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID, - 'Content-Type': 'application/json'}, - method='PUT', - body=json.dumps(request_body)) - - self.assertEqual(falcon.HTTP_200, response.status) - 
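# Aside: the assertThat on the next line uses base.RESTResponseEquals, a
# testtools-style matcher comparing the response's parsed JSON body against a
# dict. A guess at the core mechanics (simplified; the real helper in
# monasca_api.tests.base may differ, and testtools must be installed):
import json

from testtools.matchers import Mismatch


class JSONBodyEquals(object):
    """Minimal testtools-style matcher: compare a response's JSON body."""

    def __init__(self, expected):
        self.expected = expected

    def match(self, response):
        actual = json.loads(response.text)
        if actual == self.expected:
            return None  # None signals a match to assertThat
        return Mismatch('%r != %r' % (actual, self.expected))


class _FakeResponse(object):
    text = '{"id": "1"}'


assert JSONBodyEquals({'id': '1'}).match(_FakeResponse()) is None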
self.assertThat(response, base.RESTResponseEquals(expected_elements)) - - def test_patch_notification_all_fields(self): - expected_elements = \ - {"id": "1", - "name": "shy_name", - "type": "EMAIL", - "address": "james@bond.com", - "period": 0} - - return_value = self.notifications_type_repo_mock.return_value - return_value.list_notification_method_types.return_value = \ - [u'EMAIL', - u'PAGERDUTY', - u'WEBHOOK'] - - return_value = self.notifications_repo_mock.return_value - return_value.find_notification_by_name.return_value = \ - {'name': u'notification', - 'id': u'1', - 'tenant_id': u'444', - 'type': u'EMAIL', - 'period': 0, - 'address': u'a@b.com', - 'created_at': datetime.datetime(2019, 3, 22, 9, 35, 25), - 'updated_at': datetime.datetime(2019, 3, 22, 9, 35, 25)} - - request_body = \ - {"name": "shy_name", - "type": "EMAIL", - "address": "james@bond.com", - "period": 0} - - response = self.simulate_request(path='/v2.0/notification-methods/1', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID, - 'Content-Type': 'application/json'}, - method='PATCH', - body=json.dumps(request_body)) - - self.assertEqual(falcon.HTTP_200, response.status) - self.assertThat(response, base.RESTResponseEquals(expected_elements)) - - def test_patch_notification_name_fields(self): - expected_elements = \ - {"id": "1", - "name": "shy_name", - "type": "EMAIL", - "address": "james@bond.com", - "period": 0} - - return_value = self.notifications_type_repo_mock.return_value - return_value.list_notification_method_types.return_value = \ - [u'EMAIL', - u'PAGERDUTY', - u'WEBHOOK'] - - return_value = self.notifications_repo_mock.return_value - return_value.find_notification_by_name.return_value = \ - {'name': u'notification', - 'id': u'1', - 'tenant_id': u'444', - 'type': u'EMAIL', - 'period': 0, - 'address': u'james@bond.com', - 'created_at': datetime.datetime(2019, 3, 22, 9, 35, 25), - 'updated_at': datetime.datetime(2019, 3, 22, 9, 35, 25)} - - return_value = self.notifications_repo_mock.return_value - return_value.list_notification.return_value = \ - {'name': u'notification', - 'id': u'1', - 'tenant_id': u'444', - 'type': u'EMAIL', - 'period': 0, - 'address': u'james@bond.com'} - - request_body = {"name": "shy_name"} - - response = self.simulate_request(path='/v2.0/notification-methods/1', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID, - 'Content-Type': 'application/json'}, - method='PATCH', - body=json.dumps(request_body)) - - self.assertEqual(falcon.HTTP_200, response.status) - self.assertThat(response, base.RESTResponseEquals(expected_elements)) - - def test_patch_notification_type_fields(self): - expected_elements = \ - {"id": "1", - "name": "notification", - "type": "PAGERDUTY", - "address": "james@bond.com", - "period": 0} - - return_value = self.notifications_type_repo_mock.return_value - return_value.list_notification_method_types.return_value = \ - [u'EMAIL', - u'PAGERDUTY', - u'WEBHOOK'] - - return_value = self.notifications_repo_mock.return_value - return_value.find_notification_by_name.return_value = \ - {'name': u'notification', - 'id': u'1', - 'tenant_id': u'444', - 'type': u'EMAIL', - 'period': 0, - 'address': u'james@bond.com', - 'created_at': datetime.datetime(2019, 3, 22, 9, 35, 25), - 'updated_at': datetime.datetime(2019, 3, 22, 9, 35, 25)} - - return_value = self.notifications_repo_mock.return_value - return_value.list_notification.return_value = \ - {'name': u'notification', - 'id': u'1', - 'tenant_id': 
u'444', - 'type': u'EMAIL', - 'period': 0, - 'address': u'james@bond.com'} - - request_body = {"type": "PAGERDUTY"} - - response = self.simulate_request(path='/v2.0/notification-methods/1', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID, - 'Content-Type': 'application/json'}, - method='PATCH', - body=json.dumps(request_body)) - - self.assertEqual(falcon.HTTP_200, response.status) - self.assertThat(response, base.RESTResponseEquals(expected_elements)) - - def test_patch_notification_address_fields(self): - expected_elements = \ - {"id": "1", - "name": "notification", - "type": "EMAIL", - "address": "a@b.com", - "period": 0} - - return_value = self.notifications_type_repo_mock.return_value - return_value.list_notification_method_types.return_value = \ - [u'EMAIL', - u'PAGERDUTY', - u'WEBHOOK'] - - return_value = self.notifications_repo_mock.return_value - return_value.find_notification_by_name.return_value = \ - {'name': u'notification', - 'id': u'1', - 'tenant_id': u'444', - 'type': u'EMAIL', - 'period': 0, - 'address': u'james@bond.com', - 'created_at': datetime.datetime(2019, 3, 22, 9, 35, 25), - 'updated_at': datetime.datetime(2019, 3, 22, 9, 35, 25)} - - return_value = self.notifications_repo_mock.return_value - return_value.list_notification.return_value = \ - {'name': u'notification', - 'id': u'1', - 'tenant_id': u'444', - 'type': u'EMAIL', - 'period': 0, - 'address': u'james@bond.com'} - - request_body = {"address": "a@b.com"} - - response = self.simulate_request(path='/v2.0/notification-methods/1', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID, - 'Content-Type': 'application/json'}, - method='PATCH', - body=json.dumps(request_body)) - - self.assertEqual(falcon.HTTP_200, response.status) - self.assertThat(response, base.RESTResponseEquals(expected_elements)) - - def test_patch_notification_period_fields(self): - expected_elements = \ - {"id": "1", - "name": "notification", - "type": "WEBHOOK", - "address": "http://jamesbond.com", - "period": 60} - - return_value = self.notifications_type_repo_mock.return_value - return_value.list_notification_method_types.return_value = \ - [u'EMAIL', - u'PAGERDUTY', - u'WEBHOOK'] - - return_value = self.notifications_repo_mock.return_value - return_value.find_notification_by_name.return_value = \ - {'name': u'notification', - 'id': u'1', - 'tenant_id': u'444', - 'type': u'WEBHOOK', - 'period': 0, - 'address': u'http://jamesbond.com', - 'created_at': datetime.datetime(2019, 3, 22, 9, 35, 25), - 'updated_at': datetime.datetime(2019, 3, 22, 9, 35, 25)} - - return_value = self.notifications_repo_mock.return_value - return_value.list_notification.return_value = \ - {'name': u'notification', - 'id': u'1', - 'tenant_id': u'444', - 'type': u'WEBHOOK', - 'period': 0, - 'address': u'http://jamesbond.com'} - - request_body = {"period": 60} - - response = self.simulate_request(path='/v2.0/notification-methods/1', - headers={'X-Roles': - CONF.security.default_authorized_roles[0], - 'X-Tenant-Id': TENANT_ID, - 'Content-Type': 'application/json'}, - method='PATCH', - body=json.dumps(request_body)) - - self.assertEqual(falcon.HTTP_200, response.status) - self.assertThat(response, base.RESTResponseEquals(expected_elements)) - - -class TestNotificationsType(base.BaseApiTestCase): - def setUp(self): - super(TestNotificationsType, self).setUp() - - self.conf_override( - notifications_driver='monasca_api.common.repositories.sqla.' 
-                                 'notifications_repository:NotificationsRepository',
-            group='repositories')
-
-        self.notifications_type_repo_mock = self.useFixture(fixtures.MockPatch(
-            'monasca_api.common.repositories.sqla.'
-            'notification_method_type_repository.NotificationMethodTypeRepository'
-        )).mock
-
-        self.notification_resource = notificationstype.NotificationsType()
-        self.app.add_route(
-            '/v2.0/notification-methods/types', self.notification_resource)
-
-    def test_get_notification_types(self):
-        expected_notification_types = \
-            {'elements': [
-                {'type': 'EMAIL'},
-                {'type': 'PAGERDUTY'},
-                {'type': 'WEBHOOK'}]}
-        return_value = self.notifications_type_repo_mock.return_value
-        return_value.list_notification_method_types.return_value = \
-            [u'EMAIL',
-             u'PAGERDUTY',
-             u'WEBHOOK']
-
-        response = self.simulate_request(path='/v2.0/notification-methods/types',
-                                         headers={'X-Roles':
-                                                  CONF.security.default_authorized_roles[0],
-                                                  'X-Tenant-Id': TENANT_ID,
-                                                  'Content-Type': 'application/json'},
-                                         method='GET')
-        self.assertEqual(falcon.HTTP_200, response.status)
-        self.assertThat(response, base.RESTResponseEquals(expected_notification_types))
diff --git a/monasca_api/tests/test_policy.py b/monasca_api/tests/test_policy.py
deleted file mode 100644
index c234f70ee..000000000
--- a/monasca_api/tests/test_policy.py
+++ /dev/null
@@ -1,254 +0,0 @@
-# Copyright 2016-2017 FUJITSU LIMITED
-# Copyright 2018 OP5 AB
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
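# For context on the tests below: they drive oslo.policy's Enforcer directly.
# A minimal, runnable sketch of the same registration/enforcement flow — the
# rule names and the '@' (always allow) / '!' (always deny) checks come from
# oslo.policy itself; nothing monasca-specific is assumed:
from oslo_config import cfg
from oslo_policy import policy as os_policy

enforcer = os_policy.Enforcer(cfg.ConfigOpts())
enforcer.register_defaults([
    os_policy.RuleDefault('example:allowed', '@'),  # '@' always passes
    os_policy.RuleDefault('example:denied', '!'),   # '!' always fails
])
creds = {'roles': ['monasca-user'], 'project_id': 'fake'}
assert enforcer.enforce('example:allowed', {}, creds)
assert not enforcer.enforce('example:denied', {}, creds)
# enforce(..., do_raise=True) raises PolicyNotAuthorized instead of returning
# False, which is the behaviour test_authorize_bad_action_throws checks below.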
-from falcon import testing - -from oslo_context import context -from oslo_policy import policy as os_policy - -from monasca_api.api.core import request -from monasca_api.common.policy import policy_engine as policy -from monasca_api.policies import roles_list_to_check_str -from monasca_api.tests import base - - -class TestPolicyFileCase(base.BaseTestCase): - def setUp(self): - super(TestPolicyFileCase, self).setUp() - self.context = context.RequestContext(user='fake', - project_id='fake', - roles=['fake']) - self.target = {'project_id': 'fake'} - - def test_modified_policy_reloads(self): - tmp_file = \ - self.create_tempfiles(files=[('policies', '{}')], ext='.yaml')[0] - base.BaseTestCase.conf_override(policy_file=tmp_file, - group='oslo_policy') - - policy.reset() - policy.init() - action = 'example:test' - rule = os_policy.RuleDefault(action, '') - policy._ENFORCER.register_defaults([rule]) - - with open(tmp_file, 'w') as policy_file: - policy_file.write('{"example:test": ""}') - policy.authorize(self.context, action, self.target) - - with open(tmp_file, 'w') as policy_file: - policy_file.write('{"example:test": "!"}') - policy._ENFORCER.load_rules(True) - self.assertRaises(os_policy.PolicyNotAuthorized, policy.authorize, - self.context, action, self.target) - - -class TestPolicyCase(base.BaseTestCase): - def setUp(self): - super(TestPolicyCase, self).setUp() - rules = [ - os_policy.RuleDefault("true", "@"), - os_policy.RuleDefault("example:allowed", "@"), - os_policy.RuleDefault("example:denied", "!"), - os_policy.RuleDefault("example:lowercase_monasca_user", - "role:monasca_user or role:sysadmin"), - os_policy.RuleDefault("example:uppercase_monasca_user", - "role:MONASCA_USER or role:sysadmin"), - ] - policy.reset() - policy.init() - policy._ENFORCER.register_defaults(rules) - - def test_authorize_nonexist_action_throws(self): - action = "example:noexist" - ctx = request.Request( - testing.create_environ( - path="/", - headers={ - "X_USER_ID": "fake", - "X_PROJECT_ID": "fake", - "X_ROLES": "member" - } - ) - ) - self.assertRaises(os_policy.PolicyNotRegistered, policy.authorize, - ctx.context, action, {}) - - def test_authorize_bad_action_throws(self): - action = "example:denied" - ctx = request.Request( - testing.create_environ( - path="/", - headers={ - "X_USER_ID": "fake", - "X_PROJECT_ID": "fake", - "X_ROLES": "member" - } - ) - ) - self.assertRaises(os_policy.PolicyNotAuthorized, policy.authorize, - ctx.context, action, {}) - - def test_authorize_bad_action_no_exception(self): - action = "example:denied" - ctx = request.Request( - testing.create_environ( - path="/", - headers={ - "X_USER_ID": "fake", - "X_PROJECT_ID": "fake", - "X_ROLES": "member" - } - ) - ) - result = policy.authorize(ctx.context, action, {}, False) - self.assertFalse(result) - - def test_authorize_good_action(self): - action = "example:allowed" - ctx = request.Request( - testing.create_environ( - path="/", - headers={ - "X_USER_ID": "fake", - "X_PROJECT_ID": "fake", - "X_ROLES": "member" - } - ) - ) - result = policy.authorize(ctx.context, action, {}, False) - self.assertTrue(result) - - def test_ignore_case_role_check(self): - lowercase_action = "example:lowercase_monasca_user" - uppercase_action = "example:uppercase_monasca_user" - - monasca_user_context = request.Request( - testing.create_environ( - path="/", - headers={ - "X_USER_ID": "monasca_user", - "X_PROJECT_ID": "fake", - "X_ROLES": "MONASCA_user" - } - ) - ) - self.assertTrue(policy.authorize(monasca_user_context.context, - lowercase_action, - {})) - 
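# Aside: request.Request here wraps falcon's Request, and the keystone-style
# X_* headers become the request context. A minimal sketch with plain falcon
# (no monasca imports) showing how such a header travels through a fabricated
# WSGI environ, and why the role comparison above must be case-insensitive:
import falcon
import falcon.testing

env = falcon.testing.create_environ(path='/',
                                    headers={'X_ROLES': 'MONASCA_user'})
req = falcon.Request(env)
roles = [r.strip() for r in req.get_header('X_ROLES').split(',')]
assert roles == ['MONASCA_user']                       # case kept on the wire
assert [r.lower() for r in roles] == ['monasca_user']  # normalized for checks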
self.assertTrue(policy.authorize(monasca_user_context.context, - uppercase_action, - {})) - - -class RegisteredPoliciesTestCase(base.BaseTestCase): - def __init__(self, *args, **kwds): - super(RegisteredPoliciesTestCase, self).__init__(*args, **kwds) - self.agent_roles = ['monasca-agent'] - self.readonly_roles = ['monasca-read-only-user'] - self.default_roles = ['monasca-user'] - self.delegate_roles = ['admin'] - - def test_alarms_policies_roles(self): - alarms_policies = { - 'api:alarms:definition:post': self.default_roles, - 'api:alarms:definition:get': - self.default_roles + self.readonly_roles, - 'api:alarms:definition:put': self.default_roles, - 'api:alarms:definition:patch': self.default_roles, - 'api:alarms:definition:delete': self.default_roles, - 'api:alarms:put': self.default_roles, - 'api:alarms:patch': self.default_roles, - 'api:alarms:delete': self.default_roles, - 'api:alarms:get': self.default_roles + self.readonly_roles, - 'api:alarms:count': self.default_roles + self.readonly_roles, - 'api:alarms:state_history': self.default_roles + self.readonly_roles - } - - self._assert_rules(alarms_policies) - - def test_metrics_policies_roles(self): - metrics_policies = { - 'api:metrics:get': self.default_roles + self.readonly_roles, - 'api:metrics:post': self.agent_roles + self.default_roles, - 'api:metrics:dimension:values': - self.default_roles + self.readonly_roles, - 'api:metrics:dimension:names': - self.default_roles + self.readonly_roles - - } - self._assert_rules(metrics_policies) - - def test_notifications_policies_roles(self): - notifications_policies = { - 'api:notifications:put': self.default_roles, - 'api:notifications:patch': self.default_roles, - 'api:notifications:delete': self.default_roles, - 'api:notifications:get': self.default_roles + self.readonly_roles, - 'api:notifications:post': self.default_roles, - 'api:notifications:type': self.default_roles + self.readonly_roles, - - } - self._assert_rules(notifications_policies) - - def test_versions_policies_roles(self): - versions_policies = { - 'api:versions': ['any_rule!'] - - } - self._assert_rules(versions_policies) - - def test_healthcheck_policies_roles(self): - healthcheck_policies = { - 'api:healthcheck': ['any_rule!'] - } - self._assert_rules(healthcheck_policies) - - def test_delegate_policies_roles(self): - delegate_policies = { - 'api:delegate': self.delegate_roles - } - self._assert_rules(delegate_policies) - - def _assert_rules(self, policies_list): - for policy_name in policies_list: - registered_rule = policy.get_rules()[policy_name] - if hasattr(registered_rule, 'rules'): - self.assertEqual(len(registered_rule.rules), - len(policies_list[policy_name])) - for role in policies_list[policy_name]: - ctx = self._get_request_context(role) - self.assertTrue(policy.authorize(ctx.context, - policy_name, - {}) - ) - - @staticmethod - def _get_request_context(role): - return request.Request( - testing.create_environ( - path='/', - headers={'X_ROLES': role} - ) - ) - - -class PolicyUtilsTestCase(base.BaseTestCase): - def test_roles_list_to_check_str(self): - self.assertEqual(roles_list_to_check_str(['test_role']), 'role:test_role') - self.assertEqual(roles_list_to_check_str(['role1', 'role2', 'role3']), - 'role:role1 or role:role2 or role:role3') - self.assertEqual(roles_list_to_check_str(['@']), '@') - self.assertEqual(roles_list_to_check_str(['role1', '@', 'role2']), - 'role:role1 or @ or role:role2') diff --git a/monasca_api/tests/test_query_helpers.py b/monasca_api/tests/test_query_helpers.py deleted file mode 
100644 index a77929529..000000000 --- a/monasca_api/tests/test_query_helpers.py +++ /dev/null @@ -1,143 +0,0 @@ -# Copyright 2015 Cray Inc. All Rights Reserved. -# Copyright 2016 Hewlett Packard Enterprise Development Company, L.P. -# Copyright 2017 Fujitsu LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from unittest.mock import Mock - -from monasca_api.tests import base -from monasca_api.v2.reference import helpers - - -class TestGetQueryDimension(base.BaseTestCase): - - def test_no_dimensions(self): - req = Mock() - - req.query_string = "foo=bar" - - result = helpers.get_query_dimensions(req) - - self.assertEqual(result, {}) - - def test_one_dimensions(self): - req = Mock() - - req.query_string = "foo=bar&dimensions=Dimension:Value" - - result = helpers.get_query_dimensions(req) - - self.assertEqual(result, {"Dimension": "Value"}) - - def test_comma_sep_dimensions(self): - req = Mock() - - req.query_string = ("foo=bar&" - "dimensions=Dimension:Value,Dimension-2:Value-2") - - result = helpers.get_query_dimensions(req) - - self.assertEqual( - result, {"Dimension": "Value", "Dimension-2": "Value-2"}) - - def test_multiple_dimension_params(self): - req = Mock() - - req.query_string = ("foo=bar&" - "dimensions=Dimension:Value&" - "dimensions=Dimension-2:Value-2") - - result = helpers.get_query_dimensions(req) - - self.assertEqual( - result, {"Dimension": "Value", "Dimension-2": "Value-2"}) - - def test_multiple_dimension_params_with_comma_sep_dimensions(self): - req = Mock() - - req.query_string = ("foo=bar&" - "dimensions=Dimension-3:Value-3&" - "dimensions=Dimension:Value,Dimension-2:Value-2") - - result = helpers.get_query_dimensions(req) - - self.assertEqual( - result, {"Dimension": "Value", - "Dimension-2": "Value-2", - "Dimension-3": "Value-3"}) - - def test_dimension_no_value(self): - req = Mock() - req.query_string = ("foo=bar&dimensions=Dimension_no_value") - - result = helpers.get_query_dimensions(req) - self.assertEqual(result, {"Dimension_no_value": ""}) - - def test_dimension_multi_value(self): - req = Mock() - req.query_string = ("foo=bar&dimensions=Dimension_multi_value:one|two|three") - - result = helpers.get_query_dimensions(req) - self.assertEqual(result, {"Dimension_multi_value": "one|two|three"}) - - def test_dimension_with_multi_colons(self): - req = Mock() - req.query_string = ("foo=bar&dimensions=url:http://192.168.10.4:5601," - "hostname:monasca,component:kibana,service:monitoring") - - result = helpers.get_query_dimensions(req) - self.assertEqual(result, {"url": "http://192.168.10.4:5601", - "hostname": "monasca", - "component": "kibana", - "service": "monitoring"}) - - def test_empty_dimension(self): - req = Mock() - req.query_string = ("foo=bar&dimensions=") - - result = helpers.get_query_dimensions(req) - self.assertEqual(result, {}) - - -class TestGetOldQueryParams(base.BaseTestCase): - - def test_old_query_params(self): - uri = Mock() - uri.query = "foo=bar&spam=ham" - - result = helpers._get_old_query_params(uri) - self.assertEqual(result, ["foo=bar", 
"spam=ham"]) - - def test_old_query_params_with_equals(self): - uri = Mock() - uri.query = "foo=spam=ham" - - result = helpers._get_old_query_params(uri) - self.assertEqual(result, ["foo=spam%3Dham"]) - - def test_old_query_params_except_offset(self): - uri = Mock() - uri.query = "foo=bar&spam=ham" - result = [] - - helpers._get_old_query_params_except_offset(result, uri) - self.assertEqual(result, ["foo=bar", "spam=ham"]) - - def test_old_query_params_except_offset_with_equals(self): - uri = Mock() - uri.query = "foo=spam=ham&offset=bar" - result = [] - - helpers._get_old_query_params_except_offset(result, uri) - self.assertEqual(result, ["foo=spam%3Dham"]) diff --git a/monasca_api/tests/test_repositories.py b/monasca_api/tests/test_repositories.py deleted file mode 100644 index 2b5242e29..000000000 --- a/monasca_api/tests/test_repositories.py +++ /dev/null @@ -1,752 +0,0 @@ -# Copyright 2015 Cray Inc. All Rights Reserved. -# (C) Copyright 2016-2018 Hewlett Packard Enterprise Development LP -# Copyright 2017 Fujitsu LIMITED -# (C) Copyright 2017-2018 SUSE LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import binascii -from collections import namedtuple -from datetime import datetime - -import cassandra -from cassandra.auth import PlainTextAuthProvider -from unittest.mock import patch - -from oslo_config import cfg -from oslo_utils import timeutils - -from monasca_api.common.repositories.cassandra import metrics_repository \ - as cassandra_repo -from monasca_api.common.repositories.influxdb import metrics_repository \ - as influxdb_repo -from monasca_api.tests import base - -CONF = cfg.CONF - - -class TestRepoMetricsInfluxDB(base.BaseTestCase): - @patch("monasca_api.common.repositories.influxdb." 
- "metrics_repository.client.InfluxDBClient") - def test_measurement_list(self, influxdb_client_mock): - mock_client = influxdb_client_mock.return_value - mock_client.query.return_value.raw = { - "series": [ - { - "name": "dummy.series", - "values": [ - ["2015-03-14T09:26:53.59Z", 2, None], - ["2015-03-14T09:26:53.591Z", 2.5, ''], - ["2015-03-14T09:26:53.6Z", 4.0, '{}'], - ["2015-03-14T09:26:54Z", 4, '{"key": "value"}'], - ["2015-03-14T09:26:53.1234567Z", 1, '{}'] - ] - } - ] - } - - repo = influxdb_repo.MetricsRepository() - repo._version = 'from_0.11.0' - result = repo.measurement_list( - "tenant_id", - "region", - name=None, - dimensions=None, - start_timestamp=1, - end_timestamp=2, - offset=None, - limit=1, - merge_metrics_flag=True, - group_by=None) - - self.assertEqual(len(result), 1) - self.assertIsNone(result[0]['dimensions']) - self.assertEqual(result[0]['name'], 'dummy.series') - self.assertEqual(result[0]['columns'], - ['timestamp', 'value', 'value_meta']) - - measurements = result[0]['measurements'] - - self.assertEqual( - [["2015-03-14T09:26:53.590Z", 2, {}], - ["2015-03-14T09:26:53.591Z", 2.5, {}], - ["2015-03-14T09:26:53.600Z", 4.0, {}], - ["2015-03-14T09:26:54.000Z", 4, {"key": "value"}], - ["2015-03-14T09:26:53.123Z", 1, {}]], - measurements - ) - - @patch("monasca_api.common.repositories.influxdb." - "metrics_repository.client.InfluxDBClient") - def test_list_metrics(self, influxdb_client_mock): - mock_client = influxdb_client_mock.return_value - mock_client.query.return_value.raw = { - u'series': [{ - u'values': [[ - u'disk.space_used_perc,_region=region,_tenant_id=' - u'0b5e7d8c43f74430add94fba09ffd66e,device=rootfs,' - 'hostname=host0,hosttype=native,mount_point=/' - ]], - u'columns':[u'key'] - }] - } - - repo = influxdb_repo.MetricsRepository() - repo._version = 'from_0.11.0' - result = repo.list_metrics( - "0b5e7d8c43f74430add94fba09ffd66e", - "region", - name="disk.space_user_perc", - dimensions={ - "hostname": "host0", - "hosttype": "native", - "mount_point": "/", - "device": "rootfs"}, - offset=None, - limit=1) - - self.assertEqual(result, [{ - u'id': '0', - u'name': u'disk.space_used_perc', - u'dimensions': { - u'device': u'rootfs', - u'hostname': u'host0', - u'mount_point': u'/', - u'hosttype': u'native' - }, - }]) - - @patch('monasca_api.common.repositories.influxdb.' 
- 'metrics_repository.client.InfluxDBClient') - def test_metrics_statistics(self, influxdb_client_mock): - mock_client = influxdb_client_mock.return_value - mock_client.query.return_value.raw = { - u'series': [{ - u'values': [[ - u'1970-01-01T00:00:00Z', 0.047 - ]], - u'name': u'cpu.utilization', - u'columns': [u'time', u'mean']}], - u'statement_id': 0 - } - - tenant_id = '1' - region = 'USA' - name = 'cpu.utilization' - start_timestamp = 1484036107.86 - statistics = [u"avg"] - limit = 10000 - dimensions = None - end_timestamp = None - period = None - offset = None - merge_metrics_flag = None - group_by = None - - repo = influxdb_repo.MetricsRepository() - repo._version = 'from_0.11.0' - stats_list = repo.metrics_statistics(tenant_id, region, name, - dimensions, start_timestamp, - end_timestamp, statistics, - period, offset, limit, - merge_metrics_flag, group_by) - expected_result = [{ - u'columns': [u'timestamp', u'avg'], - u'dimensions': {}, - u'id': '0', - u'name': u'cpu.utilization', - u'statistics': [[u'1970-01-01T00:00:00Z', 0.047]]}] - self.assertEqual(stats_list, expected_result) - - def test_build_group_by_clause_with_period(self): - group_by = 'hostname,service' - period = 300 - expected_clause = ' group by hostname,service,time(300s) fill(none)' - - repo = influxdb_repo.MetricsRepository() - repo._version = 'from_0.11.0' - clause = repo._build_group_by_clause(group_by, period) - self.assertEqual(clause, expected_clause) - - def test_build_group_by_clause_without_period(self): - group_by = 'hostname,service' - expected_clause = ' group by hostname,service' - - repo = influxdb_repo.MetricsRepository() - repo._version = 'from_0.11.0' - clause = repo._build_group_by_clause(group_by) - self.assertEqual(clause, expected_clause) - - @patch("monasca_api.common.repositories.influxdb." 
- "metrics_repository.client.InfluxDBClient") - def test_list_dimension_values(self, influxdb_client_mock, timestamp=False): - mock_client = influxdb_client_mock.return_value - - tenant_id = u'38dc2a2549f94d2e9a4fa1cc45a4970c' - region = u'useast' - metric = u'custom_metric' - column = u'hostname' - hostname = u'custom_host' - start_timestamp = 1571917171275 - end_timestamp = 1572917171275 - mock_client.query.return_value.raw = { - u'series': [{ - u'values': [[column, hostname]], - u'name': metric, - u'columns': [u'key', u'value'] - }] - } - repo = influxdb_repo.MetricsRepository() - repo._version = 'from_0.11.0' - mock_client.query.reset_mock() - - db_per_tenant = repo.conf.influxdb.db_per_tenant - database = repo.conf.influxdb.database_name - database += "_%s" % tenant_id if db_per_tenant else "" - - result = (repo.list_dimension_values(tenant_id, region, metric, column, - start_timestamp, end_timestamp) - if timestamp else - repo.list_dimension_values(tenant_id, region, metric, column)) - - self.assertEqual(result, [{u'dimension_value': hostname}]) - - query = ('select * from "{metric}"' - ' where _region = \'{region}\'' - .format(region=region, metric=metric)) - query += ('' if db_per_tenant else ' and _tenant_id = \'{tenant_id}\'' - .format(tenant_id=tenant_id)) - query += (' and "{column}" = \'{hostname}\'' - .format(column=column, - hostname=hostname)) - query += (' and time >= {start_timestamp}000000u' - ' and time < {end_timestamp}000000u' - .format(start_timestamp=start_timestamp, - end_timestamp=end_timestamp) - if timestamp else '') - mock_client.query.assert_called_with(query, database=database) - - def test_list_dimension_values_with_timestamp(self): - self.test_list_dimension_values(timestamp=True) - - @patch("monasca_api.common.repositories.influxdb." 
- "metrics_repository.client.InfluxDBClient") - def test_list_dimension_names(self, influxdb_client_mock, timestamp=False): - mock_client = influxdb_client_mock.return_value - - tenant_id = u'38dc2a2549f94d2e9a4fa1cc45a4970c' - region = u'useast' - metric = u'custom_metric' - start_timestamp = 1571917171275 - end_timestamp = 1572917171275 - mock_client.query.return_value.raw = { - u'series': [{ - u'values': [[u'_region'], [u'_tenant_id'], [u'hostname'], - [u'service']], - u'name': metric, - u'columns': [u'tagKey'] - }] - } - - repo = influxdb_repo.MetricsRepository() - repo._version = 'from_0.11.0' - mock_client.query.reset_mock() - - db_per_tenant = repo.conf.influxdb.db_per_tenant - database = repo.conf.influxdb.database_name - database += "_%s" % tenant_id if db_per_tenant else "" - - result = (repo.list_dimension_names(tenant_id, region, metric, - start_timestamp, end_timestamp) - if timestamp else - repo.list_dimension_names(tenant_id, region, metric)) - - self.assertEqual(result, - [ - {u'dimension_name': u'hostname'}, - {u'dimension_name': u'service'} - ]) - - query_last = ('select * from "{metric}"' - ' where _region = \'{region}\'' - .format(region=region, metric=metric)) - query_last += ('' if db_per_tenant else ' and _tenant_id = \'{tenant_id}\'' - .format(tenant_id=tenant_id)) - query_last += (' and time >= {start_timestamp}000000u' - ' and time < {end_timestamp}000000u' - .format(start_timestamp=start_timestamp, - end_timestamp=end_timestamp) - if timestamp else '') - query_last += (' and service != \'\'') - - mock_client.query.assert_called_with(query_last, database=database) - - def test_list_dimension_names_with_timestamp(self): - self.test_list_dimension_names(timestamp=True) - - @patch("monasca_api.common.repositories.influxdb." - "metrics_repository.requests.head") - def test_check_status(self, head_mock): - head_mock.return_value.ok = True - head_mock.return_value.status_code = 204 - - result = influxdb_repo.MetricsRepository.check_status() - - self.assertEqual(result, (True, 'OK')) - - @patch("monasca_api.common.repositories.influxdb." - "metrics_repository.requests.head") - def test_check_status_server_error(self, head_mock): - head_mock.return_value.status_code = 500 - head_mock.return_value.ok = False - - result = influxdb_repo.MetricsRepository.check_status() - - self.assertEqual(result, (False, 'Error: 500')) - - -class TestRepoMetricsCassandra(base.BaseTestCase): - def setUp(self): - super(TestRepoMetricsCassandra, self).setUp() - self.conf_default(contact_points='127.0.0.1', - group='cassandra') - - @patch("monasca_api.common.repositories.cassandra." - "metrics_repository.Cluster.connect") - def test_init(self, cassandra_connect_mock): - repo = cassandra_repo.MetricsRepository() - self.assertIsNone( - repo.cluster.auth_provider, - 'cassandra cluster auth provider is expected to None' - ) - - repo.conf.cassandra.user = 'cassandra' - repo.conf.cassandra.password = 'cassandra' - repo = cassandra_repo.MetricsRepository() - self.assertIsInstance( - repo.cluster.auth_provider, - PlainTextAuthProvider, - 'cassandra cluster auth provider is expected to be PlainTextAuthProvider' - ) - - @patch("monasca_api.common.repositories.cassandra." 
- "metrics_repository.Cluster.connect") - def test_list_metrics(self, cassandra_connect_mock): - cassandra_session_mock = cassandra_connect_mock.return_value - cassandra_future_mock = cassandra_session_mock.execute_async.return_value - - Metric = namedtuple('Metric', 'metric_id metric_name dimensions') - - cassandra_future_mock.result.return_value = [ - Metric( - metric_id=binascii.unhexlify(b"01d39f19798ed27bbf458300bf843edd17654614"), - metric_name='disk.space_used_perc', - dimensions=[ - 'device\trootfs', - 'hostname\thost0', - 'hosttype\tnative', - 'mount_point\t/'] - ) - ] - - repo = cassandra_repo.MetricsRepository() - - result = repo.list_metrics( - "0b5e7d8c43f74430add94fba09ffd66e", - "region", - name="disk.space_user_perc", - dimensions={ - "hostname": "host0", - "hosttype": "native", - "mount_point": "/", - "device": "rootfs"}, - offset=None, - limit=1) - - self.assertEqual([{ - u'id': b'01d39f19798ed27bbf458300bf843edd17654614', - u'name': u'disk.space_used_perc', - u'dimensions': { - u'device': u'rootfs', - u'hostname': u'host0', - u'mount_point': u'/', - u'hosttype': u'native' - }}], result) - - # As Cassandra allows sparse data, it is possible to have a missing metric_id - @patch("monasca_api.common.repositories.cassandra." - "metrics_repository.Cluster.connect") - def test_list_metrics_empty_metric_id(self, cassandra_connect_mock): - cassandra_session_mock = cassandra_connect_mock.return_value - cassandra_future_mock = cassandra_session_mock.execute_async.return_value - - Metric = namedtuple('Metric', 'metric_id metric_name dimensions') - - cassandra_future_mock.result.return_value = [ - Metric( - metric_id=None, - metric_name='disk.space_used_perc', - dimensions=[ - 'device\trootfs', - 'hostname\thost0', - 'hosttype\tnative', - 'mount_point\t/'] - ) - ] - - repo = cassandra_repo.MetricsRepository() - - result = repo.list_metrics( - "0b5e7d8c43f74430add94fba09ffd66e", - "region", - name="disk.space_user_perc", - dimensions={ - "hostname": "host0", - "hosttype": "native", - "mount_point": "/", - "device": "rootfs"}, - offset=None, - limit=1) - - self.assertEqual([{ - u'id': None, - u'name': u'disk.space_used_perc', - u'dimensions': { - u'device': u'rootfs', - u'hostname': u'host0', - u'mount_point': u'/', - u'hosttype': u'native' - }}], result) - - @patch("monasca_api.common.repositories.cassandra." - "metrics_repository.Cluster.connect") - def test_list_metric_names(self, cassandra_connect_mock): - cassandra_session_mock = cassandra_connect_mock.return_value - cassandra_future_mock = cassandra_session_mock.execute_async.return_value - - Metric = namedtuple('Metric', 'metric_name') - - cassandra_future_mock.result.return_value = [ - Metric('disk.space_used_perc'), - Metric('cpu.idle_perc') - ] - - cassandra_session_mock.execute.return_value = [ - Metric('disk.space_used_perc'), - Metric('cpu.idle_perc') - ] - - repo = cassandra_repo.MetricsRepository() - result = repo.list_metric_names( - "0b5e7d8c43f74430add94fba09ffd66e", - "region", - dimensions={ - "hostname": "host0", - "hosttype": "native", - "mount_point": "/", - "device": "rootfs"}) - - self.assertEqual([ - { - u'name': u'cpu.idle_perc' - }, - { - u'name': u'disk.space_used_perc' - } - ], result) - - @patch("monasca_api.common.repositories.cassandra." 
- "metrics_repository.Cluster.connect") - def test_measurement_list(self, cassandra_connect_mock): - Measurement = namedtuple('Measurement', 'time_stamp value value_meta') - - cassandra_session_mock = cassandra_connect_mock.return_value - cassandra_future_mock = cassandra_session_mock.execute_async.return_value - - Metric = namedtuple('Metric', 'metric_id metric_name dimensions') - - cassandra_future_mock.result.side_effect = [ - [ - Metric( - metric_id=binascii.unhexlify(b"01d39f19798ed27bbf458300bf843edd17654614"), - metric_name='disk.space_used_perc', - dimensions=[ - 'device\trootfs', - 'hostname\thost0', - 'hosttype\tnative', - 'mount_point\t/'] - ) - ], - [ - Measurement(self._convert_time_string("2015-03-14T09:26:53.59Z"), 2, None), - Measurement(self._convert_time_string("2015-03-14T09:26:53.591Z"), 4, - '{"key": "value"}'), - Measurement(self._convert_time_string("2015-03-14T09:26:53.6Z"), 2.5, ''), - Measurement(self._convert_time_string("2015-03-14T09:26:54.0Z"), 4.0, '{}'), - ] - ] - - repo = cassandra_repo.MetricsRepository() - result = repo.measurement_list( - "tenant_id", - "region", - name="disk.space_used_perc", - dimensions=None, - start_timestamp=1, - end_timestamp=2, - offset=None, - limit=2, - merge_metrics_flag=True, - group_by=None) - - self.assertEqual(len(result), 1) - self.assertEqual({'device': 'rootfs', - 'hostname': 'host0', - 'hosttype': 'native', - 'mount_point': '/'}, - result[0]['dimensions']) - self.assertEqual(result[0]['name'], 'disk.space_used_perc') - self.assertEqual(result[0]['columns'], - ['timestamp', 'value', 'value_meta']) - - self.assertEqual( - [['2015-03-14T09:26:53.590Z', 2, {}], - ['2015-03-14T09:26:53.591Z', 4, {'key': 'value'}]], - result[0]['measurements'] - ) - - @patch("monasca_api.common.repositories.cassandra." 
- "metrics_repository.Cluster.connect") - def test_metrics_statistics(self, cassandra_connect_mock): - Measurement = namedtuple('Measurement', 'time_stamp value value_meta') - - cassandra_session_mock = cassandra_connect_mock.return_value - cassandra_future_mock = cassandra_session_mock.execute_async.return_value - - Metric = namedtuple('Metric', 'metric_id metric_name dimensions') - - cassandra_future_mock.result.side_effect = [ - [ - Metric( - metric_id=binascii.unhexlify(b"01d39f19798ed27bbf458300bf843edd17654614"), - metric_name='cpu.idle_perc', - dimensions=[ - 'device\trootfs', - 'hostname\thost0', - 'hosttype\tnative', - 'mount_point\t/'] - ) - ], - [ - Measurement(self._convert_time_string("2016-05-19T11:58:24Z"), 95.0, '{}'), - Measurement(self._convert_time_string("2016-05-19T11:58:25Z"), 97.0, '{}'), - Measurement(self._convert_time_string("2016-05-19T11:58:26Z"), 94.0, '{}'), - Measurement(self._convert_time_string("2016-05-19T11:58:27Z"), 96.0, '{}'), - ] - ] - - start_timestamp = (self._convert_time_string("2016-05-19T11:58:24Z") - - datetime(1970, 1, 1)).total_seconds() - end_timestamp = (self._convert_time_string("2016-05-19T11:58:27Z") - - datetime(1970, 1, 1)).total_seconds() - print(start_timestamp) - - repo = cassandra_repo.MetricsRepository() - result = repo.metrics_statistics( - "tenant_id", - "region", - name="cpu.idle_perc", - dimensions=None, - start_timestamp=start_timestamp, - end_timestamp=end_timestamp, - statistics=['avg', 'min', 'max', 'count', 'sum'], - period=300, - offset=None, - limit=1, - merge_metrics_flag=True, - group_by=None) - - self.assertEqual([ - { - u'dimensions': {'device': 'rootfs', - 'hostname': 'host0', - 'hosttype': 'native', - 'mount_point': '/'}, - u'end_time': u'2016-05-19T11:58:27.000Z', - u'statistics': [[u'2016-05-19T11:58:24.000Z', 95.5, 94.0, 97.0, 4, 382.0]], - u'name': u'cpu.idle_perc', - u'columns': [u'timestamp', 'avg', 'min', 'max', 'count', 'sum'], - u'id': b'01d39f19798ed27bbf458300bf843edd17654614' - } - ], result) - - cassandra_future_mock.result.side_effect = [ - [ - Metric( - metric_id=binascii.unhexlify(b"01d39f19798ed27bbf458300bf843edd17654614"), - metric_name='cpu.idle_perc', - dimensions=[ - 'device\trootfs', - 'hostname\thost0', - 'hosttype\tnative', - 'mount_point\t/'] - ) - ], - [ - Measurement(self._convert_time_string("2016-05-19T11:58:24Z"), 95.0, '{}'), - Measurement(self._convert_time_string("2016-05-19T11:58:25Z"), 97.0, '{}'), - Measurement(self._convert_time_string("2016-05-19T11:58:26Z"), 94.0, '{}'), - Measurement(self._convert_time_string("2016-05-19T11:58:27Z"), 96.0, '{}'), - ] - ] - - result = repo.metrics_statistics( - "tenant_id", - "region", - name="cpu.idle_perc", - dimensions=None, - start_timestamp=start_timestamp, - end_timestamp=None, - statistics=['avg', 'min', 'max', 'count', 'sum'], - period=300, - offset=None, - limit=1, - merge_metrics_flag=True, - group_by=None) - - self.assertEqual([ - { - u'dimensions': {'device': 'rootfs', - 'hostname': 'host0', - 'hosttype': 'native', - 'mount_point': '/'}, - u'end_time': u'2016-05-19T12:03:23.999Z', - u'statistics': [[u'2016-05-19T11:58:24.000Z', 95.5, 94.0, 97.0, 4, 382.0]], - u'name': u'cpu.idle_perc', - u'columns': [u'timestamp', 'avg', 'min', 'max', 'count', 'sum'], - u'id': b'01d39f19798ed27bbf458300bf843edd17654614' - } - ], result) - - @patch("monasca_api.common.repositories.cassandra." 
- "metrics_repository.Cluster.connect") - def test_alarm_history(self, cassandra_connect_mock): - AlarmHistory = namedtuple('AlarmHistory', 'alarm_id, time_stamp, metrics, ' - 'new_state, old_state, reason, ' - 'reason_data, sub_alarms, tenant_id') - - cassandra_session_mock = cassandra_connect_mock.return_value - cassandra_session_mock.execute.return_value = [ - AlarmHistory('741e1aa149524c0f9887a8d6750f67b1', - '09c2f5e7-9245-4b7e-bce1-01ed64a3c63d', - self._convert_time_string("2016-05-19T11:58:27Z"), - """[{ - "dimensions": {"hostname": "devstack", "service": "monitoring"}, - "id": "", - "name": "cpu.idle_perc" - }]""", - 'OK', - 'UNDETERMINED', - 'The alarm threshold(s) have not been exceeded for the sub-alarms: ' - 'avg(cpu.idle_perc) < 10.0 times 3 with the values: [84.35]', - '{}', - """[ - { - "sub_alarm_state": "OK", - "currentValues": [ - "84.35" - ], - "sub_alarm_expression": { - "function": "AVG", - "period": 60, - "threshold": 10.0, - "periods": 3, - "operator": "LT", - "metric_definition": { - "dimensions": "{}", - "id": "", - "name": "cpu.idle_perc" - } - } - } - ]""") - ] - - repo = cassandra_repo.MetricsRepository() - result = repo.alarm_history('741e1aa149524c0f9887a8d6750f67b1', - ['09c2f5e7-9245-4b7e-bce1-01ed64a3c63d'], - None, None, None, None) - - # TODO(Cassandra) shorted out temporarily until the api is implemented in Cassandra - self.assertNotEqual( - [{ - u'id': u'1463659107000', - u'time_stamp': u'2016-05-19T11:58:27.000Z', - u'new_state': u'OK', - u'old_state': u'UNDETERMINED', - u'reason_data': u'{}', - u'reason': u'The alarm threshold(s) have not been exceeded for the sub-alarms: ' - u'avg(cpu.idle_perc) < 10.0 times 3 with the values: [84.35]', - u'alarm_id': u'09c2f5e7-9245-4b7e-bce1-01ed64a3c63d', - u'metrics': [{ - u'id': u'', - u'name': u'cpu.idle_perc', - u'dimensions': { - u'service': u'monitoring', - u'hostname': u'devstack' - } - }], - u'sub_alarms': [ - { - u'sub_alarm_state': u'OK', - u'currentValues': [ - u'84.35' - ], - u'sub_alarm_expression': { - u'dimensions': u'{}', - u'threshold': 10.0, - u'periods': 3, - u'operator': u'LT', - u'period': 60, - u'metric_name': u'cpu.idle_perc', - u'function': u'AVG' - } - } - ] - }], result) - - @patch("monasca_api.common.repositories.cassandra." - "metrics_repository.Cluster.connect") - def test_check_status(self, _): - repo = cassandra_repo.MetricsRepository() - - result = repo.check_status() - - self.assertEqual(result, (True, 'OK')) - - @patch("monasca_api.common.repositories.cassandra." - "metrics_repository.Cluster.connect") - def test_check_status_server_error(self, cassandra_connect_mock): - repo = cassandra_repo.MetricsRepository() - cassandra_connect_mock.side_effect = \ - cassandra.DriverException("Cluster is already shut down") - - result = repo.check_status() - - self.assertEqual(result, (False, 'Cluster is already shut down')) - - @staticmethod - def _convert_time_string(date_time_string): - dt = timeutils.parse_isotime(date_time_string) - dt = timeutils.normalize_time(dt) - return dt diff --git a/monasca_api/tests/test_request.py b/monasca_api/tests/test_request.py deleted file mode 100644 index ffd40ed9d..000000000 --- a/monasca_api/tests/test_request.py +++ /dev/null @@ -1,153 +0,0 @@ -# Copyright 2016-2017 FUJITSU LIMITED -# Copyright 2018 OP5 AB -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from oslo_policy import policy as os_policy - -from monasca_api.api.core import request -from monasca_api.common.policy import policy_engine as policy -import monasca_api.common.repositories.constants as const -from monasca_api.tests import base -from monasca_api.v2.common import exceptions - - -class TestRequest(base.BaseApiTestCase): - def setUp(self): - super(TestRequest, self).setUp() - rules = [ - os_policy.RuleDefault("example:allowed", "user_id:222"), - ] - policy.reset() - policy.init() - policy._ENFORCER.register_defaults(rules) - - def test_use_context_from_request(self): - req = request.Request( - self.create_environ( - path='/', - headers={ - 'X_AUTH_TOKEN': '111', - 'X_USER_ID': '222', - 'X_PROJECT_ID': '333', - 'X_ROLES': 'terminator,predator', - }, - query_string='project_id=444' - - ) - ) - - self.assertEqual('111', req.context.auth_token) - self.assertEqual('222', req.user_id) - self.assertEqual('333', req.project_id) - self.assertEqual(['terminator', 'predator'], req.roles) - self.assertEqual('444', req.cross_project_id) - - def test_policy_validation_with_target(self): - req = request.Request( - self.create_environ( - path='/', - headers={ - 'X_AUTH_TOKEN': '111', - 'X_USER_ID': '222', - 'X_PROJECT_ID': '333', - } - ) - ) - target = {'project_id': req.project_id, - 'user_id': req.user_id} - self.assertEqual(True, req.can('example:allowed', target)) - - def test_policy_validation_without_target(self): - req = request.Request( - self.create_environ( - path='/', - headers={ - 'X_AUTH_TOKEN': '111', - 'X_USER_ID': '222', - 'X_PROJECT_ID': '333', - } - ) - ) - self.assertEqual(True, req.can('example:allowed')) - - -class TestRequestLimit(base.BaseApiTestCase): - - def test_valid_limit(self): - expected_limit = 10 - req = request.Request( - self.create_environ( - path='/', - query_string='limit=%d' % expected_limit, - headers={ - 'X_AUTH_TOKEN': '111', - 'X_USER_ID': '222', - 'X_PROJECT_ID': '333', - 'X_ROLES': 'terminator,predator' - } - ) - ) - self.assertEqual(expected_limit, req.limit) - - def test_invalid_limit(self): - req = request.Request( - self.create_environ( - path='/', - query_string='limit=abc', - headers={ - 'X_AUTH_TOKEN': '111', - 'X_USER_ID': '222', - 'X_PROJECT_ID': '333', - 'X_ROLES': 'terminator,predator' - } - ) - ) - - # note(trebskit) assertRaises fails to call property - # so we need the actual function - def property_wrapper(): - return req.limit - - self.assertRaises( - exceptions.HTTPUnprocessableEntityError, - property_wrapper - ) - - def test_default_limit(self): - req = request.Request( - self.create_environ( - path='/', - headers={ - 'X_AUTH_TOKEN': '111', - 'X_USER_ID': '222', - 'X_PROJECT_ID': '333', - 'X_ROLES': 'terminator,predator' - } - ) - ) - self.assertEqual(const.PAGE_LIMIT, req.limit) - - def test_to_big_limit(self): - req = request.Request( - self.create_environ( - path='/', - headers={ - 'X_AUTH_TOKEN': '111', - 'X_USER_ID': '222', - 'X_PROJECT_ID': '333', - 'X_ROLES': 'terminator,predator' - }, - query_string='limit={}'.format(const.PAGE_LIMIT + 1), - ) - ) - self.assertEqual(const.PAGE_LIMIT, req.limit) diff --git 
a/monasca_api/tests/test_rest.py b/monasca_api/tests/test_rest.py deleted file mode 100644 index 33e60066c..000000000 --- a/monasca_api/tests/test_rest.py +++ /dev/null @@ -1,81 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from unittest import mock - -from oslotest import base - -from monasca_api.common.rest import exceptions -from monasca_api.common.rest import utils - - -class TestRestUtils(base.BaseTestCase): - - def setUp(self): - super(TestRestUtils, self).setUp() - self.mock_json_patcher = mock.patch('monasca_api.common.rest.utils.json') - self.mock_json = self.mock_json_patcher.start() - - def tearDown(self): - super(TestRestUtils, self).tearDown() - self.mock_json_patcher.stop() - - def test_read_body_with_success(self): - self.mock_json.loads.return_value = "" - payload = mock.Mock() - - utils.read_body(payload) - - self.mock_json.loads.assert_called_once_with(payload.read.return_value) - - def test_read_body_empty_content_in_payload(self): - self.mock_json.loads.return_value = "" - payload = mock.Mock() - payload.read.return_value = None - - self.assertIsNone(utils.read_body(payload)) - - def test_read_body_json_loads_exception(self): - self.mock_json.loads.side_effect = Exception - payload = mock.Mock() - - self.assertRaises(exceptions.DataConversionException, - utils.read_body, payload) - - def test_read_body_unsupported_content_type(self): - unsupported_content_type = mock.Mock() - - self.assertRaises( - exceptions.UnsupportedContentTypeException, utils.read_body, None, - unsupported_content_type) - - def test_read_body_unreadable_content_error(self): - unreadable_content = mock.Mock() - unreadable_content.read.side_effect = Exception - - self.assertRaises( - exceptions.UnreadableContentError, - utils.read_body, unreadable_content) - - def test_as_json_success(self): - data = mock.Mock() - - dumped_json = utils.as_json(data) - - self.assertEqual(dumped_json, self.mock_json.dumps.return_value) - - def test_as_json_with_exception(self): - data = mock.Mock() - self.mock_json.dumps.side_effect = Exception - - self.assertRaises(exceptions.DataConversionException, - utils.as_json, data) diff --git a/monasca_api/tests/test_sub_alarm_definition.py b/monasca_api/tests/test_sub_alarm_definition.py deleted file mode 100644 index 091b5e713..000000000 --- a/monasca_api/tests/test_sub_alarm_definition.py +++ /dev/null @@ -1,124 +0,0 @@ -# Copyright 2019 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
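# For context on the tests below: they feed Monasca alarm expressions such as
# 'avg(darth.vader{device=1,image_id=888}, 60) GT 10.0 times 1' through
# alarm_expr_parser. A toy stand-in for one sub-expression of that shape —
# the regex and group names are illustrative only, not the project's grammar,
# which also supports 'deterministic' and compound expressions:
import re

SUB_EXPR = re.compile(
    r'(?P<function>\w+)\((?P<metric>[\w.]+)'      # avg(darth.vader
    r'(?:\{(?P<dims>[^}]*)\})?'                   # {device=1,image_id=888}
    r'(?:,\s*(?P<period>\d+))?\)\s*'              # , 60)
    r'(?P<operator>GTE|LTE|GT|LT)\s*(?P<threshold>[\d.]+)'
    r'(?:\s+times\s+(?P<periods>\d+))?')

m = SUB_EXPR.match('avg(darth.vader{device=1,image_id=888}, 60) GT 10.0 times 1')
sub = m.groupdict()
assert sub['function'] == 'avg' and sub['operator'] == 'GT'
assert dict(d.split('=') for d in sub['dims'].split(',')) == {
    'device': '1', 'image_id': '888'}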
-from monasca_api.common.repositories.model import sub_alarm_definition -from monasca_api.expression_parser import alarm_expr_parser -from monasca_api.tests import base - - -class TestSubAlarmDefinition(base.BaseTestCase): - def test_init_from_row(self): - sub_alarm_d_dict = {'id': '111', - 'alarm_definition_id': '123', - 'function': 'AVG', - 'metric_name': 'darth.vader', - 'operator': 'GT', - 'threshold': 10, - 'period': 60, - 'periods': 1, - 'is_deterministic': 1, - 'dimensions': 'device=1,image_id=888'} - dimension_dict = {'device': '1', - 'image_id': '888'} - sub_alarm_d = sub_alarm_definition.SubAlarmDefinition(row=sub_alarm_d_dict) - self.assertEqual(sub_alarm_d_dict['id'], sub_alarm_d.id) - self.assertEqual(sub_alarm_d_dict['alarm_definition_id'], sub_alarm_d.alarm_definition_id) - self.assertEqual(sub_alarm_d_dict['metric_name'], sub_alarm_d.metric_name) - self.assertEqual(sub_alarm_d_dict['dimensions'], sub_alarm_d.dimensions_str) - self.assertEqual(dimension_dict, sub_alarm_d.dimensions) - self.assertEqual(sub_alarm_d_dict['function'], sub_alarm_d.function) - self.assertEqual(sub_alarm_d_dict['operator'], sub_alarm_d.operator) - self.assertEqual(sub_alarm_d_dict['period'], sub_alarm_d.period) - self.assertEqual(sub_alarm_d_dict['periods'], sub_alarm_d.periods) - self.assertEqual(sub_alarm_d_dict['threshold'], sub_alarm_d.threshold) - self.assertEqual(True, sub_alarm_d.deterministic) - - def test_init_from_sub_expr(self): - sub_alarm_d_dict = {'function': 'AVG', - 'metric_name': 'darth.vader', - 'operator': 'GT', - 'threshold': 10.0, - 'period': 60, - 'periods': 1, - 'is_deterministic': 0, - 'dimensions': 'device=1,image_id=888'} - dimension_dict = {'device': '1', - 'image_id': '888'} - expression = 'avg(darth.vader{device=1,image_id=888}, 60) GT 10.0 times 1' - sub_expr_list = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list - sub_alarm_d = sub_alarm_definition.SubAlarmDefinition(sub_expr=sub_expr_list[0]) - self.assertEqual(sub_alarm_d_dict['metric_name'], sub_alarm_d.metric_name) - self.assertEqual(sub_alarm_d_dict['dimensions'], sub_alarm_d.dimensions_str) - self.assertEqual(dimension_dict, sub_alarm_d.dimensions) - self.assertEqual(sub_alarm_d_dict['function'], sub_alarm_d.function) - self.assertEqual(sub_alarm_d_dict['operator'], sub_alarm_d.operator) - self.assertEqual(sub_alarm_d_dict['period'], sub_alarm_d.period) - self.assertEqual(sub_alarm_d_dict['periods'], sub_alarm_d.periods) - self.assertEqual(sub_alarm_d_dict['threshold'], sub_alarm_d.threshold) - self.assertEqual(False, sub_alarm_d.deterministic) - - def test_init_from_both_row_and_sub_expr(self): - sub_alarm_d_dict = {'id': '111', - 'alarm_definition_id': '123', - 'function': 'AVG', - 'metric_name': 'darth.vader', - 'operator': 'GT', - 'threshold': 10, - 'period': 60, - 'periods': 1, - 'is_deterministic': 0, - 'dimensions': 'device=1,image_id=888'} - expression = 'avg(darth.vader.compute{device=1,image_id=888}, 60) GT 10.0 times 1' - sub_expr_list = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list - self.assertRaises(Exception, sub_alarm_definition.SubAlarmDefinition, - sub_alarm_d_dict, sub_expr_list) # noqa: E202 - - def test_build_expression_all_parameters(self): - expression = 'avg(darth.vader{over=9000}, deterministic, 60) GT 10.0 times 1' - sub_expr_list = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list - sub_alarm_d = sub_alarm_definition.SubAlarmDefinition(sub_expr=sub_expr_list[0]) - self.assertEqual(expression, sub_alarm_d.expression) - - sub_alarm_d.dimensions_str = 
None - sub_alarm_d.dimensions = None - sub_alarm_d.deterministic = False - sub_alarm_d.period = None - sub_alarm_d.periods = None - self.assertEqual('avg(darth.vader) GT 10.0', sub_alarm_d.expression) - - def test_equality_method(self): - sub_alarm_d_dict = {'id': '111', - 'alarm_definition_id': '123', - 'function': 'AVG', - 'metric_name': 'darth.vader', - 'operator': 'GT', - 'threshold': 10, - 'period': 60, - 'periods': 1, - 'is_deterministic': 0, - 'dimensions': 'device=1,image_id=888'} - sub_alarm_d = sub_alarm_definition.SubAlarmDefinition(row=sub_alarm_d_dict) - sub_alarm_d2 = sub_alarm_definition.SubAlarmDefinition(row=sub_alarm_d_dict) - - # same object - self.assertEqual(True, sub_alarm_d == sub_alarm_d) - # different type - self.assertEqual(False, sub_alarm_d == list()) - # Equal - self.assertEqual(True, sub_alarm_d == sub_alarm_d2) - # equal but different id - sub_alarm_d2.id = '222' - self.assertEqual(True, sub_alarm_d == sub_alarm_d2) - # Not Equal - sub_alarm_d2.metric_name = 'luck.iamyourfather' - self.assertEqual(False, sub_alarm_d == sub_alarm_d2) diff --git a/monasca_api/tests/test_validation.py b/monasca_api/tests/test_validation.py deleted file mode 100644 index dc14e79a4..000000000 --- a/monasca_api/tests/test_validation.py +++ /dev/null @@ -1,379 +0,0 @@ -# (C) Copyright 2015-2017 Hewlett Packard Enterprise Development LP -# Copyright 2015 Cray Inc. All Rights Reserved. -# Copyright 2017 Fujitsu LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
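(Editorial aside: the round-trip asserted by test_build_expression_all_parameters in the hunk above implies a sub-alarm expression string can be rebuilt from its parsed fields. A rough reconstruction of that reassembly under formatting rules inferred from the tests; this is not the actual SubAlarmDefinition.expression property.)

    def build_sub_expression(function, metric_name, dimensions=None,
                             deterministic=False, period=None,
                             operator='GT', threshold=10.0, periods=None):
        # Inferred format, e.g.:
        #   avg(darth.vader{over=9000}, deterministic, 60) GT 10.0 times 1
        inner = metric_name
        if dimensions:
            inner += '{%s}' % ','.join(
                '%s=%s' % kv for kv in sorted(dimensions.items()))
        if deterministic:
            inner += ', deterministic'
        if period:
            inner += ', %d' % period
        expr = '%s(%s) %s %s' % (function.lower(), inner, operator, threshold)
        if periods:
            expr += ' times %d' % periods
        return expr


    print(build_sub_expression('AVG', 'darth.vader', {'over': '9000'},
                               deterministic=True, period=60, periods=1))
    # avg(darth.vader{over=9000}, deterministic, 60) GT 10.0 times 1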
- -import falcon -from unittest import mock - -from monasca_api.tests import base -import monasca_api.v2.common.exceptions as common_exceptions -import monasca_api.v2.common.schemas.alarm_definition_request_body_schema as schemas_alarm_defs -import monasca_api.v2.common.schemas.exceptions as schemas_exceptions -import monasca_api.v2.common.schemas.notifications_request_body_schema as schemas_notifications -import monasca_api.v2.common.validation as validation -import monasca_api.v2.reference.helpers as helpers - - -def mock_req_can(authorised_rule): - if authorised_rule != 'authorized': - raise Exception - - -class TestStateValidation(base.BaseTestCase): - - VALID_STATES = "OK", "ALARM", "UNDETERMINED" - - def test_valid_states(self): - for state in self.VALID_STATES: - validation.validate_alarm_state(state) - - def test_valid_states_lower_case(self): - for state in self.VALID_STATES: - validation.validate_alarm_state(state.lower()) - - def test_invalid_state(self): - self.assertRaises(common_exceptions.HTTPUnprocessableEntityError, - validation.validate_alarm_state, 'BOGUS') - - -class TestSeverityValidation(base.BaseTestCase): - - VALID_SEVERITIES = "LOW", "MEDIUM", "HIGH", "CRITICAL" - - def test_valid_severities(self): - for state in self.VALID_SEVERITIES: - validation.validate_severity_query(state) - - def test_valid_severities_lower_case(self): - for state in self.VALID_SEVERITIES: - validation.validate_severity_query(state.lower()) - - def test_valid_multi_severities(self): - validation.validate_severity_query('|'.join(self.VALID_SEVERITIES)) - - def test_valid_multi_severities_lower_case(self): - validation.validate_severity_query('|'.join(self.VALID_SEVERITIES) - .lower()) - - def test_invalid_state(self): - self.assertRaises(common_exceptions.HTTPUnprocessableEntityError, - validation.validate_severity_query, - 'BOGUS') - self.assertRaises(common_exceptions.HTTPUnprocessableEntityError, - validation.validate_severity_query, - '|'.join([self.VALID_SEVERITIES[0], 'BOGUS'])) - - -class TestRuleValidation(base.BaseApiTestCase): - def test_rule_valid(self): - req = mock.Mock() - req.can = mock_req_can - test_rules = ['Rule1', 'authorized'] - helpers.validate_authorization(req, test_rules) - - def test_rule_invalid(self): - req = mock.Mock() - req.can = mock_req_can - test_rules = ['rule1', 'rule2'] - self.assertRaises( - falcon.HTTPUnauthorized, - helpers.validate_authorization, req, test_rules) - - -class TestTimestampsValidation(base.BaseTestCase): - - def test_valid_timestamps(self): - start_time = '2015-01-01T00:00:00Z' - end_time = '2015-01-01T00:00:01Z' - start_timestamp = helpers._convert_time_string(start_time) - end_timestamp = helpers._convert_time_string(end_time) - - try: - helpers.validate_start_end_timestamps(start_timestamp, - end_timestamp) - except Exception: - self.fail("shouldn't happen") - - def test_same_timestamps(self): - start_time = '2015-01-01T00:00:00Z' - end_time = start_time - start_timestamp = helpers._convert_time_string(start_time) - end_timestamp = helpers._convert_time_string(end_time) - - self.assertRaises( - falcon.HTTPBadRequest, - helpers.validate_start_end_timestamps, - start_timestamp, end_timestamp) - - def test_end_before_than_start(self): - start_time = '2015-01-01T00:00:00Z' - end_time = '2014-12-31T23:59:59Z' - start_timestamp = helpers._convert_time_string(start_time) - end_timestamp = helpers._convert_time_string(end_time) - - self.assertRaises( - falcon.HTTPBadRequest, - helpers.validate_start_end_timestamps, - start_timestamp, 
end_timestamp) - - -class TestConvertTimeString(base.BaseTestCase): - - def test_valid_date_time_string(self): - date_time_string = '2015-01-01T00:00:00Z' - - timestamp = helpers._convert_time_string(date_time_string) - self.assertEqual(1420070400., timestamp) - - def test_valid_date_time_string_with_mills(self): - date_time_string = '2015-01-01T00:00:00.025Z' - - timestamp = helpers._convert_time_string(date_time_string) - self.assertEqual(1420070400.025, timestamp) - - def test_valid_date_time_string_with_timezone(self): - date_time_string = '2015-01-01T09:00:00+09:00' - - timestamp = helpers._convert_time_string(date_time_string) - self.assertEqual(1420070400., timestamp) - - def test_invalid_date_time_string(self): - date_time_string = '2015-01-01T00:00:000Z' - - self.assertRaises( - ValueError, - helpers._convert_time_string, date_time_string) - - -valid_periods = [0, 60] - - -class TestNotificationValidation(base.BaseTestCase): - - def test_validation_for_email(self): - notification = {"name": "MyEmail", "type": "EMAIL", "address": "name@domain.com"} - try: - schemas_notifications.parse_and_validate(notification, valid_periods) - except schemas_exceptions.ValidationException: - self.fail("shouldn't happen") - - def test_validation_exception_for_invalid_email_address(self): - notification = {"name": "MyEmail", "type": "EMAIL", "address": "name@"} - ex = self.assertRaises(schemas_exceptions.ValidationException, - schemas_notifications.parse_and_validate, - notification, valid_periods) - self.assertEqual("Address name@ is not of correct format", str(ex)) - - def test_validation_for_email_non_zero_period(self): - notification = { - "name": "MyEmail", - "type": "EMAIL", - "address": "name@domain.com", - "period": "60"} - try: - schemas_notifications.parse_and_validate(notification, valid_periods) - except schemas_exceptions.ValidationException: - self.fail("shouldn't happen") - - def test_validation_for_webhook(self): - notification = {"name": "MyWebhook", "type": "WEBHOOK", "address": "http://somedomain.com"} - try: - schemas_notifications.parse_and_validate(notification, valid_periods) - except schemas_exceptions.ValidationException: - self.fail("shouldn't happen") - - def test_validation_for_webhook_non_zero_period(self): - notification = {"name": "MyWebhook", "type": "WEBHOOK", "address": "http://somedomain.com", - "period": 60} - try: - schemas_notifications.parse_and_validate(notification, valid_periods) - except schemas_exceptions.ValidationException: - self.fail("shouldn't happen") - - def test_validation_exception_for_webhook_no_scheme(self): - notification = {"name": "MyWebhook", "type": "WEBHOOK", "address": "//somedomain.com"} - ex = self.assertRaises(schemas_exceptions.ValidationException, - schemas_notifications.parse_and_validate, - notification, valid_periods) - self.assertEqual("Address //somedomain.com does not have URL scheme", str(ex)) - - def test_validation_exception_for_webhook_no_netloc(self): - notification = {"name": "MyWebhook", "type": "WEBHOOK", "address": "http://"} - ex = self.assertRaises(schemas_exceptions.ValidationException, - schemas_notifications.parse_and_validate, - notification, valid_periods) - self.assertEqual("Address http:// does not have network location", str(ex)) - - def test_validation_exception_for_webhook_invalid_scheme(self): - notification = {"name": "MyWebhook", "type": "WEBHOOK", "address": "ftp://somedomain.com"} - ex = self.assertRaises(schemas_exceptions.ValidationException, - schemas_notifications.parse_and_validate, - 
notification, valid_periods) - self.assertEqual("Address ftp://somedomain.com scheme is not in ['http', 'https']", str(ex)) - - def test_validation_exception_for_webhook_invalid_period(self): - notification = {"name": "MyWebhook", "type": "WEBHOOK", "address": "//somedomain.com", - "period": "10"} - ex = self.assertRaises(schemas_exceptions.ValidationException, - schemas_notifications.parse_and_validate, - notification, valid_periods) - self.assertEqual("10 is not in the configured list of valid periods: [0, 60]", str(ex)) - - def test_validation_for_pagerduty(self): - notification = {"name": "MyPagerduty", "type": "PAGERDUTY", - "address": "nzH2LVRdMzun11HNC2oD"} - try: - schemas_notifications.parse_and_validate(notification, valid_periods) - except schemas_exceptions.ValidationException: - self.fail("shouldn't happen") - - def test_validation_for_pagerduty_non_zero_period(self): - notification = {"name": "MyPagerduty", "type": "PAGERDUTY", - "address": "nzH2LVRdMzun11HNC2oD", "period": 60} - try: - schemas_notifications.parse_and_validate(notification, valid_periods) - except schemas_exceptions.ValidationException: - self.fail("shouldn't happen") - - def test_validation_for_max_name_address(self): - name = "A" * 250 - self.assertEqual(250, len(name)) - address = "http://" + "A" * 502 + ".io" - self.assertEqual(512, len(address)) - notification = {"name": name, "type": "WEBHOOK", "address": address} - try: - schemas_notifications.parse_and_validate(notification, valid_periods) - except schemas_exceptions.ValidationException: - self.fail("shouldn't happen") - - def test_validation_exception_for_exceeded_name_length(self): - name = "A" * 251 - self.assertEqual(251, len(name)) - notification = {"name": name, "type": "WEBHOOK", "address": "http://somedomain.com"} - self.assertRaises( - schemas_exceptions.ValidationException, - schemas_notifications.parse_and_validate, - notification, valid_periods) - - def test_validation_exception_for_exceeded_address_length(self): - address = "http://" + "A" * 503 + ".io" - self.assertEqual(513, len(address)) - notification = {"name": "MyWebhook", "type": "WEBHOOK", "address": address} - self.assertRaises( - schemas_exceptions.ValidationException, - schemas_notifications.parse_and_validate, notification, valid_periods) - - def test_validation_exception_for_invalid_period_float(self): - notification = {"name": "MyWebhook", "type": "WEBHOOK", "address": "//somedomain.com", - "period": 1.2} - ex = self.assertRaises(schemas_exceptions.ValidationException, - schemas_notifications.parse_and_validate, - notification, valid_periods) - self.assertEqual("expected int for dictionary value @ data['period']", str(ex)) - - def test_validation_exception_for_invalid_period_non_int(self): - notification = {"name": "MyWebhook", "type": "WEBHOOK", "address": "//somedomain.com", - "period": "zero"} - ex = self.assertRaises(schemas_exceptions.ValidationException, - schemas_notifications.parse_and_validate, - notification, valid_periods) - self.assertEqual("Period zero must be a valid integer", str(ex)) - - def test_validation_exception_for_missing_period(self): - notification = {"name": "MyEmail", "type": "EMAIL", "address": "name@domain."} - ex = self.assertRaises(schemas_exceptions.ValidationException, - schemas_notifications.parse_and_validate, - notification, valid_periods, require_all=True) - self.assertEqual("Period is required", str(ex)) - - -class TestAlarmDefinitionValidation(base.BaseTestCase): - - def setUp(self): - super(TestAlarmDefinitionValidation, self).setUp() 
- self.full_alarm_definition = ( - {"name": self._create_string_of_length(255), - "expression": "min(cpu.idle_perc) < 10", - "description": self._create_string_of_length(255), - "severity": "MEDIUM", - "match_by": ["hostname"], - "ok_actions": [self._create_string_of_length(50)], - "undetermined_actions": [self._create_string_of_length(50)], - "alarm_actions": [self._create_string_of_length(50)], - "actions_enabled": True}) - - def _create_string_of_length(self, length): - s = '' - for i in range(0, length): - s += str(i % 10) - return s - - def test_validation_good_minimum(self): - alarm_definition = {"name": "MyAlarmDefinition", "expression": "min(cpu.idle_perc) < 10"} - try: - schemas_alarm_defs.validate(alarm_definition) - except schemas_exceptions.ValidationException as e: - self.fail("shouldn't happen: {}".format(str(e))) - - def test_validation_good_full(self): - alarm_definition = self.full_alarm_definition - try: - schemas_alarm_defs.validate(alarm_definition) - except schemas_exceptions.ValidationException as e: - self.fail("shouldn't happen: {}".format(str(e))) - - def _ensure_fails_with_new_value(self, name, value): - alarm_definition = self.full_alarm_definition.copy() - alarm_definition[name] = value - self._ensure_validation_fails(alarm_definition) - - def _ensure_validation_fails(self, alarm_definition): - self.assertRaises( - schemas_exceptions.ValidationException, - schemas_alarm_defs.validate, alarm_definition) - - def _run_duplicate_action_test(self, actions_type): - actions = ["a", "b", "a"] - self._ensure_fails_with_new_value(actions_type, actions) - - def test_validation_too_long_name(self): - self._ensure_fails_with_new_value("name", - self._create_string_of_length(256)) - - def test_validation_too_long_description(self): - self._ensure_fails_with_new_value("description", - self._create_string_of_length(256)) - - def test_validation_duplicate_ok_actions(self): - self._run_duplicate_action_test("ok_actions") - - def test_validation_duplicate_alarm_actions(self): - self._run_duplicate_action_test("alarm_actions") - - def test_validation_duplicate_undetermined_actions(self): - self._run_duplicate_action_test("undetermined_actions") - - def test_validation_too_many_actions(self): - actions = [self._create_string_of_length(51)] - self._ensure_fails_with_new_value("ok_actions", actions) - - def test_validation_invalid_severity(self): - self._ensure_fails_with_new_value("severity", "BOGUS") - - def test_validation_invalid_match_by(self): - self._ensure_fails_with_new_value("match_by", "NOT_A_LIST") - - def test_validation_invalid_actions_enabled(self): - self._ensure_fails_with_new_value("actions_enabled", 42) diff --git a/monasca_api/tests/test_versions.py b/monasca_api/tests/test_versions.py deleted file mode 100644 index 6e191f543..000000000 --- a/monasca_api/tests/test_versions.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright 2015 Hewlett-Packard -# Copyright 2017 Fujitsu LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License.
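(Editorial aside: the action-list tests above pin down two invariants: entries must be unique and each at most 50 characters. Stated as a standalone check, illustrative only; the real validator appears later in this patch as validate_action_list in alarm_definition_request_body_schema.py.)

    MAX_ITEM_LENGTH = 50


    def check_action_list(notification_ids):
        if not isinstance(notification_ids, list):
            raise ValueError('not a list: %r' % type(notification_ids))
        seen = set()
        for notification_id in notification_ids:
            if not isinstance(notification_id, str):
                raise ValueError('%r is not a string' % (notification_id,))
            if len(notification_id) > MAX_ITEM_LENGTH:
                raise ValueError('id longer than %d characters' % MAX_ITEM_LENGTH)
            if notification_id in seen:
                raise ValueError('duplicate notification id %s' % notification_id)
            seen.add(notification_id)


    check_action_list(['a', 'b'])          # passes
    # check_action_list(['a', 'b', 'a'])   would raise ValueError (duplicate)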
- -import datetime - -import falcon - -from monasca_api.tests import base -from monasca_api.v2.reference import versions - - -class TestVersions(base.BaseApiTestCase): - - def setUp(self): - super(TestVersions, self).setUp() - self.app.add_route('/', versions.Versions()) - self.app.add_route('/{version_id}', versions.Versions()) - - def test_list_versions(self): - result = self.simulate_request(path='/') - self.assertEqual(result.status, falcon.HTTP_200) - response = result.json - self.assertIsInstance(response, dict) - self.assertEqual(set(['links', 'elements']), - set(response)) - links = response['links'] - self.assertIsInstance(links, list) - link = links[0] - self.assertEqual(set(['rel', 'href']), - set(link)) - self.assertEqual(link['rel'], u'self') - self.assertTrue(link['href'].endswith('/')) - - def test_valid_version_id(self): - result = self.simulate_request(path='/v2.0') - self.assertEqual(result.status, falcon.HTTP_200) - response = result.json - self.assertIsInstance(response, dict) - version = response - self.assertEqual(set(['id', 'links', 'status', 'updated']), - set(version)) - self.assertEqual(version['id'], u'v2.0') - self.assertEqual(version['status'], u'CURRENT') - date_object = datetime.datetime.strptime(version['updated'], - "%Y-%m-%dT%H:%M:%S.%fZ") - self.assertIsInstance(date_object, datetime.datetime) - links = response['links'] - self.assertIsInstance(links, list) - link = links[0] - self.assertEqual(set(['rel', 'href']), - set(link)) - self.assertEqual(link['rel'], u'self') - self.assertTrue(link['href'].endswith('/v2.0')) - - def test_invalid_version_id(self): - result = self.simulate_request(path='/v1.0') - self.assertEqual(result.status, '422 Unprocessable Entity') diff --git a/monasca_api/v2/__init__.py b/monasca_api/v2/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/monasca_api/v2/common/__init__.py b/monasca_api/v2/common/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/monasca_api/v2/common/bulk_processor.py b/monasca_api/v2/common/bulk_processor.py deleted file mode 100644 index 2d034a66f..000000000 --- a/monasca_api/v2/common/bulk_processor.py +++ /dev/null @@ -1,134 +0,0 @@ -# Copyright 2016 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_log import log - -from monasca_api.api.core.log import log_publisher -from monasca_api.api.core.log import model -from monasca_api.api.core.log import validation -from monasca_api import conf - -LOG = log.getLogger(__name__) -CONF = conf.CONF - - -class BulkProcessor(log_publisher.LogPublisher): - """BulkProcessor for effective log processing and publishing. - - BulkProcessor is customized version of - :py:class:`monasca_log_api.app.base.log_publisher.LogPublisher` - that utilizes processing of bulk request inside single loop. - - """ - - def __init__(self, logs_in_counter=None, logs_rejected_counter=None): - """Initializes BulkProcessor. 
- - :param logs_in_counter: V2 received logs counter - :param logs_rejected_counter: V2 rejected logs counter - """ - super(BulkProcessor, self).__init__() - - self.service_region = CONF.region - - def send_message(self, logs, global_dimensions=None, log_tenant_id=None): - """Sends bulk package to kafka - - :param list logs: received logs - :param dict global_dimensions: global dimensions for each log - :param str log_tenant_id: tenant who sent logs - """ - - num_of_msgs = len(logs) if logs else 0 - to_send_msgs = [] - - LOG.debug('Bulk package <logs=%d, dimensions=%s, tenant_id=%s>', - num_of_msgs, global_dimensions, log_tenant_id) - - try: - for log_el in logs: - t_el = self._transform_message(log_el, - global_dimensions, - log_tenant_id) - if t_el: - to_send_msgs.append(t_el) - self._publish(to_send_msgs) - - except Exception as ex: - LOG.error('Failed to send bulk package <logs=%d, dimensions=%s>', - num_of_msgs, global_dimensions) - LOG.exception(ex) - - def _transform_message(self, log_element, *args): - try: - validation.validate_log_message(log_element) - - log_envelope = model.Envelope.new_envelope( - log=log_element, - tenant_id=args[1], - region=self.service_region, - dimensions=self._get_dimensions(log_element, - global_dims=args[0]) - ) - - msg_payload = (super(BulkProcessor, self) - ._transform_message(log_envelope)) - - return msg_payload - except Exception as ex: - LOG.error('Log transformation failed, rejecting log') - LOG.exception(ex) - - return None - - def _create_envelope(self, log_element, tenant_id, dimensions=None): - """Create a log envelope. - - :param dict log_element: raw log element - :param str tenant_id: tenant who sent logs - :param dict dimensions: log dimensions - :return: log envelope - :rtype: model.Envelope - - """ - return - - def _get_dimensions(self, log_element, global_dims=None): - """Get the dimensions of log element. - - If global dimensions are specified and passed to this method, - both instances are merged with each other. - - If neither is specified empty dictionary is returned. - - If only local dimensions are specified they are returned without any - additional operations. The last statement applies also - to global dimensions. - - :param dict log_element: raw log instance - :param dict global_dims: global dimensions or None - :return: local dimensions merged with global dimensions - :rtype: dict - """ - local_dims = log_element.get('dimensions', {}) - - if not global_dims: - global_dims = {} - if local_dims: - validation.validate_dimensions(local_dims) - - dimensions = global_dims.copy() - dimensions.update(local_dims) - - return dimensions diff --git a/monasca_api/v2/common/exceptions.py b/monasca_api/v2/common/exceptions.py deleted file mode 100644 index 6b7a33ea9..000000000 --- a/monasca_api/v2/common/exceptions.py +++ /dev/null @@ -1,37 +0,0 @@ -# (C) Copyright 2015,2016,2017 Hewlett Packard Enterprise Development Company LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License.
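(Editorial aside: the _get_dimensions docstring above describes a guarded dictionary merge in which per-log dimensions win over request-wide ones and absent inputs degrade to an empty dict. The same contract in isolation, as a minimal sketch minus the validation hook:)

    def merge_dimensions(local_dims=None, global_dims=None):
        # Local (per-log) dimensions override global (per-request) ones.
        merged = dict(global_dims or {})
        merged.update(local_dims or {})
        return merged


    assert merge_dimensions({'host': 'a'}, {'host': 'b', 'region': 'r'}) == \
        {'host': 'a', 'region': 'r'}
    assert merge_dimensions() == {}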
- -from falcon.http_error import HTTPError - - -class HTTPUnprocessableEntityError(HTTPError): - def __init__(self, title, description, **kwargs): - HTTPError.__init__( - self, - '422 Unprocessable Entity', - title=title, - description=description, - **kwargs - ) - - -class HTTPBadRequestError(HTTPError): - def __init__(self, title, description, **kwargs): - HTTPError.__init__( - self, - '400 Bad Request', - title=title, - description=description, - **kwargs - ) diff --git a/monasca_api/v2/common/helpers.py b/monasca_api/v2/common/helpers.py deleted file mode 100644 index 925d62071..000000000 --- a/monasca_api/v2/common/helpers.py +++ /dev/null @@ -1,63 +0,0 @@ -# Copyright 2014 Hewlett-Packard -# Copyright 2015 Cray Inc. All Rights Reserved. -# Copyright 2016 Hewlett Packard Enterprise Development Company LP -# Copyright 2016 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import falcon -from oslo_log import log - -from monasca_api.api.core.log import exceptions -from monasca_api.api.core.log import validation -from monasca_api.common.rest import utils as rest_utils - - -LOG = log.getLogger(__name__) - - -def read_json_msg_body(req): - """Read the json_msg from the http request body and return them as JSON. - - :param req: HTTP request object. - :return: Returns the metrics as a JSON object. - :raises falcon.HTTPBadRequest: - """ - try: - msg = req.stream.read() - json_msg = rest_utils.from_json(msg) - return json_msg - - except rest_utils.exceptions.DataConversionException as ex: - LOG.debug(ex) - raise falcon.HTTPBadRequest('Bad request', - 'Request body is not valid JSON') - except ValueError as ex: - LOG.debug(ex) - raise falcon.HTTPBadRequest('Bad request', - 'Request body is not valid JSON') - - -def get_global_dimensions(request_body): - """Get the top level dimensions in the HTTP request body.""" - global_dims = request_body.get('dimensions', {}) - validation.validate_dimensions(global_dims) - return global_dims - - -def get_logs(request_body): - """Get the logs in the HTTP request body.""" - if 'logs' not in request_body: - raise exceptions.HTTPUnprocessableEntity( - 'Unprocessable Entity Logs not found') - return request_body['logs'] diff --git a/monasca_api/v2/common/schemas/__init__.py b/monasca_api/v2/common/schemas/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/monasca_api/v2/common/schemas/alarm_definition_request_body_schema.py b/monasca_api/v2/common/schemas/alarm_definition_request_body_schema.py deleted file mode 100644 index 69ef7c16e..000000000 --- a/monasca_api/v2/common/schemas/alarm_definition_request_body_schema.py +++ /dev/null @@ -1,83 +0,0 @@ -# (C) Copyright 2014-2016 Hewlett Packard Enterprise Development LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_log import log -import six -from voluptuous import All -from voluptuous import Any -from voluptuous import Invalid -from voluptuous import Length -from voluptuous import Marker -from voluptuous import Required -from voluptuous import Schema -from voluptuous import Upper - -from monasca_api.v2.common.schemas import exceptions - - -LOG = log.getLogger(__name__) - -MAX_ITEM_LENGTH = 50 - - -def validate_action_list(notification_ids, action_type): - if not isinstance(notification_ids, list): - raise Invalid('Not a list: {}'.format(type(notification_ids))) - existing = [] - for notification_id in notification_ids: - if not isinstance(notification_id, (str, six.text_type)): - raise Invalid('list item <{}> -> {} not one of (str, unicode)' - .format(notification_id, type(notification_id))) - if len(notification_id) > MAX_ITEM_LENGTH: - raise Invalid('length {} > {}'.format(len(notification_id), - MAX_ITEM_LENGTH)) - if notification_id in existing: - raise Invalid('Duplicate {} notification method {}' - .format(action_type, notification_id)) - existing.append(notification_id) - - -def validate_ok_action_list(v): - validate_action_list(v, 'OK') - - -def validate_alarm_action_list(v): - validate_action_list(v, 'ALARM') - - -def validate_undetermined_action_list(v): - validate_action_list(v, 'UNDETERMINED') - - -alarm_definition_schema = { - Required('name'): All(Any(str, six.text_type), Length(max=255)), - Required('expression'): All(Any(str, six.text_type)), - Marker('description'): All(Any(str, six.text_type), Length(max=255)), - Marker('severity'): All(Upper, Any('LOW', 'MEDIUM', 'HIGH', 'CRITICAL')), - Marker('match_by'): Any([six.text_type], [str]), - Marker('ok_actions'): validate_ok_action_list, - Marker('alarm_actions'): validate_alarm_action_list, - Marker('undetermined_actions'): validate_undetermined_action_list, - Marker('actions_enabled'): bool} - - -def validate(msg, require_all=False): - try: - request_body_schema = Schema(alarm_definition_schema, - required=require_all, - extra=True) - request_body_schema(msg) - except Exception as ex: - LOG.debug(ex) - raise exceptions.ValidationException(str(ex)) diff --git a/monasca_api/v2/common/schemas/alarm_update_schema.py b/monasca_api/v2/common/schemas/alarm_update_schema.py deleted file mode 100644 index fbae55b32..000000000 --- a/monasca_api/v2/common/schemas/alarm_update_schema.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright 2015 Hewlett-Packard -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
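(Editorial aside: as a usage note for the alarm-definition schema defined above: with extra=True unknown keys are tolerated, while Required-marked keys must be present even when the schema-level required flag is off. A trimmed-down sketch of that behavior, assuming voluptuous is installed; the key set is abbreviated here.)

    from voluptuous import All, Length, Marker, MultipleInvalid, Required, Schema

    schema = Schema({
        Required('name'): All(str, Length(max=255)),
        Required('expression'): str,
        Marker('actions_enabled'): bool,
    }, extra=True)

    schema({'name': 'MyAlarm', 'expression': 'min(cpu.idle_perc) < 10'})  # passes
    try:
        schema({'expression': 'min(cpu.idle_perc) < 10'})  # missing 'name'
    except MultipleInvalid as ex:
        print(ex)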
- -from oslo_log import log -import six -import voluptuous - -from monasca_api.v2.common.schemas import exceptions - - -LOG = log.getLogger(__name__) - -alarm_update_schema = { - voluptuous.Optional('state'): voluptuous.All( - voluptuous.Any('OK', 'ALARM', 'UNDETERMINED')), - voluptuous.Optional('lifecycle_state'): voluptuous.All( - voluptuous.Any(str, six.text_type), voluptuous.Length(max=50)), - voluptuous.Optional('link'): voluptuous.All( - voluptuous.Any(str, six.text_type), voluptuous.Length(max=512)) -} - - -request_body_schema = voluptuous.Schema(alarm_update_schema, required=True, - extra=True) - - -def validate(msg): - try: - request_body_schema(msg) - except Exception as ex: - LOG.debug(ex) - raise exceptions.ValidationException(str(ex)) diff --git a/monasca_api/v2/common/schemas/exceptions.py b/monasca_api/v2/common/schemas/exceptions.py deleted file mode 100644 index cc20e12b2..000000000 --- a/monasca_api/v2/common/schemas/exceptions.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2014 Hewlett-Packard -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -class ValidationException(Exception): - pass diff --git a/monasca_api/v2/common/schemas/notifications_request_body_schema.py b/monasca_api/v2/common/schemas/notifications_request_body_schema.py deleted file mode 100644 index e108ce369..000000000 --- a/monasca_api/v2/common/schemas/notifications_request_body_schema.py +++ /dev/null @@ -1,93 +0,0 @@ -# (C) Copyright 2014-2016 Hewlett Packard Enterprise Development LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
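(Editorial aside: and similarly for alarm_update_schema above, a small usage sketch, again assuming voluptuous is available: the state value is checked against the fixed set, the string fields against their length caps.)

    import voluptuous

    schema = voluptuous.Schema({
        voluptuous.Optional('state'): voluptuous.Any('OK', 'ALARM', 'UNDETERMINED'),
        voluptuous.Optional('lifecycle_state'): voluptuous.All(
            str, voluptuous.Length(max=50)),
        voluptuous.Optional('link'): voluptuous.All(
            str, voluptuous.Length(max=512)),
    }, required=True, extra=True)

    schema({'state': 'OK', 'link': 'http://example.com'})  # passes
    try:
        schema({'state': 'BOGUS'})
    except voluptuous.MultipleInvalid as ex:
        print(ex)  # rejected: 'BOGUS' is not one of the allowed states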
- -import monasca_api.v2.common.validation as validation -from oslo_log import log -import six -import six.moves.urllib.parse as urlparse -from voluptuous import All -from voluptuous import Any -from voluptuous import Length -from voluptuous import Marker -from voluptuous import Required -from voluptuous import Schema - -from monasca_api.v2.common.schemas import exceptions - -LOG = log.getLogger(__name__) - -schemes = ['http', 'https'] - -notification_schema = { - Required('name'): Schema(All(Any(str, six.text_type), Length(max=250))), - Required('type'): Schema(Any(str, six.text_type)), - Required('address'): Schema(All(Any(str, six.text_type), Length(max=512))), - Marker('period'): All(Any(int, str))} - -request_body_schema = Schema(Any(notification_schema)) - - -def parse_and_validate(msg, valid_periods, require_all=False): - try: - request_body_schema(msg) - except Exception as ex: - LOG.exception(ex) - raise exceptions.ValidationException(str(ex)) - - if 'period' not in msg: - if require_all: - raise exceptions.ValidationException("Period is required") - else: - msg['period'] = 0 - else: - msg['period'] = _parse_and_validate_period(msg['period'], valid_periods) - - notification_type = str(msg['type']).upper() - - if notification_type == 'EMAIL': - _validate_email(msg['address']) - elif notification_type == 'WEBHOOK': - _validate_url(msg['address']) - - -def _validate_email(address): - if not validation.validate_email_address(address): - raise exceptions.ValidationException("Address {} is not of correct format".format(address)) - - -def _validate_url(address): - try: - parsed = urlparse.urlparse(address) - except Exception: - raise exceptions.ValidationException("Address {} is not of correct format".format(address)) - - if not parsed.scheme: - raise exceptions.ValidationException("Address {} does not have URL scheme".format(address)) - if not parsed.netloc: - raise exceptions.ValidationException("Address {} does not have network location" - .format(address)) - if parsed.scheme not in schemes: - raise exceptions.ValidationException("Address {} scheme is not in {}" - .format(address, schemes)) - - -def _parse_and_validate_period(period, valid_periods): - try: - period = int(period) - except Exception: - raise exceptions.ValidationException("Period {} must be a valid integer".format(period)) - if period != 0 and period not in valid_periods: - raise exceptions.ValidationException( - "{} is not in the configured list of valid periods: {}".format(period, valid_periods)) - return period diff --git a/monasca_api/v2/common/utils.py b/monasca_api/v2/common/utils.py deleted file mode 100644 index 089240dc8..000000000 --- a/monasca_api/v2/common/utils.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2014 Hewlett-Packard -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
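(Editorial aside: the scheme and netloc checks performed by _validate_url above map directly onto the standard library's URL parsing; an equivalent standalone check:)

    from urllib.parse import urlparse

    SCHEMES = ('http', 'https')


    def check_webhook_address(address):
        parsed = urlparse(address)
        if not parsed.scheme:
            raise ValueError('%s does not have URL scheme' % address)
        if not parsed.netloc:
            raise ValueError('%s does not have network location' % address)
        if parsed.scheme not in SCHEMES:
            raise ValueError('%s scheme is not in %s' % (address, list(SCHEMES)))


    check_webhook_address('https://hooks.example.com/alarm')  # passes
    # check_webhook_address('//somedomain.com') would raise (no scheme)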
- - -def date_handler(obj): - return obj.isoformat() if hasattr(obj, 'isoformat') else obj diff --git a/monasca_api/v2/common/validation.py b/monasca_api/v2/common/validation.py deleted file mode 100644 index 4234c4742..000000000 --- a/monasca_api/v2/common/validation.py +++ /dev/null @@ -1,68 +0,0 @@ -# (C) Copyright 2015-2017 Hewlett Packard Enterprise Development LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from monasca_api.v2.common.exceptions import HTTPUnprocessableEntityError - -import re - -VALID_ALARM_STATES = ["ALARM", "OK", "UNDETERMINED"] - -VALID_ALARM_DEFINITION_SEVERITIES = ["LOW", "MEDIUM", "HIGH", "CRITICAL"] - -EMAIL_PATTERN = '^.+@.+$' - - -def validate_alarm_state(state): - if state.upper() not in VALID_ALARM_STATES: - raise HTTPUnprocessableEntityError("Invalid State", - "State {} must be one of {}".format(state.encode('utf8'), - VALID_ALARM_STATES)) - - -def validate_alarm_definition_severity(severity): - if severity.upper() not in VALID_ALARM_DEFINITION_SEVERITIES: - raise HTTPUnprocessableEntityError( - "Invalid Severity", - "Severity {} must be one of {}".format(severity.encode('utf8'), - VALID_ALARM_DEFINITION_SEVERITIES)) - - -def validate_severity_query(severity_str): - severities = severity_str.split('|') - for severity in severities: - validate_alarm_definition_severity(severity) - - -def validate_sort_by(sort_by_list, allowed_sort_by): - for sort_by_field in sort_by_list: - sort_by_values = sort_by_field.split() - if len(sort_by_values) > 2: - raise HTTPUnprocessableEntityError("Unprocessable Entity", - "Invalid sort_by {}".format(sort_by_field)) - if sort_by_values[0] not in allowed_sort_by: - raise HTTPUnprocessableEntityError("Unprocessable Entity", - "sort_by field {} must be one of [{}]".format( - sort_by_values[0], - ','.join(list(allowed_sort_by)))) - if len(sort_by_values) > 1 and sort_by_values[1] not in ['asc', 'desc']: - raise HTTPUnprocessableEntityError("Unprocessable Entity", - "sort_by value {} must be 'asc' or 'desc'".format( - sort_by_values[1])) - - -def validate_email_address(email): - if re.match(EMAIL_PATTERN, email) is None: - return False - else: - return True diff --git a/monasca_api/v2/reference/__init__.py b/monasca_api/v2/reference/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/monasca_api/v2/reference/alarm_definitions.py b/monasca_api/v2/reference/alarm_definitions.py deleted file mode 100644 index 7a7af2ef8..000000000 --- a/monasca_api/v2/reference/alarm_definitions.py +++ /dev/null @@ -1,756 +0,0 @@ -# (C) Copyright 2014-2017 Hewlett Packard Enterprise Development LP -# Copyright 2018 OP5 AB -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import re - -import falcon -from monasca_common.simport import simport -from monasca_common.validation import metrics as metric_validation -from oslo_config import cfg -from oslo_log import log -from oslo_utils import encodeutils -import pyparsing -import six - -from monasca_api.api import alarm_definitions_api_v2 -from monasca_api.common.repositories import exceptions -import monasca_api.expression_parser.alarm_expr_parser -from monasca_api.v2.common.exceptions import HTTPBadRequestError -from monasca_api.v2.common.exceptions import HTTPUnprocessableEntityError -from monasca_api.v2.common.schemas import ( - alarm_definition_request_body_schema as schema_alarms) -from monasca_api.v2.common import validation -from monasca_api.v2.reference import alarming -from monasca_api.v2.reference import helpers -from monasca_api.v2.reference import resource - -LOG = log.getLogger(__name__) - - -class AlarmDefinitions(alarm_definitions_api_v2.AlarmDefinitionsV2API, - alarming.Alarming): - - def __init__(self): - try: - super(AlarmDefinitions, self).__init__() - self._region = cfg.CONF.region - self._alarm_definitions_repo = simport.load( - cfg.CONF.repositories.alarm_definitions_driver)() - - except Exception as ex: - LOG.exception(ex) - raise exceptions.RepositoryException(ex) - - @resource.resource_try_catch_block - def on_post(self, req, res): - helpers.validate_authorization(req, ['api:alarms:definition:post']) - - alarm_definition = helpers.from_json(req) - - self._validate_alarm_definition(alarm_definition) - - name = get_query_alarm_definition_name(alarm_definition) - expression = get_query_alarm_definition_expression(alarm_definition) - description = get_query_alarm_definition_description(alarm_definition) - severity = get_query_alarm_definition_severity(alarm_definition) - match_by = get_query_alarm_definition_match_by(alarm_definition) - alarm_actions = get_query_alarm_definition_alarm_actions( - alarm_definition) - undetermined_actions = get_query_alarm_definition_undetermined_actions( - alarm_definition) - ok_actions = get_query_ok_actions(alarm_definition) - - result = self._alarm_definition_create(req.project_id, name, expression, - description, severity, match_by, - alarm_actions, - undetermined_actions, - ok_actions) - - helpers.add_links_to_resource(result, req.uri) - res.text = helpers.to_json(result) - res.status = falcon.HTTP_201 - - @resource.resource_try_catch_block - def on_get(self, req, res, alarm_definition_id=None): - helpers.validate_authorization(req, ['api:alarms:definition:get']) - if alarm_definition_id is None: - name = helpers.get_query_name(req) - dimensions = helpers.get_query_dimensions(req) - severity = helpers.get_query_param(req, "severity", default_val=None) - if severity is not None: - validation.validate_severity_query(severity) - severity = severity.upper() - sort_by = helpers.get_query_param(req, 'sort_by', default_val=None) - if sort_by is not None: - if isinstance(sort_by, six.string_types): - sort_by = sort_by.split(',') - - allowed_sort_by = {'id', 'name', 'severity', - 'updated_at', 'created_at'} - - validation.validate_sort_by(sort_by, 
allowed_sort_by) - - offset = helpers.get_query_param(req, 'offset') - if offset is not None and not isinstance(offset, int): - try: - offset = int(offset) - except Exception: - raise HTTPUnprocessableEntityError( - 'Unprocessable Entity', - 'Offset value {} must be an integer'.format(offset)) - result = self._alarm_definition_list(req.project_id, name, - dimensions, severity, - req.uri, sort_by, - offset, req.limit) - - else: - result = self._alarm_definition_show(req.project_id, - alarm_definition_id) - - helpers.add_links_to_resource(result, - re.sub('/' + alarm_definition_id, '', - req.uri)) - - res.text = helpers.to_json(result) - res.status = falcon.HTTP_200 - - @resource.resource_try_catch_block - def on_put(self, req, res, alarm_definition_id=None): - - if not alarm_definition_id: - raise HTTPBadRequestError('Bad Request', 'Alarm definition ID not provided') - - helpers.validate_authorization(req, ['api:alarms:definition:put']) - - alarm_definition = helpers.from_json(req) - - self._validate_alarm_definition(alarm_definition, require_all=True) - - name = get_query_alarm_definition_name(alarm_definition) - expression = get_query_alarm_definition_expression(alarm_definition) - actions_enabled = ( - get_query_alarm_definition_actions_enabled(alarm_definition)) - description = get_query_alarm_definition_description(alarm_definition) - alarm_actions = get_query_alarm_definition_alarm_actions(alarm_definition) - ok_actions = get_query_ok_actions(alarm_definition) - undetermined_actions = get_query_alarm_definition_undetermined_actions( - alarm_definition) - match_by = get_query_alarm_definition_match_by(alarm_definition) - severity = get_query_alarm_definition_severity(alarm_definition) - - result = self._alarm_definition_update_or_patch(req.project_id, - alarm_definition_id, - name, - expression, - actions_enabled, - description, - alarm_actions, - ok_actions, - undetermined_actions, - match_by, - severity, - patch=False) - - helpers.add_links_to_resource( - result, re.sub('/' + alarm_definition_id, '', req.uri)) - res.text = helpers.to_json(result) - res.status = falcon.HTTP_200 - - @resource.resource_try_catch_block - def on_patch(self, req, res, alarm_definition_id=None): - - if not alarm_definition_id: - raise HTTPBadRequestError('Bad Request', 'Alarm definition ID not provided') - - helpers.validate_authorization(req, ['api:alarms:definition:patch']) - - alarm_definition = helpers.from_json(req) - - # Optional args - name = get_query_alarm_definition_name(alarm_definition, - return_none=True) - expression = get_query_alarm_definition_expression(alarm_definition, - return_none=True) - actions_enabled = ( - get_query_alarm_definition_actions_enabled(alarm_definition, - return_none=True)) - - description = get_query_alarm_definition_description(alarm_definition, - return_none=True) - alarm_actions = get_query_alarm_definition_alarm_actions( - alarm_definition, return_none=True) - ok_actions = get_query_ok_actions(alarm_definition, return_none=True) - undetermined_actions = get_query_alarm_definition_undetermined_actions( - alarm_definition, return_none=True) - match_by = get_query_alarm_definition_match_by(alarm_definition, - return_none=True) - severity = get_query_alarm_definition_severity(alarm_definition, - return_none=True) - - result = self._alarm_definition_update_or_patch(req.project_id, - alarm_definition_id, - name, - expression, - actions_enabled, - description, - alarm_actions, - ok_actions, - undetermined_actions, - match_by, - severity, - patch=True) - - 
helpers.add_links_to_resource( - result, re.sub('/' + alarm_definition_id, '', req.uri)) - res.text = helpers.to_json(result) - res.status = falcon.HTTP_200 - - @resource.resource_try_catch_block - def on_delete(self, req, res, alarm_definition_id=None): - - if not alarm_definition_id: - raise HTTPBadRequestError('Bad Request', 'Alarm definition ID not provided') - - helpers.validate_authorization(req, ['api:alarms:definition:delete']) - self._alarm_definition_delete(req.project_id, alarm_definition_id) - res.status = falcon.HTTP_204 - - def _validate_name_not_conflicting(self, tenant_id, name, expected_id=None): - definitions = self._alarm_definitions_repo.get_alarm_definitions(tenant_id=tenant_id, - name=name, - dimensions=None, - severity=None, - sort_by=None, - offset=None, - limit=0) - if definitions: - if not expected_id: - LOG.warning( - "Found existing definition for {} with tenant_id {}".format(name, tenant_id)) - raise exceptions.AlreadyExistsException( - "An alarm definition with the name {} already exists" .format(name)) - - found_definition_id = definitions[0]['id'] - if found_definition_id != expected_id: - LOG.warning( - "Found existing alarm definition for {} with tenant_id {} with unexpected id {}" - .format(name, tenant_id, found_definition_id)) - raise exceptions.AlreadyExistsException( - "An alarm definition with the name {} already exists with id {}" - .format(name, found_definition_id)) - - def _alarm_definition_show(self, tenant_id, id): - - alarm_definition_row = ( - self._alarm_definitions_repo.get_alarm_definition(tenant_id, id)) - - return self._build_alarm_definition_show_result(alarm_definition_row) - - def _build_alarm_definition_show_result(self, alarm_definition_row): - - match_by = get_comma_separated_str_as_list( - alarm_definition_row['match_by']) - - alarm_actions_list = get_comma_separated_str_as_list( - alarm_definition_row['alarm_actions']) - - ok_actions_list = get_comma_separated_str_as_list( - alarm_definition_row['ok_actions']) - - undetermined_actions_list = get_comma_separated_str_as_list( - alarm_definition_row['undetermined_actions']) - - description = (alarm_definition_row['description'] - if alarm_definition_row['description'] is not None else None) - - expression = alarm_definition_row['expression'] - is_deterministic = is_definition_deterministic(expression) - - result = { - u'actions_enabled': alarm_definition_row['actions_enabled'] == 1, - u'alarm_actions': alarm_actions_list, - u'undetermined_actions': undetermined_actions_list, - u'ok_actions': ok_actions_list, - u'description': description, - u'expression': expression, - u'deterministic': is_deterministic, - u'id': alarm_definition_row['id'], - u'match_by': match_by, - u'name': alarm_definition_row['name'], - u'severity': alarm_definition_row['severity'].upper() - } - - return result - - def _alarm_definition_delete(self, tenant_id, id): - - sub_alarm_definition_rows = ( - self._alarm_definitions_repo.get_sub_alarm_definitions(id)) - alarm_metric_rows = self._alarm_definitions_repo.get_alarm_metrics( - tenant_id, id) - sub_alarm_rows = self._alarm_definitions_repo.get_sub_alarms( - tenant_id, id) - - if not self._alarm_definitions_repo.delete_alarm_definition( - tenant_id, id): - raise falcon.HTTPNotFound - - self._send_alarm_definition_deleted_event(id, - sub_alarm_definition_rows) - - self._send_alarm_event(u'alarm-deleted', tenant_id, id, - alarm_metric_rows, sub_alarm_rows, None, None) - - def _alarm_definition_list(self, tenant_id, name, dimensions, severity, req_uri, sort_by, - 
offset, limit): - - alarm_definition_rows = ( - self._alarm_definitions_repo.get_alarm_definitions(tenant_id, name, - dimensions, severity, sort_by, - offset, limit)) - - result = [] - for alarm_definition_row in alarm_definition_rows: - match_by = get_comma_separated_str_as_list( - alarm_definition_row['match_by']) - - alarm_actions_list = get_comma_separated_str_as_list( - alarm_definition_row['alarm_actions']) - - ok_actions_list = get_comma_separated_str_as_list( - alarm_definition_row['ok_actions']) - - undetermined_actions_list = get_comma_separated_str_as_list( - alarm_definition_row['undetermined_actions']) - - expression = alarm_definition_row['expression'] - is_deterministic = is_definition_deterministic(expression) - ad = {u'id': alarm_definition_row['id'], - u'name': alarm_definition_row['name'], - u'description': alarm_definition_row['description'] if ( - alarm_definition_row['description']) else u'', - u'expression': alarm_definition_row['expression'], - u'deterministic': is_deterministic, - u'match_by': match_by, - u'severity': alarm_definition_row['severity'].upper(), - u'actions_enabled': - alarm_definition_row['actions_enabled'] == 1, - u'alarm_actions': alarm_actions_list, - u'ok_actions': ok_actions_list, - u'undetermined_actions': undetermined_actions_list} - - helpers.add_links_to_resource(ad, req_uri) - result.append(ad) - - result = helpers.paginate_alarming(result, req_uri, limit) - - return result - - def _validate_alarm_definition(self, alarm_definition, require_all=False): - - try: - schema_alarms.validate(alarm_definition, require_all=require_all) - if 'match_by' in alarm_definition: - for name in alarm_definition['match_by']: - metric_validation.validate_dimension_key(name) - - except Exception as ex: - LOG.debug(ex) - raise HTTPUnprocessableEntityError('Unprocessable Entity', str(ex)) - - def _alarm_definition_update_or_patch(self, tenant_id, - definition_id, - name, - expression, - actions_enabled, - description, - alarm_actions, - ok_actions, - undetermined_actions, - match_by, - severity, - patch): - - if expression: - try: - sub_expr_list = ( - monasca_api.expression_parser.alarm_expr_parser. 
- AlarmExprParser(expression).sub_expr_list) - - except (pyparsing.ParseException, - pyparsing.ParseFatalException) as ex: - LOG.exception(ex) - title = "Invalid alarm expression" - msg = "parser failed on expression '{}' at column {}: {}".format( - expression.encode('utf8'), str(ex.column).encode('utf8'), - ex.msg.encode('utf8')) - raise HTTPUnprocessableEntityError(title, msg) - else: - sub_expr_list = None - - if name: - self._validate_name_not_conflicting(tenant_id, name, expected_id=definition_id) - - alarm_def_row, sub_alarm_def_dicts = ( - self._alarm_definitions_repo.update_or_patch_alarm_definition( - tenant_id, - definition_id, - name, - expression, - sub_expr_list, - actions_enabled, - description, - alarm_actions, - ok_actions, - undetermined_actions, - match_by, - severity, - patch)) - - old_sub_alarm_def_event_dict = ( - self._build_sub_alarm_def_update_dict( - sub_alarm_def_dicts['old'])) - - new_sub_alarm_def_event_dict = ( - self._build_sub_alarm_def_update_dict(sub_alarm_def_dicts[ - 'new'])) - - changed_sub_alarm_def_event_dict = ( - self._build_sub_alarm_def_update_dict(sub_alarm_def_dicts[ - 'changed'])) - - unchanged_sub_alarm_def_event_dict = ( - self._build_sub_alarm_def_update_dict(sub_alarm_def_dicts[ - 'unchanged'])) - - result = self._build_alarm_definition_show_result(alarm_def_row) - # Not all of the passed in parameters will be set if this called - # from on_patch vs on_update. The alarm-definition-updated event - # MUST have all of the fields set so use the dict built from the - # data returned from the database - alarm_def_event_dict = ( - {u'tenantId': tenant_id, - u'alarmDefinitionId': definition_id, - u'alarmName': result['name'], - u'alarmDescription': result['description'], - u'alarmExpression': result['expression'], - u'severity': result['severity'], - u'matchBy': result['match_by'], - u'alarmActionsEnabled': result['actions_enabled'], - u'oldAlarmSubExpressions': old_sub_alarm_def_event_dict, - u'changedSubExpressions': changed_sub_alarm_def_event_dict, - u'unchangedSubExpressions': unchanged_sub_alarm_def_event_dict, - u'newAlarmSubExpressions': new_sub_alarm_def_event_dict}) - - alarm_definition_updated_event = ( - {u'alarm-definition-updated': alarm_def_event_dict}) - - self.send_event(self.events_message_queue, - alarm_definition_updated_event) - - return result - - def _build_sub_alarm_def_update_dict(self, sub_alarm_def_dict): - - sub_alarm_def_update_dict = {} - for id, sub_alarm_def in sub_alarm_def_dict.items(): - dimensions = {} - for name, value in sub_alarm_def.dimensions.items(): - dimensions[name] = value - sub_alarm_def_update_dict[sub_alarm_def.id] = {} - sub_alarm_def_update_dict[sub_alarm_def.id][u'function'] = ( - sub_alarm_def.function) - sub_alarm_def_update_dict[sub_alarm_def.id][ - u'metricDefinition'] = ( - {u'name': sub_alarm_def.metric_name, - u'dimensions': dimensions}) - sub_alarm_def_update_dict[sub_alarm_def.id][u'operator'] = ( - sub_alarm_def.operator) - sub_alarm_def_update_dict[sub_alarm_def.id][u'threshold'] = ( - sub_alarm_def.threshold) - sub_alarm_def_update_dict[sub_alarm_def.id][u'period'] = ( - sub_alarm_def.period) - sub_alarm_def_update_dict[sub_alarm_def.id][u'periods'] = ( - sub_alarm_def.periods) - sub_alarm_def_update_dict[sub_alarm_def.id][u'expression'] = ( - sub_alarm_def.expression) - - return sub_alarm_def_update_dict - - def _alarm_definition_create(self, tenant_id, name, expression, - description, severity, match_by, - alarm_actions, undetermined_actions, - ok_actions): - try: - - sub_expr_list = 
( - monasca_api.expression_parser.alarm_expr_parser. - AlarmExprParser(expression).sub_expr_list) - - except (pyparsing.ParseException, - pyparsing.ParseFatalException) as ex: - LOG.exception(ex) - title = u"Invalid alarm expression" - msg = u"parser failed on expression '{}' at column {}: {}".format( - encodeutils.safe_decode(expression, 'utf-8'), - encodeutils.safe_decode(str(ex.column), 'utf-8'), - encodeutils.safe_decode(ex.msg, 'utf-8')) - raise HTTPUnprocessableEntityError(title, msg) - - self._validate_name_not_conflicting(tenant_id, name) - - alarm_definition_id = ( - self._alarm_definitions_repo. - create_alarm_definition(tenant_id, - name, - expression, - sub_expr_list, - description, - severity, - match_by, - alarm_actions, - undetermined_actions, - ok_actions)) - - self._send_alarm_definition_created_event(tenant_id, - alarm_definition_id, - name, expression, - sub_expr_list, - description, match_by) - result = ( - {u'alarm_actions': alarm_actions, u'ok_actions': ok_actions, - u'description': description, u'match_by': match_by, - u'severity': severity, u'actions_enabled': True, - u'undetermined_actions': undetermined_actions, - u'expression': expression, u'id': alarm_definition_id, - u'deterministic': is_definition_deterministic(expression), - u'name': name}) - - return result - - def _send_alarm_definition_deleted_event(self, alarm_definition_id, - sub_alarm_definition_rows): - - sub_alarm_definition_deleted_event_msg = {} - alarm_definition_deleted_event_msg = {u"alarm-definition-deleted": { - u"alarmDefinitionId": alarm_definition_id, - u'subAlarmMetricDefinitions': - sub_alarm_definition_deleted_event_msg}} - - for sub_alarm_definition in sub_alarm_definition_rows: - sub_alarm_definition_deleted_event_msg[ - sub_alarm_definition['id']] = { - u'name': sub_alarm_definition['metric_name']} - dimensions = {} - sub_alarm_definition_deleted_event_msg[sub_alarm_definition['id']][ - u'dimensions'] = dimensions - if sub_alarm_definition['dimensions']: - for dimension in sub_alarm_definition['dimensions'].split(','): - parsed_dimension = dimension.split('=') - dimensions[parsed_dimension[0]] = parsed_dimension[1] - - self.send_event(self.events_message_queue, - alarm_definition_deleted_event_msg) - - def _send_alarm_definition_created_event(self, tenant_id, - alarm_definition_id, name, - expression, sub_expr_list, - description, match_by): - - alarm_definition_created_event_msg = { - u'alarm-definition-created': {u'tenantId': tenant_id, - u'alarmDefinitionId': - alarm_definition_id, - u'alarmName': name, - u'alarmDescription': description, - u'alarmExpression': expression, - u'matchBy': match_by}} - - sub_expr_event_msg = {} - for sub_expr in sub_expr_list: - sub_expr_event_msg[sub_expr.id] = { - u'function': sub_expr.normalized_func} - metric_definition = {u'name': sub_expr.metric_name} - sub_expr_event_msg[sub_expr.id][ - u'metricDefinition'] = metric_definition - dimensions = {} - for dimension in sub_expr.dimensions_as_list: - parsed_dimension = dimension.split("=") - dimensions[parsed_dimension[0]] = parsed_dimension[1] - metric_definition[u'dimensions'] = dimensions - sub_expr_event_msg[sub_expr.id][ - u'operator'] = sub_expr.normalized_operator - sub_expr_event_msg[sub_expr.id][u'threshold'] = sub_expr.threshold - sub_expr_event_msg[sub_expr.id][u'period'] = sub_expr.period - sub_expr_event_msg[sub_expr.id][u'periods'] = sub_expr.periods - sub_expr_event_msg[sub_expr.id][ - u'expression'] = sub_expr.fmtd_sub_expr_str - - 
alarm_definition_created_event_msg[u'alarm-definition-created'][ - u'alarmSubExpressions'] = sub_expr_event_msg - - self.send_event(self.events_message_queue, - alarm_definition_created_event_msg) - - -def get_query_alarm_definition_name(alarm_definition, return_none=False): - try: - if 'name' in alarm_definition: - name = alarm_definition['name'] - return name - else: - if return_none: - return None - else: - raise Exception("Missing name") - except Exception as ex: - LOG.debug(ex) - raise HTTPUnprocessableEntityError('Unprocessable Entity', str(ex)) - - -def get_query_alarm_definition_expression(alarm_definition, - return_none=False): - try: - if 'expression' in alarm_definition: - expression = alarm_definition['expression'] - return expression - else: - if return_none: - return None - else: - raise Exception("Missing expression") - except Exception as ex: - LOG.debug(ex) - raise HTTPUnprocessableEntityError('Unprocessable Entity', str(ex)) - - -def get_query_alarm_definition_description(alarm_definition, - return_none=False): - if 'description' in alarm_definition: - return alarm_definition['description'] - else: - if return_none: - return None - else: - return '' - - -def get_query_alarm_definition_severity(alarm_definition, return_none=False): - if 'severity' in alarm_definition: - severity = encodeutils.safe_decode(alarm_definition['severity'], 'utf-8').upper() - if severity not in ['LOW', 'MEDIUM', 'HIGH', 'CRITICAL']: - raise HTTPUnprocessableEntityError('Unprocessable Entity', 'Invalid severity') - return severity - else: - if return_none: - return None - else: - return 'LOW' - - -def get_query_alarm_definition_match_by(alarm_definition, return_none=False): - if 'match_by' in alarm_definition: - match_by = alarm_definition['match_by'] - return match_by - else: - if return_none: - return None - else: - return [] - - -def get_query_alarm_definition_alarm_actions(alarm_definition, - return_none=False): - if 'alarm_actions' in alarm_definition: - alarm_actions = alarm_definition['alarm_actions'] - return alarm_actions - else: - if return_none: - return None - else: - return [] - - -def get_query_alarm_definition_undetermined_actions(alarm_definition, - return_none=False): - if 'undetermined_actions' in alarm_definition: - undetermined_actions = alarm_definition['undetermined_actions'] - return undetermined_actions - else: - if return_none: - return None - else: - return [] - - -def get_query_ok_actions(alarm_definition, return_none=False): - if 'ok_actions' in alarm_definition: - ok_actions = alarm_definition['ok_actions'] - return ok_actions - else: - if return_none: - return None - else: - return [] - - -def get_query_alarm_definition_actions_enabled(alarm_definition, - required=False, - return_none=False): - try: - if 'actions_enabled' in alarm_definition: - enabled_actions = alarm_definition['actions_enabled'] - return enabled_actions - else: - if return_none: - return None - elif required: - raise Exception("Missing actions-enabled") - else: - return '' - except Exception as ex: - LOG.debug(ex) - raise HTTPUnprocessableEntityError('Unprocessable Entity', str(ex)) - - -def get_comma_separated_str_as_list(comma_separated_str): - if not comma_separated_str: - return [] - else: - return encodeutils.safe_decode(comma_separated_str, 'utf-8').split(',') - - -def is_definition_deterministic(expression): - """Evaluates if found expression is deterministic or not. - - In order to do that expression is parsed into sub expressions. 
-    Each sub expression must be deterministic for the entire
-    expression to be deterministic.
-
-    Otherwise the expression is non-deterministic.
-
-    :param str expression: expression to be evaluated
-    :return: True if the expression is deterministic, False otherwise
-    :rtype: bool
-    """
-    expr_parser = (monasca_api.expression_parser
-                   .alarm_expr_parser.AlarmExprParser(expression))
-    sub_expressions = expr_parser.sub_expr_list
-
-    for sub_expr in sub_expressions:
-        if not sub_expr.deterministic:
-            return False
-
-    return True
diff --git a/monasca_api/v2/reference/alarming.py b/monasca_api/v2/reference/alarming.py
deleted file mode 100644
index c9e59b430..000000000
--- a/monasca_api/v2/reference/alarming.py
+++ /dev/null
@@ -1,182 +0,0 @@
-# Copyright 2014,2016 Hewlett Packard Enterprise Development Company, L.P.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import falcon
-from monasca_common.simport import simport
-from oslo_config import cfg
-from oslo_log import log
-
-from monasca_api.common.messaging import (
-    exceptions as message_queue_exceptions)
-import monasca_api.expression_parser.alarm_expr_parser
-from monasca_api.v2.reference import helpers
-
-LOG = log.getLogger(__name__)
-
-
-class Alarming(object):
-    """Super class for Alarms and AlarmDefinitions.
-
-    Shared attributes and methods for classes Alarms and AlarmDefinitions.
- """ - - def __init__(self): - - super(Alarming, self).__init__() - - self.events_message_queue = simport.load( - cfg.CONF.messaging.driver)(cfg.CONF.kafka.events_topic) - - self.alarm_state_transitions_message_queue = simport.load( - cfg.CONF.messaging.driver)(cfg.CONF.kafka.alarm_state_transitions_topic) - - def _send_alarm_transitioned_event(self, tenant_id, alarm_id, - alarm_definition_row, - alarm_metric_rows, - old_state, new_state, - link, lifecycle_state, - time_ms): - - # This is a change via the API, so there is no SubAlarm info to add - sub_alarms = [] - metrics = [] - alarm_transitioned_event_msg = {u'alarm-transitioned': { - u'tenantId': tenant_id, - u'alarmId': alarm_id, - u'alarmDefinitionId': alarm_definition_row['id'], - u'alarmName': alarm_definition_row['name'], - u'alarmDescription': alarm_definition_row['description'], - u'actionsEnabled': alarm_definition_row['actions_enabled'] == 1, - u'stateChangeReason': 'Alarm state updated via API', - u'severity': alarm_definition_row['severity'], - u'link': link, - u'lifecycleState': lifecycle_state, - u'oldState': old_state, - u'newState': new_state, - u'timestamp': time_ms, - u'subAlarms': sub_alarms, - u'metrics': metrics} - } - - for alarm_metric_row in alarm_metric_rows: - metric = self._build_metric(alarm_metric_row) - metrics.append(metric) - - self.send_event(self.alarm_state_transitions_message_queue, - alarm_transitioned_event_msg) - - def _build_metric(self, alarm_metric_row): - - dimensions = {} - - metric = {u'name': alarm_metric_row['name'], - u'dimensions': dimensions} - - if alarm_metric_row['dimensions']: - for dimension in alarm_metric_row['dimensions'].split(','): - parsed_dimension = dimension.split('=') - dimensions[parsed_dimension[0]] = parsed_dimension[1] - - return metric - - def _send_alarm_event(self, event_type, tenant_id, alarm_definition_id, - alarm_metric_rows, sub_alarm_rows, link, lifecycle_state, - extra_info=None): - - if not alarm_metric_rows: - return - - # Build a dict mapping alarm id -> list of sub alarms. - sub_alarm_dict = {} - for sub_alarm_row in sub_alarm_rows: - if sub_alarm_row['alarm_id'] in sub_alarm_dict: - sub_alarm_dict[sub_alarm_row['alarm_id']] += [sub_alarm_row] - else: - sub_alarm_dict[sub_alarm_row['alarm_id']] = [sub_alarm_row] - - # Forward declaration. 
- alarm_event_msg = {} - prev_alarm_id = None - for alarm_metric_row in alarm_metric_rows: - if prev_alarm_id != alarm_metric_row['alarm_id']: - if prev_alarm_id is not None: - sub_alarms_event_msg = ( - self._build_sub_alarm_event_msg(sub_alarm_dict, - prev_alarm_id)) - alarm_event_msg[event_type][u'subAlarms'] = sub_alarms_event_msg - self.send_event(self.events_message_queue, - alarm_event_msg) - - alarm_metrics_event_msg = [] - alarm_event_msg = {event_type: {u'tenantId': tenant_id, - u'alarmDefinitionId': - alarm_definition_id, - u'alarmId': alarm_metric_row[ - 'alarm_id'], - u'link': link, - u'lifecycleState': lifecycle_state, - u'alarmMetrics': - alarm_metrics_event_msg}} - if extra_info: - alarm_event_msg[event_type].update(extra_info) - - prev_alarm_id = alarm_metric_row['alarm_id'] - - metric = self._build_metric(alarm_metric_row) - alarm_metrics_event_msg.append(metric) - - # Finish last alarm - sub_alarms_event_msg = self._build_sub_alarm_event_msg(sub_alarm_dict, - prev_alarm_id) - alarm_event_msg[event_type][u'subAlarms'] = sub_alarms_event_msg - - self.send_event(self.events_message_queue, - alarm_event_msg) - - def _build_sub_alarm_event_msg(self, sub_alarm_dict, alarm_id): - - sub_alarms_event_msg = {} - - if alarm_id not in sub_alarm_dict: - return sub_alarms_event_msg - - for sub_alarm in sub_alarm_dict[alarm_id]: - # There's only one expr in a sub alarm, so just take the first. - sub_expr = ( - monasca_api.expression_parser.alarm_expr_parser. - AlarmExprParser(sub_alarm['expression']).sub_expr_list[0]) - dimensions = {} - sub_alarms_event_msg[sub_alarm['sub_alarm_id']] = { - u'function': sub_expr.normalized_func, - u'metricDefinition': {u'name': sub_expr.metric_name, - u'dimensions': dimensions}, - u'operator': sub_expr.normalized_operator, - u'threshold': sub_expr.threshold, u'period': sub_expr.period, - u'periods': sub_expr.periods, - u'expression': sub_expr.fmtd_sub_expr_str} - - for dimension in sub_expr.dimensions_as_list: - parsed_dimension = dimension.split('=') - dimensions[parsed_dimension[0]] = parsed_dimension[1] - - return sub_alarms_event_msg - - def send_event(self, message_queue, event_msg): - try: - message_queue.send_message(helpers.to_json(event_msg)) - except message_queue_exceptions.MessageQueueException as ex: - LOG.exception(ex) - raise falcon.HTTPInternalServerError( - 'Message queue service unavailable'.encode('utf8'), - str(ex).encode('utf8')) diff --git a/monasca_api/v2/reference/alarms.py b/monasca_api/v2/reference/alarms.py deleted file mode 100644 index 22527feb5..000000000 --- a/monasca_api/v2/reference/alarms.py +++ /dev/null @@ -1,516 +0,0 @@ -# Copyright 2014-2017 Hewlett Packard Enterprise Development LP -# Copyright 2018 OP5 AB -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
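# The deleted Alarming._send_alarm_event above depends on alarm_metric_rows
# arriving ordered by alarm_id: it accumulates metrics until the alarm id
# changes, then emits one event message per alarm on the events queue. A
# minimal, self-contained sketch of that grouping, assuming a simplified row
# shape and using itertools.groupby in place of the original prev_alarm_id
# bookkeeping (the row shape and function name here are illustrative, not
# the original API):

from itertools import groupby


def build_alarm_events(event_type, tenant_id, definition_id, rows):
    # One event message per alarm; rows must be ordered by alarm_id,
    # matching the ordering the loop above relies on.
    events = []
    for alarm_id, group in groupby(rows, key=lambda r: r['alarm_id']):
        metrics = [{'name': r['metric_name'], 'dimensions': r['dimensions']}
                   for r in group]
        events.append({event_type: {'tenantId': tenant_id,
                                    'alarmDefinitionId': definition_id,
                                    'alarmId': alarm_id,
                                    'alarmMetrics': metrics}})
    return events


# Two alarms' worth of rows yield two event messages.
rows = [{'alarm_id': 'a1', 'metric_name': 'cpu.idle_perc',
         'dimensions': {'hostname': 'h1'}},
        {'alarm_id': 'a1', 'metric_name': 'cpu.user_perc',
         'dimensions': {'hostname': 'h1'}},
        {'alarm_id': 'a2', 'metric_name': 'mem.free_mb',
         'dimensions': {'hostname': 'h2'}}]
assert len(build_alarm_events('alarm-deleted', 't1', 'd1', rows)) == 2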
- -import re - -import falcon -from monasca_common.simport import simport -from oslo_config import cfg -from oslo_log import log -import six - -from monasca_api.api import alarms_api_v2 -from monasca_api.common.repositories import exceptions -from monasca_api.v2.common.exceptions import HTTPUnprocessableEntityError -from monasca_api.v2.common.schemas import alarm_update_schema as schema_alarm -from monasca_api.v2.common import validation -from monasca_api.v2.reference import alarming -from monasca_api.v2.reference import helpers -from monasca_api.v2.reference import resource - -LOG = log.getLogger(__name__) - - -class Alarms(alarms_api_v2.AlarmsV2API, - alarming.Alarming): - def __init__(self): - try: - super(Alarms, self).__init__() - self._region = cfg.CONF.region - self._alarms_repo = simport.load( - cfg.CONF.repositories.alarms_driver)() - - except Exception as ex: - LOG.exception(ex) - raise exceptions.RepositoryException(ex) - - @resource.resource_try_catch_block - def on_put(self, req, res, alarm_id): - - helpers.validate_authorization(req, ['api:alarms:put']) - - alarm = helpers.from_json(req) - schema_alarm.validate(alarm) - - # Validator makes state optional, so check it here - if 'state' not in alarm or not alarm['state']: - raise HTTPUnprocessableEntityError('Unprocessable Entity', - "Field 'state' is required") - if 'lifecycle_state' not in alarm or not alarm['lifecycle_state']: - raise HTTPUnprocessableEntityError('Unprocessable Entity', - "Field 'lifecycle_state' is required") - if 'link' not in alarm or not alarm['link']: - raise HTTPUnprocessableEntityError('Unprocessable Entity', - "Field 'link' is required") - - self._alarm_update(req.project_id, alarm_id, alarm['state'], - alarm['lifecycle_state'], alarm['link']) - - result = self._alarm_show(req.uri, req.project_id, alarm_id) - - res.text = helpers.to_json(result) - res.status = falcon.HTTP_200 - - @resource.resource_try_catch_block - def on_patch(self, req, res, alarm_id): - - helpers.validate_authorization(req, ['api:alarms:patch']) - - alarm = helpers.from_json(req) - schema_alarm.validate(alarm) - - old_alarm = self._alarms_repo.get_alarm(req.project_id, alarm_id)[0] - - # if a field is not present or is None, replace it with the old value - if 'state' not in alarm or not alarm['state']: - alarm['state'] = old_alarm['state'] - if 'lifecycle_state' not in alarm or alarm['lifecycle_state'] is None: - alarm['lifecycle_state'] = old_alarm['lifecycle_state'] - if 'link' not in alarm or alarm['link'] is None: - alarm['link'] = old_alarm['link'] - - self._alarm_patch(req.project_id, alarm_id, alarm['state'], - alarm['lifecycle_state'], alarm['link']) - - result = self._alarm_show(req.uri, req.project_id, alarm_id) - - res.text = helpers.to_json(result) - res.status = falcon.HTTP_200 - - @resource.resource_try_catch_block - def on_delete(self, req, res, alarm_id): - - helpers.validate_authorization(req, ['api:alarms:delete']) - - self._alarm_delete(req.project_id, alarm_id) - - res.status = falcon.HTTP_204 - - @resource.resource_try_catch_block - def on_get(self, req, res, alarm_id=None): - helpers.validate_authorization(req, ['api:alarms:get']) - - if alarm_id is None: - query_parms = falcon.uri.parse_query_string(req.query_string) - if 'state' in query_parms: - validation.validate_alarm_state(query_parms['state']) - query_parms['state'] = query_parms['state'].upper() - - if 'severity' in query_parms: - validation.validate_severity_query(query_parms['severity']) - query_parms['severity'] = query_parms['severity'].upper() 
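            # A hypothetical request illustrating the normalization above:
            #
            #     GET /v2.0/alarms?state=alarm&severity=low&sort_by=severity
            #
            # falcon.uri.parse_query_string() yields
            #     {'state': 'alarm', 'severity': 'low', 'sort_by': 'severity'}
            # and the two upper() calls canonicalize the enumerated fields to
            # 'ALARM' and 'LOW' before sort_by is split and validated below.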
- - if 'sort_by' in query_parms: - if isinstance(query_parms['sort_by'], six.string_types): - query_parms['sort_by'] = query_parms['sort_by'].split(',') - - allowed_sort_by = { - 'alarm_id', 'alarm_definition_id', 'alarm_definition_name', - 'state', 'severity', 'lifecycle_state', 'link', - 'state_updated_timestamp', 'updated_timestamp', 'created_timestamp'} - validation.validate_sort_by(query_parms['sort_by'], allowed_sort_by) - - query_parms['metric_dimensions'] = helpers.get_query_dimensions( - req, 'metric_dimensions') - helpers.validate_query_dimensions(query_parms['metric_dimensions']) - - offset = helpers.get_query_param(req, 'offset') - if offset is not None and not isinstance(offset, int): - try: - offset = int(offset) - except Exception as ex: - LOG.exception(ex) - raise HTTPUnprocessableEntityError( - "Unprocessable Entity", - "Offset value {} must be an integer".format(offset)) - - result = self._alarm_list(req.uri, req.project_id, - query_parms, offset, - req.limit) - - res.text = helpers.to_json(result) - res.status = falcon.HTTP_200 - - else: - result = self._alarm_show(req.uri, req.project_id, alarm_id) - - res.text = helpers.to_json(result) - res.status = falcon.HTTP_200 - - def _alarm_update(self, tenant_id, alarm_id, new_state, lifecycle_state, - link): - - alarm_metric_rows = self._alarms_repo.get_alarm_metrics(alarm_id) - sub_alarm_rows = self._alarms_repo.get_sub_alarms(tenant_id, alarm_id) - - old_alarm, time_ms = self._alarms_repo.update_alarm(tenant_id, alarm_id, - new_state, - lifecycle_state, link) - old_state = old_alarm['state'] - # alarm_definition_id is the same for all rows. - alarm_definition_id = sub_alarm_rows[0]['alarm_definition_id'] - - state_info = {u'alarmState': new_state, u'oldAlarmState': old_state} - - self._send_alarm_event(u'alarm-updated', tenant_id, - alarm_definition_id, alarm_metric_rows, - sub_alarm_rows, link, lifecycle_state, state_info) - - if old_state != new_state: - try: - alarm_definition_row = self._alarms_repo.get_alarm_definition( - tenant_id, alarm_id) - except exceptions.DoesNotExistException: - # Alarm definition does not exist. May have been deleted - # in another transaction. In that case, all associated - # alarms were also deleted, so don't send transition events. - pass - else: - self._send_alarm_transitioned_event(tenant_id, alarm_id, - alarm_definition_row, - alarm_metric_rows, - old_state, new_state, - link, lifecycle_state, - time_ms) - - def _alarm_patch(self, tenant_id, alarm_id, new_state, lifecycle_state, - link): - - alarm_metric_rows = self._alarms_repo.get_alarm_metrics(alarm_id) - sub_alarm_rows = self._alarms_repo.get_sub_alarms(tenant_id, alarm_id) - - old_alarm, time_ms = self._alarms_repo.update_alarm(tenant_id, alarm_id, - new_state, - lifecycle_state, link) - - # alarm_definition_id is the same for all rows. - alarm_definition_id = sub_alarm_rows[0]['alarm_definition_id'] - - state_info = {u'alarmState': new_state, u'oldAlarmState': old_alarm['state']} - - self._send_alarm_event(u'alarm-updated', tenant_id, - alarm_definition_id, alarm_metric_rows, - sub_alarm_rows, link, lifecycle_state, state_info) - - if old_alarm['state'] != new_state: - try: - alarm_definition_row = self._alarms_repo.get_alarm_definition( - tenant_id, alarm_id) - except exceptions.DoesNotExistException: - # Alarm definition does not exist. May have been deleted - # in another transaction. In that case, all associated - # alarms were also deleted, so don't send transition events. 
- pass - else: - self._send_alarm_transitioned_event(tenant_id, alarm_id, - alarm_definition_row, - alarm_metric_rows, - old_alarm['state'], new_state, - link, lifecycle_state, - time_ms) - - def _alarm_delete(self, tenant_id, id): - - alarm_metric_rows = self._alarms_repo.get_alarm_metrics(id) - sub_alarm_rows = self._alarms_repo.get_sub_alarms(tenant_id, id) - - self._alarms_repo.delete_alarm(tenant_id, id) - - # alarm_definition_id is the same for all rows. - alarm_definition_id = sub_alarm_rows[0]['alarm_definition_id'] - - self._send_alarm_event(u'alarm-deleted', tenant_id, - alarm_definition_id, alarm_metric_rows, - sub_alarm_rows, None, None) - - def _alarm_show(self, req_uri, tenant_id, alarm_id): - - alarm_rows = self._alarms_repo.get_alarm(tenant_id, alarm_id) - - req_uri_no_id = req_uri.replace('/' + alarm_id, "") - first_row = True - for alarm_row in alarm_rows: - if first_row: - ad = {u'id': alarm_row['alarm_definition_id'], - u'name': alarm_row['alarm_definition_name'], - u'severity': alarm_row['severity'], } - helpers.add_links_to_resource(ad, - re.sub('alarms', - 'alarm-definitions', - req_uri_no_id)) - - metrics = [] - alarm = {u'id': alarm_row['alarm_id'], u'metrics': metrics, - u'state': alarm_row['state'], - u'lifecycle_state': alarm_row['lifecycle_state'], - u'link': alarm_row['link'], - u'state_updated_timestamp': - alarm_row['state_updated_timestamp'].isoformat() + - 'Z', - u'updated_timestamp': - alarm_row['updated_timestamp'].isoformat() + 'Z', - u'created_timestamp': - alarm_row['created_timestamp'].isoformat() + 'Z', - u'alarm_definition': ad} - helpers.add_links_to_resource(alarm, req_uri_no_id) - - first_row = False - - dimensions = {} - metric = {u'name': alarm_row['metric_name'], - u'dimensions': dimensions} - - if alarm_row['metric_dimensions']: - for dimension in alarm_row['metric_dimensions'].split(','): - parsed_dimension = dimension.split('=') - dimensions[parsed_dimension[0]] = parsed_dimension[1] - - metrics.append(metric) - - return alarm - - def _alarm_list(self, req_uri, tenant_id, query_parms, offset, limit): - - alarm_rows = self._alarms_repo.get_alarms(tenant_id, query_parms, - offset, limit) - - result = [] - if not alarm_rows: - return helpers.paginate_alarming(result, req_uri, limit) - - # Forward declaration - alarm = {} - prev_alarm_id = None - for alarm_row in alarm_rows: - if prev_alarm_id != alarm_row['alarm_id']: - if prev_alarm_id is not None: - result.append(alarm) - - ad = {u'id': alarm_row['alarm_definition_id'], - u'name': alarm_row['alarm_definition_name'], - u'severity': alarm_row['severity'], } - helpers.add_links_to_resource(ad, - re.sub('alarms', - 'alarm-definitions', - req_uri)) - - metrics = [] - alarm = {u'id': alarm_row['alarm_id'], u'metrics': metrics, - u'state': alarm_row['state'], - u'lifecycle_state': alarm_row['lifecycle_state'], - u'link': alarm_row['link'], - u'state_updated_timestamp': - alarm_row['state_updated_timestamp'].isoformat() + - 'Z', - u'updated_timestamp': - alarm_row['updated_timestamp'].isoformat() + 'Z', - u'created_timestamp': - alarm_row['created_timestamp'].isoformat() + 'Z', - u'alarm_definition': ad} - helpers.add_links_to_resource(alarm, req_uri) - - prev_alarm_id = alarm_row['alarm_id'] - - dimensions = {} - metric = {u'name': alarm_row['metric_name'], - u'dimensions': dimensions} - - if alarm_row['metric_dimensions']: - for dimension in alarm_row['metric_dimensions'].split(','): - parsed_dimension = dimension.split('=') - dimensions[parsed_dimension[0]] = parsed_dimension[1] - - 
metrics.append(metric) - - result.append(alarm) - - return helpers.paginate_alarming(result, req_uri, limit) - - -class AlarmsCount(alarms_api_v2.AlarmsCountV2API, alarming.Alarming): - - def __init__(self): - try: - super(AlarmsCount, self).__init__() - self._region = cfg.CONF.region - self._alarms_repo = simport.load( - cfg.CONF.repositories.alarms_driver)() - - except Exception as ex: - LOG.exception(ex) - raise exceptions.RepositoryException(ex) - - @resource.resource_try_catch_block - def on_get(self, req, res): - helpers.validate_authorization(req, ['api:alarms:count']) - query_parms = falcon.uri.parse_query_string(req.query_string) - - if 'state' in query_parms: - validation.validate_alarm_state(query_parms['state']) - query_parms['state'] = query_parms['state'].upper() - - if 'severity' in query_parms: - validation.validate_severity_query(query_parms['severity']) - query_parms['severity'] = query_parms['severity'].upper() - - if 'group_by' in query_parms: - if not isinstance(query_parms['group_by'], list): - query_parms['group_by'] = query_parms['group_by'].split(',') - self._validate_group_by(query_parms['group_by']) - - query_parms['metric_dimensions'] = helpers.get_query_dimensions(req, 'metric_dimensions') - helpers.validate_query_dimensions(query_parms['metric_dimensions']) - - offset = helpers.get_query_param(req, 'offset') - - if offset is not None: - try: - offset = int(offset) - except Exception: - raise HTTPUnprocessableEntityError( - "Unprocessable Entity", - "Offset must be a valid integer, was {}".format(offset)) - - result = self._alarms_count(req.uri, req.project_id, query_parms, offset, req.limit) - - res.text = helpers.to_json(result) - res.status = falcon.HTTP_200 - - def _alarms_count(self, req_uri, tenant_id, query_parms, offset, limit): - - count_data = self._alarms_repo.get_alarms_count(tenant_id, query_parms, offset, limit) - group_by = query_parms['group_by'] if 'group_by' in query_parms else [] - - # result = count_data - result = { - 'links': [ - { - 'rel': 'self', - 'href': req_uri - } - ], - 'columns': ['count'] - } - - if len(count_data) == 0 or count_data[0]['count'] == 0: - count = [0] - if 'group_by' in query_parms: - for field in query_parms['group_by']: - result['columns'].append(field) - count.append(None) - result['counts'] = [count] - return result - - if len(count_data) > limit: - result['links'].append({ - 'rel': 'next', - 'href': helpers.create_alarms_count_next_link(req_uri, offset, limit)}) - count_data = count_data[:limit] - - result['columns'].extend(group_by) - - result['counts'] = [] - for row in count_data: - count_result = [row['count']] - for field in group_by: - count_result.append(row[field]) - result['counts'].append(count_result) - - return result - - def _validate_group_by(self, group_by): - allowed_values = {'alarm_definition_id', 'name', 'state', 'severity', - 'link', 'lifecycle_state', 'metric_name', - 'dimension_name', 'dimension_value'} - if not set(group_by).issubset(allowed_values): - raise HTTPUnprocessableEntityError( - "Unprocessable Entity", - "One or more group-by values from {} are not in {}" - .format(group_by, allowed_values)) - - -class AlarmsStateHistory(alarms_api_v2.AlarmsStateHistoryV2API, - alarming.Alarming): - def __init__(self): - try: - super(AlarmsStateHistory, self).__init__() - self._region = cfg.CONF.region - self._alarms_repo = simport.load( - cfg.CONF.repositories.alarms_driver)() - self._metrics_repo = simport.load( - cfg.CONF.repositories.metrics_driver)() - - except Exception as ex: - 
LOG.exception(ex) - raise exceptions.RepositoryException(ex) - - @resource.resource_try_catch_block - def on_get(self, req, res, alarm_id=None): - helpers.validate_authorization(req, ['api:alarms:state_history']) - offset = helpers.get_query_param(req, 'offset') - - if alarm_id is None: - start_timestamp = helpers.get_query_starttime_timestamp(req, False) - end_timestamp = helpers.get_query_endtime_timestamp(req, False) - - dimensions = helpers.get_query_dimensions(req) - helpers.validate_query_dimensions(dimensions) - - result = self._alarm_history_list(req.project_id, start_timestamp, - end_timestamp, dimensions, - req.uri, offset, req.limit) - - else: - result = self._alarm_history(req.project_id, alarm_id, - req.uri, offset, - req.limit) - - res.text = helpers.to_json(result) - res.status = falcon.HTTP_200 - - def _alarm_history_list(self, tenant_id, start_timestamp, - end_timestamp, dimensions, req_uri, offset, - limit): - - # get_alarms expects 'metric_dimensions' for dimensions key. - new_query_parms = {'metric_dimensions': dimensions} - - alarm_rows = self._alarms_repo.get_alarms(tenant_id, new_query_parms, - None, None) - alarm_id_list = [alarm_row['alarm_id'] for alarm_row in alarm_rows] - - result = self._metrics_repo.alarm_history(tenant_id, alarm_id_list, - offset, limit, - start_timestamp, - end_timestamp) - - return helpers.paginate(result, req_uri, limit) - - def _alarm_history(self, tenant_id, alarm_id, req_uri, offset, limit): - - result = self._metrics_repo.alarm_history(tenant_id, [alarm_id], offset, - limit) - - return helpers.paginate(result, req_uri, limit) diff --git a/monasca_api/v2/reference/helpers.py b/monasca_api/v2/reference/helpers.py deleted file mode 100644 index 94b8bb9cf..000000000 --- a/monasca_api/v2/reference/helpers.py +++ /dev/null @@ -1,790 +0,0 @@ -# Copyright 2015 Cray Inc. All Rights Reserved. -# (C) Copyright 2014,2016-2017 Hewlett Packard Enterprise Development LP -# (C) Copyright 2017 SUSE LLC -# Copyright 2018 OP5 AB -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import datetime - -import falcon -from oslo_log import log -from oslo_utils import encodeutils -from oslo_utils import timeutils -import six -import six.moves.urllib.parse as urlparse - -from monasca_api.common.rest import utils as rest_utils -from monasca_api import conf -from monasca_api.v2.common.exceptions import HTTPUnprocessableEntityError -from monasca_common.validation import metrics as metric_validation - - -LOG = log.getLogger(__name__) -CONF = conf.CONF - - -def from_json(req): - """Read the json_msg from the http request body and return them as JSON. - - :param req: HTTP request object. - :return: Returns the metrics as a JSON object. - :raises falcon.HTTPBadRequest: - """ - try: - return req.get_media() - except Exception as ex: - LOG.exception(ex) - raise falcon.HTTPBadRequest(title='Bad request', - description='Request body is not valid JSON') - - -def to_json(data): - """Converts data to JSON string. 
- - :param dict data: data to be transformed to JSON - :return: JSON string - :rtype: str - :raises: Exception - """ - try: - # NOTE(trebskit) ensure_ascii => UTF-8 - return rest_utils.as_json(data, ensure_ascii=False) - except Exception as ex: - LOG.exception(ex) - raise - - -def validate_json_content_type(req): - if req.content_type not in ['application/json']: - raise falcon.HTTPBadRequest( - title='Bad request', - description='Bad content type. Must be application/json') - - -def validate_authorization(http_request, authorized_rules_list): - """Validates whether is authorized according to provided policy rules list. - - If authorization fails, 401 is thrown with appropriate description. - Additionally response specifies 'WWW-Authenticate' header with 'Token' - value challenging the client to use different token (the one with - different set of roles which can access the service). - """ - - challenge = 'Token' - for rule in authorized_rules_list: - try: - http_request.can(rule) - return - except Exception as ex: - LOG.debug(ex) - - raise falcon.HTTPUnauthorized(title='Forbidden', - description='The request does not have access to this service', - challenges=challenge) - - -def validate_payload_size(content_length): - """Validates payload size. - - Method validates payload size, this method used req.content_length to determinate - payload size - - [service] - max_log_size = 1048576 - - **max_log_size** refers to the maximum allowed content length. - If it is exceeded :py:class:`falcon.HTTPRequestEntityTooLarge` is - thrown. - - :param content_length: size of payload - - :exception: :py:class:`falcon.HTTPLengthRequired` - :exception: :py:class:`falcon.HTTPRequestEntityTooLarge` - - """ - max_size = CONF.log_publisher.max_log_size - - LOG.debug('Payload (content-length) is %s', str(content_length)) - - if content_length >= max_size: - raise falcon.HTTPPayloadTooLarge( - title='Log payload size exceeded', - description='Maximum allowed size is %d bytes' % max_size - ) - - -def get_x_tenant_or_tenant_id(http_request, delegate_authorized_rules_list): - params = falcon.uri.parse_query_string(http_request.query_string) - if 'tenant_id' in params: - tenant_id = params['tenant_id'] - - for rule in delegate_authorized_rules_list: - try: - http_request.can(rule) - return tenant_id - except Exception as ex: - LOG.debug(ex) - - return http_request.project_id - - -def get_query_param(req, param_name, required=False, default_val=None): - try: - params = falcon.uri.parse_query_string(req.query_string) - if param_name in params: - if isinstance(params[param_name], list): - param_val = encodeutils.safe_decode(params[param_name][0], 'utf8') - else: - param_val = encodeutils.safe_decode(params[param_name], 'utf8') - - return param_val - else: - if required: - raise Exception("Missing " + param_name) - else: - return default_val - except Exception as ex: - LOG.debug(ex) - raise HTTPUnprocessableEntityError('Unprocessable Entity', str(ex)) - - -def get_query_name(req, name_required=False): - """Returns the query param "name" if supplied. - - :param req: HTTP request object. - """ - try: - params = falcon.uri.parse_query_string(req.query_string) - if 'name' in params: - name = params['name'] - return name - else: - if name_required: - raise Exception("Missing name") - else: - return '' - except Exception as ex: - LOG.debug(ex) - raise HTTPUnprocessableEntityError('Unprocessable Entity', str(ex)) - - -def get_query_dimensions(req, param_key='dimensions'): - """Gets and parses the query param dimensions. 
- - :param req: HTTP request object. - :param dimensions_param: param name for dimensions, default='dimensions' - :return: Returns the dimensions as a JSON object - :raises falcon.HTTPBadRequest: If dimensions are malformed. - """ - try: - params = falcon.uri.parse_query_string(req.query_string) - dimensions = {} - if param_key not in params: - return dimensions - - dimensions_param = params[param_key] - if isinstance(dimensions_param, six.string_types): - dimensions_str_array = dimensions_param.split(',') - elif isinstance(dimensions_param, list): - dimensions_str_array = [] - for sublist in dimensions_param: - dimensions_str_array.extend(sublist.split(",")) - else: - raise Exception("Error parsing dimensions, unknown format") - - for dimension in dimensions_str_array: - dimension_name_value = dimension.split(':', 1) - if len(dimension_name_value) == 2: - dimensions[dimension_name_value[0]] = dimension_name_value[1] - elif len(dimension_name_value) == 1: - dimensions[dimension_name_value[0]] = "" - return dimensions - except Exception as ex: - LOG.debug(ex) - raise HTTPUnprocessableEntityError('Unprocessable Entity', str(ex)) - - -def get_query_starttime_timestamp(req, required=True): - try: - params = falcon.uri.parse_query_string(req.query_string) - if 'start_time' in params: - return _convert_time_string(params['start_time']) - else: - if required: - raise Exception("Missing start time") - else: - return None - except Exception as ex: - LOG.debug(ex) - raise HTTPUnprocessableEntityError('Unprocessable Entity', str(ex)) - - -def get_query_endtime_timestamp(req, required=True): - try: - params = falcon.uri.parse_query_string(req.query_string) - if 'end_time' in params: - return _convert_time_string(params['end_time']) - else: - if required: - raise Exception("Missing end time") - else: - return None - except Exception as ex: - LOG.debug(ex) - raise HTTPUnprocessableEntityError('Unprocessable Entity', str(ex)) - - -def validate_start_end_timestamps(start_timestamp, end_timestamp=None): - if end_timestamp: - if not start_timestamp < end_timestamp: - raise falcon.HTTPBadRequest(title='Bad request', - description='start_time must be before end_time') - - -def _convert_time_string(date_time_string): - dt = timeutils.parse_isotime(date_time_string) - dt = timeutils.normalize_time(dt) - timestamp = (dt - datetime.datetime(1970, 1, 1)).total_seconds() - return timestamp - - -def get_query_statistics(req): - try: - params = falcon.uri.parse_query_string(req.query_string) - if 'statistics' in params: - statistics = [] - # falcon may return this as a list or as a string - if isinstance(params['statistics'], list): - statistics.extend(params['statistics']) - else: - statistics.extend(params['statistics'].split(',')) - statistics = [statistic.lower() for statistic in statistics] - if not all(statistic in ['avg', 'min', 'max', 'count', 'sum'] for - statistic in statistics): - raise Exception("Invalid statistic") - return statistics - else: - raise Exception("Missing statistics") - except Exception as ex: - LOG.debug(ex) - raise HTTPUnprocessableEntityError('Unprocessable Entity', str(ex)) - - -def get_query_period(req): - try: - params = falcon.uri.parse_query_string(req.query_string) - if 'period' in params: - period = params['period'] - try: - period = int(period) - except Exception: - raise Exception("Period must be a valid integer") - if period < 0: - raise Exception("Period must be a positive integer") - return str(period) - else: - return None - except Exception as ex: - LOG.debug(ex) - raise 
HTTPUnprocessableEntityError('Unprocessable Entity', str(ex)) - - -def get_query_group_by(req): - try: - params = falcon.uri.parse_query_string(req.query_string) - if 'group_by' in params: - group_by = params['group_by'] - if not isinstance(group_by, list): - group_by = [group_by] - return group_by - else: - return None - except Exception as ex: - LOG.debug(ex) - raise HTTPUnprocessableEntityError('Unprocessable Entity', str(ex)) - - -def validate_query_name(name): - """Validates the query param name. - - :param name: Query param name. - :raises falcon.HTTPBadRequest: If name is not valid. - """ - if not name: - return - try: - metric_validation.validate_name(name) - except Exception as ex: - LOG.debug(ex) - raise HTTPUnprocessableEntityError('Unprocessable Entity', str(ex)) - - -def validate_query_dimensions(dimensions): - """Validates the query param dimensions. - - :param dimensions: Query param dimensions. - :raises falcon.HTTPBadRequest: If dimensions are not valid. - """ - try: - - for key, value in dimensions.items(): - if key.startswith('_'): - raise Exception("Dimension key {} may not start with '_'".format(key)) - metric_validation.validate_dimension_key(key) - if value: - if '|' in value: - values = value.split('|') - for v in values: - metric_validation.validate_dimension_value(key, v) - else: - metric_validation.validate_dimension_value(key, value) - except Exception as ex: - LOG.debug(ex) - raise HTTPUnprocessableEntityError('Unprocessable Entity', str(ex)) - - -def paginate(resource, uri, limit): - parsed_uri = urlparse.urlparse(uri) - - self_link = encodeutils.safe_decode(build_base_uri(parsed_uri), 'utf8') - - old_query_params = _get_old_query_params(parsed_uri) - - if old_query_params: - self_link += '?' + '&'.join(old_query_params) - - if resource and len(resource) > limit: - - if 'id' in resource[limit - 1]: - new_offset = resource[limit - 1]['id'] - - next_link = encodeutils.safe_decode(build_base_uri(parsed_uri), 'utf8') - - new_query_params = [u'offset' + '=' + urlparse.quote( - new_offset.encode('utf8'), safe='')] - - _get_old_query_params_except_offset(new_query_params, parsed_uri) - - if new_query_params: - next_link += '?' + '&'.join(new_query_params) - - resource = {u'links': ([{u'rel': u'self', - u'href': self_link}, - {u'rel': u'next', - u'href': next_link}]), - u'elements': resource[:limit]} - - else: - - resource = {u'links': ([{u'rel': u'self', - u'href': encodeutils.safe_decode(self_link, 'utf-8')}]), - u'elements': resource} - - return resource - - -def paginate_with_no_id(dictionary_list, uri, offset, limit): - """This method is to paginate a list of dictionaries with no id in it. - For example, metric name list, directory name list and directory - value list. - """ - parsed_uri = urlparse.urlparse(uri) - self_link = encodeutils.safe_decode(build_base_uri(parsed_uri), 'utf-8') - old_query_params = _get_old_query_params(parsed_uri) - - if old_query_params: - self_link += '?' 
+ '&'.join(old_query_params) - - value_list = [] - for item in dictionary_list: - value_list.extend(item.values()) - - if value_list: - # Truncate dictionary list with offset first - truncated_list_offset = _truncate_with_offset( - dictionary_list, value_list, offset) - - # Then truncate it with limit - truncated_list_offset_limit = truncated_list_offset[:limit] - links = [{u'rel': u'self', u'href': self_link}] - if len(truncated_list_offset) > limit: - new_offset = list(truncated_list_offset_limit[limit - 1].values())[0] - next_link = encodeutils.safe_decode(build_base_uri(parsed_uri), 'utf-8') - new_query_params = [u'offset' + '=' + new_offset] - - _get_old_query_params_except_offset(new_query_params, parsed_uri) - - if new_query_params: - next_link += '?' + '&'.join(new_query_params) - - links.append({u'rel': u'next', u'href': next_link}) - - resource = {u'links': links, - u'elements': truncated_list_offset_limit} - else: - resource = {u'links': ([{u'rel': u'self', - u'href': self_link}]), - u'elements': dictionary_list} - - return resource - - -def _truncate_with_offset(resource, value_list, offset): - """Truncate a list of dictionaries with a given offset. - """ - if not offset: - return resource - - offset = offset.lower() - for i, j in enumerate(value_list): - # if offset matches one of the values in value_list, - # the truncated list should start with the one after current offset - if j == offset: - return resource[i + 1:] - # if offset does not exist in value_list, find the nearest - # location and truncate from that location. - if j > offset: - return resource[i:] - return [] - - -def paginate_alarming(resource, uri, limit): - parsed_uri = urlparse.urlparse(uri) - - self_link = build_base_uri(parsed_uri) - - old_query_params = _get_old_query_params(parsed_uri) - - if old_query_params: - self_link += '?' + '&'.join(old_query_params) - - if resource and len(resource) > limit: - - old_offset = 0 - for param in old_query_params: - if param.find('offset') >= 0: - old_offset = int(param.split('=')[-1]) - new_offset = str(limit + old_offset) - - next_link = build_base_uri(parsed_uri) - - new_query_params = [u'offset' + '=' + urlparse.quote( - new_offset.encode('utf8'), safe='')] - - _get_old_query_params_except_offset(new_query_params, parsed_uri) - - if new_query_params: - next_link += '?' + '&'.join(new_query_params) - - resource = {u'links': ([{u'rel': u'self', - u'href': encodeutils.safe_decode(self_link, 'utf8')}, - {u'rel': u'next', - u'href': encodeutils.safe_decode(next_link, 'utf8')}]), - u'elements': resource[:limit]} - - else: - - resource = {u'links': ([{u'rel': u'self', - u'href': encodeutils.safe_decode(self_link, 'utf8')}]), - u'elements': resource} - - return resource - - -def paginate_dimension_values(dimvals, uri, offset, limit): - - parsed_uri = urlparse.urlparse(uri) - self_link = build_base_uri(parsed_uri) - old_query_params = _get_old_query_params(parsed_uri) - - if old_query_params: - self_link += '?' + '&'.join(old_query_params) - - if (dimvals and dimvals[u'values']): - have_more, truncated_values = _truncate_dimension_values(dimvals[u'values'], - limit, - offset) - - links = [{u'rel': u'self', u'href': self_link.decode('utf8')}] - if have_more: - new_offset = truncated_values[limit - 1] - next_link = build_base_uri(parsed_uri) - new_query_params = [u'offset' + '=' + urlparse.quote( - new_offset.encode('utf8'), safe='')] - - _get_old_query_params_except_offset(new_query_params, parsed_uri) - - if new_query_params: - next_link += '?' 
+ '&'.join(new_query_params) - - links.append({u'rel': u'next', u'href': next_link.decode('utf8')}) - - truncated_dimvals = {u'id': dimvals[u'id'], - u'dimension_name': dimvals[u'dimension_name'], - u'values': truncated_values} - # - # Only return metric name if one was provided - # - if u'metric_name' in dimvals: - truncated_dimvals[u'metric_name'] = dimvals[u'metric_name'] - - resource = {u'links': links, - u'elements': [truncated_dimvals]} - else: - resource = {u'links': ([{u'rel': u'self', - u'href': self_link.decode('utf8')}]), - u'elements': [dimvals]} - - return resource - - -def _truncate_dimension_values(values, limit, offset): - if offset and offset in values: - next_value_pos = values.index(offset) + 1 - values = values[next_value_pos:] - have_more = len(values) > limit - return have_more, values[:limit] - - -def paginate_measurements(measurements, uri, limit): - parsed_uri = urlparse.urlparse(uri) - - self_link = build_base_uri(parsed_uri) - self_link = encodeutils.safe_decode(self_link, 'utf-8') - - old_query_params = _get_old_query_params(parsed_uri) - - if old_query_params: - self_link += '?' + '&'.join(old_query_params) - - if measurements: - measurement_elements = [] - resource = {u'links': [{u'rel': u'self', - u'href': self_link}, - ]} - for measurement in measurements: - if len(measurement['measurements']) >= limit: - - new_offset = ('_').join([measurement['id'], - measurement['measurements'][limit - 1][0]]) - - next_link = build_base_uri(parsed_uri) - next_link = encodeutils.safe_decode(next_link, 'utf-8') - - new_query_params = [u'offset' + '=' + urlparse.quote( - new_offset.encode('utf8'), safe='')] - - _get_old_query_params_except_offset(new_query_params, parsed_uri) - - if new_query_params: - next_link += '?' + '&'.join(new_query_params) - - resource[u'links'].append({u'rel': u'next', - u'href': next_link}) - - truncated_measurement = {u'dimensions': measurement['dimensions'], - u'measurements': (measurement - ['measurements'][:limit]), - u'name': measurement['name'], - u'columns': measurement['columns'], - u'id': measurement['id']} - measurement_elements.append(truncated_measurement) - break - else: - limit -= len(measurement['measurements']) - measurement_elements.append(measurement) - - resource[u'elements'] = measurement_elements - - else: - resource = {u'links': ([{u'rel': u'self', - u'href': self_link}]), - u'elements': []} - - return resource - - -def _get_old_query_params(parsed_uri): - old_query_params = [] - - if parsed_uri.query: - - for query_param in parsed_uri.query.split('&'): - query_param_name, query_param_val = query_param.split('=', 1) - - old_query_params.append(urlparse.quote( - query_param_name.encode('utf8'), safe='') + - "=" + - urlparse.quote(query_param_val.encode('utf8'), safe='')) - - return old_query_params - - -def _get_old_query_params_except_offset(new_query_params, parsed_uri): - if parsed_uri.query: - - for query_param in parsed_uri.query.split('&'): - query_param_name, query_param_val = query_param.split('=', 1) - if query_param_name.lower() != 'offset': - new_query_params.append(urlparse.quote( - query_param_name.encode( - 'utf8'), safe='') + "=" + urlparse.quote( - query_param_val.encode( - 'utf8'), safe='')) - - -def paginate_statistics(statistics, uri, limit): - parsed_uri = urlparse.urlparse(uri) - - self_link = build_base_uri(parsed_uri) - - old_query_params = _get_old_query_params(parsed_uri) - - if old_query_params: - self_link += '?' 
+ '&'.join(old_query_params) - - self_link = encodeutils.safe_decode(self_link, 'utf-8') - - if statistics: - statistic_elements = [] - resource = {u'links': [{u'rel': u'self', - u'href': self_link}]} - - for statistic in statistics: - stat_id = statistic['id'] - if len(statistic['statistics']) >= limit: - - # cassadra impl use both id and timestamp to paginate in group by - if 'end_time' in statistic: - new_offset = '_'.join([stat_id, statistic['end_time']]) - del statistic['end_time'] - else: - new_offset = ( - statistic['statistics'][limit - 1][0]) - - next_link = build_base_uri(parsed_uri) - - new_query_params = [u'offset' + '=' + urlparse.quote( - new_offset.encode('utf8'), safe='')] - - _get_old_query_params_except_offset(new_query_params, parsed_uri) - - if new_query_params: - next_link += '?' + '&'.join(new_query_params) - - next_link = encodeutils.safe_decode(next_link, 'utf-8') - resource[u'links'].append({u'rel': u'next', - u'href': next_link}) - - truncated_statistic = {u'dimensions': statistic['dimensions'], - u'statistics': (statistic['statistics'][:limit]), - u'name': statistic['name'], - u'columns': statistic['columns'], - u'id': statistic['id']} - - statistic_elements.append(truncated_statistic) - break - else: - limit -= len(statistic['statistics']) - if 'end_time' in statistic: - del statistic['end_time'] - statistic_elements.append(statistic) - - resource[u'elements'] = statistic_elements - - else: - - resource = {u'links': ([{u'rel': u'self', - u'href': self_link}]), - u'elements': []} - - return resource - - -def create_alarms_count_next_link(uri, offset, limit): - if offset is None: - offset = 0 - parsed_url = urlparse.urlparse(uri) - base_url = build_base_uri(parsed_url) - new_query_params = [u'offset=' + urlparse.quote(str(offset + limit))] - _get_old_query_params_except_offset(new_query_params, parsed_url) - - next_link = base_url - if new_query_params: - next_link += '?' + '&'.join(new_query_params) - - return next_link - - -def build_base_uri(parsed_uri): - return parsed_uri.scheme + '://' + parsed_uri.netloc + parsed_uri.path - - -def get_link(uri, resource_id, rel='self'): - """Returns a link dictionary containing href, and rel. - - :param uri: the http request.uri. - :param resource_id: the id of the resource - """ - parsed_uri = urlparse.urlparse(uri) - href = build_base_uri(parsed_uri) - href += '/' + resource_id - - if rel: - link_dict = dict(href=href, rel=rel) - else: - link_dict = dict(href=href) - - return link_dict - - -def add_links_to_resource(resource, uri, rel='self'): - """Adds links to the given resource dictionary. - - :param resource: the resource dictionary you wish to add links. - :param uri: the http request.uri. - """ - resource['links'] = [get_link(uri, resource['id'], rel)] - return resource - - -def add_links_to_resource_list(resourcelist, uri): - """Adds links to the given resource dictionary list. - - :param resourcelist: the list of resources you wish to add links. - :param uri: the http request.uri. - """ - for resource in resourcelist: - add_links_to_resource(resource, uri) - return resourcelist - - -def raise_not_found_exception(resource_name, resource_id, tenant_id): - """Provides exception for not found requests (update, delete, list). - - :param resource_name: the name of the resource. - :param resource_id: id of the resource. 
- :param tenant_id: id of the tenant - """ - msg = 'No %s method exists for tenant_id = %s id = %s' % ( - resource_name, tenant_id, resource_id) - raise falcon.HTTPError( - status='404 Not Found', - title='Not Found', - description=msg, - code=404) - - -def str_2_bool(s): - return s.lower() in ("true") diff --git a/monasca_api/v2/reference/logs.py b/monasca_api/v2/reference/logs.py deleted file mode 100644 index 539a7b768..000000000 --- a/monasca_api/v2/reference/logs.py +++ /dev/null @@ -1,89 +0,0 @@ -# Copyright 2016 Hewlett Packard Enterprise Development Company, L.P. -# Copyright 2016-2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import falcon -from oslo_log import log - -from monasca_api.api.core.log import exceptions -from monasca_api.api.core.log import validation -from monasca_api.api import logs_api -from monasca_api import conf -from monasca_api.v2.common import bulk_processor -from monasca_api.v2.reference import helpers - - -CONF = conf.CONF -LOG = log.getLogger(__name__) - - -class Logs(logs_api.LogsApi): - - VERSION = 'v2.0' - SUPPORTED_CONTENT_TYPES = {'application/json'} - - def __init__(self): - - super(Logs, self).__init__() - self._processor = bulk_processor.BulkProcessor() - - def on_post(self, req, res): - helpers.validate_json_content_type(req) - helpers.validate_authorization(req, ['api:logs:post']) - helpers.validate_payload_size(req.content_length) - self.process_on_post_request(req, res) - - def process_on_post_request(self, req, res): - try: - request_body = helpers.from_json(req) - log_list = self._get_logs(request_body) - global_dimensions = self._get_global_dimensions(request_body) - - except Exception as ex: - LOG.error('Entire bulk package has been rejected') - LOG.exception(ex) - - raise ex - tenant_id = (req.cross_project_id if req.cross_project_id - else req.project_id) - - try: - self._processor.send_message( - logs=log_list, - global_dimensions=global_dimensions, - log_tenant_id=tenant_id - ) - except Exception as ex: - res.status = getattr(ex, 'status', falcon.HTTP_500) - return - - res.status = falcon.HTTP_204 - - @staticmethod - def _get_global_dimensions(request_body): - """Get the top level dimensions in the HTTP request body.""" - global_dims = request_body.get('dimensions', {}) - validation.validate_dimensions(global_dims) - return global_dims - - @staticmethod - def _get_logs(request_body): - """Get the logs in the HTTP request body.""" - if request_body is None: - raise falcon.HTTPBadRequest('Bad request', - 'Request body is Empty') - if 'logs' not in request_body: - raise exceptions.HTTPUnprocessableEntity( - 'Unprocessable Entity Logs not found') - return request_body['logs'] diff --git a/monasca_api/v2/reference/metrics.py b/monasca_api/v2/reference/metrics.py deleted file mode 100644 index a66b65341..000000000 --- a/monasca_api/v2/reference/metrics.py +++ /dev/null @@ -1,353 +0,0 @@ -# (C) Copyright 2014-2017 Hewlett Packard Enterprise Development LP -# Copyright 2018 OP5 AB -# -# Licensed under the Apache License, 
Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import falcon -from monasca_common.simport import simport -from monasca_common.validation import metrics as metric_validation -from oslo_config import cfg -from oslo_log import log - -from monasca_api.api import metrics_api_v2 -from monasca_api.common.messaging import ( - exceptions as message_queue_exceptions) -from monasca_api.common.messaging.message_formats import ( - metrics as metrics_message) -from monasca_api.v2.common.exceptions import HTTPUnprocessableEntityError -from monasca_api.v2.reference import helpers -from monasca_api.v2.reference import resource - -LOG = log.getLogger(__name__) - - -def get_merge_metrics_flag(req): - '''Return the value of the optional metrics_flag - - Returns False if merge_metrics parameter is not supplied or is not a - string that evaluates to True, otherwise True - ''' - - merge_metrics_flag = helpers.get_query_param(req, - 'merge_metrics', - False, - False) - if merge_metrics_flag is not False: - return helpers.str_2_bool(merge_metrics_flag) - else: - return False - - -class Metrics(metrics_api_v2.MetricsV2API): - def __init__(self): - try: - super(Metrics, self).__init__() - self._region = cfg.CONF.region - self._message_queue = simport.load(cfg.CONF.messaging.driver)( - 'metrics') - self._metrics_repo = simport.load( - cfg.CONF.repositories.metrics_driver)() - self._batch_size = cfg.CONF.kafka.queue_buffering_max_messages - - except Exception as ex: - LOG.exception(ex) - raise falcon.HTTPInternalServerError('Service unavailable', - str(ex)) - - def _send_metrics(self, metrics): - try: - for i in range(0, len(metrics), self._batch_size): - batch = metrics[i:i + self._batch_size] - self._message_queue.send_message(batch) - except message_queue_exceptions.MessageQueueException as ex: - LOG.exception(ex) - raise falcon.HTTPServiceUnavailable('Service unavailable', - str(ex), 60) - - def _list_metrics(self, tenant_id, name, dimensions, req_uri, offset, - limit, start_timestamp, end_timestamp): - - result = self._metrics_repo.list_metrics(tenant_id, - self._region, - name, - dimensions, - offset, limit, - start_timestamp, - end_timestamp) - - return helpers.paginate(result, req_uri, limit) - - @resource.resource_try_catch_block - def on_post(self, req, res): - helpers.validate_json_content_type(req) - helpers.validate_authorization(req, ['api:metrics:post']) - metrics = helpers.from_json(req) - try: - metric_validation.validate(metrics) - except Exception as ex: - LOG.exception(ex) - raise HTTPUnprocessableEntityError("Unprocessable Entity", str(ex)) - - tenant_id = helpers.get_x_tenant_or_tenant_id(req, ['api:delegate']) - transformed_metrics = metrics_message.transform( - metrics, tenant_id, self._region) - self._send_metrics(transformed_metrics) - res.status = falcon.HTTP_204 - - @resource.resource_try_catch_block - def on_get(self, req, res): - helpers.validate_authorization(req, ['api:metrics:get']) - tenant_id = helpers.get_x_tenant_or_tenant_id(req, ['api:delegate']) - name = helpers.get_query_name(req) - helpers.validate_query_name(name) - 
dimensions = helpers.get_query_dimensions(req) - helpers.validate_query_dimensions(dimensions) - offset = helpers.get_query_param(req, 'offset') - start_timestamp = helpers.get_query_starttime_timestamp(req, False) - end_timestamp = helpers.get_query_endtime_timestamp(req, False) - helpers.validate_start_end_timestamps(start_timestamp, end_timestamp) - result = self._list_metrics(tenant_id, name, - dimensions, req.uri, - offset, req.limit, - start_timestamp, end_timestamp) - res.text = helpers.to_json(result) - res.status = falcon.HTTP_200 - - -class MetricsMeasurements(metrics_api_v2.MetricsMeasurementsV2API): - def __init__(self): - try: - super(MetricsMeasurements, self).__init__() - self._region = cfg.CONF.region - self._metrics_repo = simport.load( - cfg.CONF.repositories.metrics_driver)() - - except Exception as ex: - LOG.exception(ex) - raise falcon.HTTPInternalServerError('Service unavailable', - str(ex)) - - @resource.resource_try_catch_block - def on_get(self, req, res): - helpers.validate_authorization(req, ['api:metrics:get']) - tenant_id = helpers.get_x_tenant_or_tenant_id(req, ['api:delegate']) - name = helpers.get_query_name(req, True) - helpers.validate_query_name(name) - dimensions = helpers.get_query_dimensions(req) - helpers.validate_query_dimensions(dimensions) - start_timestamp = helpers.get_query_starttime_timestamp(req) - end_timestamp = helpers.get_query_endtime_timestamp(req, False) - helpers.validate_start_end_timestamps(start_timestamp, end_timestamp) - offset = helpers.get_query_param(req, 'offset') - merge_metrics_flag = get_merge_metrics_flag(req) - group_by = helpers.get_query_group_by(req) - - result = self._measurement_list(tenant_id, name, dimensions, - start_timestamp, end_timestamp, - req.uri, offset, - req.limit, merge_metrics_flag, - group_by) - - res.text = helpers.to_json(result) - res.status = falcon.HTTP_200 - - def _measurement_list(self, tenant_id, name, dimensions, start_timestamp, - end_timestamp, req_uri, offset, - limit, merge_metrics_flag, group_by): - - result = self._metrics_repo.measurement_list(tenant_id, - self._region, - name, - dimensions, - start_timestamp, - end_timestamp, - offset, - limit, - merge_metrics_flag, - group_by) - - return helpers.paginate_measurements(result, req_uri, limit) - - -class MetricsStatistics(metrics_api_v2.MetricsStatisticsV2API): - def __init__(self): - try: - super(MetricsStatistics, self).__init__() - self._region = cfg.CONF.region - self._metrics_repo = simport.load( - cfg.CONF.repositories.metrics_driver)() - - except Exception as ex: - LOG.exception(ex) - raise falcon.HTTPInternalServerError('Service unavailable', - str(ex)) - - @resource.resource_try_catch_block - def on_get(self, req, res): - helpers.validate_authorization(req, ['api:metrics:get']) - tenant_id = helpers.get_x_tenant_or_tenant_id(req, ['api:delegate']) - name = helpers.get_query_name(req, True) - helpers.validate_query_name(name) - dimensions = helpers.get_query_dimensions(req) - helpers.validate_query_dimensions(dimensions) - start_timestamp = helpers.get_query_starttime_timestamp(req) - end_timestamp = helpers.get_query_endtime_timestamp(req, False) - helpers.validate_start_end_timestamps(start_timestamp, end_timestamp) - statistics = helpers.get_query_statistics(req) - period = helpers.get_query_period(req) - offset = helpers.get_query_param(req, 'offset') - merge_metrics_flag = get_merge_metrics_flag(req) - group_by = helpers.get_query_group_by(req) - - result = self._metric_statistics(tenant_id, name, dimensions, - 
start_timestamp, end_timestamp, - statistics, period, req.uri, - offset, req.limit, merge_metrics_flag, - group_by) - - res.text = helpers.to_json(result) - res.status = falcon.HTTP_200 - - def _metric_statistics(self, tenant_id, name, dimensions, start_timestamp, - end_timestamp, statistics, period, req_uri, - offset, limit, merge_metrics_flag, group_by): - - result = self._metrics_repo.metrics_statistics(tenant_id, - self._region, - name, - dimensions, - start_timestamp, - end_timestamp, - statistics, period, - offset, - limit, - merge_metrics_flag, - group_by) - - return helpers.paginate_statistics(result, req_uri, limit) - - -class MetricsNames(metrics_api_v2.MetricsNamesV2API): - def __init__(self): - try: - super(MetricsNames, self).__init__() - self._region = cfg.CONF.region - self._metrics_repo = simport.load( - cfg.CONF.repositories.metrics_driver)() - - except Exception as ex: - LOG.exception(ex) - raise falcon.HTTPInternalServerError('Service unavailable', - str(ex)) - - @resource.resource_try_catch_block - def on_get(self, req, res): - helpers.validate_authorization(req, ['api:metrics:get']) - tenant_id = helpers.get_x_tenant_or_tenant_id(req, ['api:delegate']) - dimensions = helpers.get_query_dimensions(req) - helpers.validate_query_dimensions(dimensions) - offset = helpers.get_query_param(req, 'offset') - result = self._list_metric_names(tenant_id, dimensions, - req.uri, offset, req.limit) - res.text = helpers.to_json(result) - res.status = falcon.HTTP_200 - - def _list_metric_names(self, tenant_id, dimensions, req_uri, offset, - limit): - - result = self._metrics_repo.list_metric_names(tenant_id, - self._region, - dimensions) - - return helpers.paginate_with_no_id(result, req_uri, offset, limit) - - -class DimensionValues(metrics_api_v2.DimensionValuesV2API): - def __init__(self): - try: - super(DimensionValues, self).__init__() - self._region = cfg.CONF.region - self._metrics_repo = simport.load( - cfg.CONF.repositories.metrics_driver)() - - except Exception as ex: - LOG.exception(ex) - raise falcon.HTTPInternalServerError('Service unavailable', str(ex)) - - @resource.resource_try_catch_block - def on_get(self, req, res): - helpers.validate_authorization(req, ['api:metrics:dimension:values']) - tenant_id = helpers.get_x_tenant_or_tenant_id(req, ['api:delegate']) - metric_name = helpers.get_query_param(req, 'metric_name') - dimension_name = helpers.get_query_param(req, 'dimension_name', - required=True) - offset = helpers.get_query_param(req, 'offset') - start_timestamp = helpers.get_query_starttime_timestamp(req, False) - end_timestamp = helpers.get_query_endtime_timestamp(req, False) - result = self._dimension_values(tenant_id, req.uri, metric_name, - dimension_name, offset, req.limit, - start_timestamp, end_timestamp) - res.text = helpers.to_json(result) - res.status = falcon.HTTP_200 - - def _dimension_values(self, tenant_id, req_uri, metric_name, - dimension_name, offset, limit, start_timestamp, - end_timestamp): - - result = self._metrics_repo.list_dimension_values(tenant_id, - self._region, - metric_name, - dimension_name, - start_timestamp, - end_timestamp) - - return helpers.paginate_with_no_id(result, req_uri, offset, limit) - - -class DimensionNames(metrics_api_v2.DimensionNamesV2API): - def __init__(self): - try: - super(DimensionNames, self).__init__() - self._region = cfg.CONF.region - self._metrics_repo = simport.load( - cfg.CONF.repositories.metrics_driver)() - - except Exception as ex: - LOG.exception(ex) - raise falcon.HTTPInternalServerError('Service 
unavailable', - str(ex)) - - @resource.resource_try_catch_block - def on_get(self, req, res): - helpers.validate_authorization(req, ['api:metrics:dimension:names']) - tenant_id = helpers.get_x_tenant_or_tenant_id(req, ['api:delegate']) - metric_name = helpers.get_query_param(req, 'metric_name') - offset = helpers.get_query_param(req, 'offset') - start_timestamp = helpers.get_query_starttime_timestamp(req, False) - end_timestamp = helpers.get_query_endtime_timestamp(req, False) - result = self._dimension_names(tenant_id, req.uri, metric_name, - offset, req.limit, - start_timestamp, end_timestamp) - res.text = helpers.to_json(result) - res.status = falcon.HTTP_200 - - def _dimension_names(self, tenant_id, req_uri, metric_name, offset, limit, - start_timestamp, end_timestamp): - - result = self._metrics_repo.list_dimension_names(tenant_id, - self._region, - metric_name, - start_timestamp, - end_timestamp) - - return helpers.paginate_with_no_id(result, req_uri, offset, limit) diff --git a/monasca_api/v2/reference/notifications.py b/monasca_api/v2/reference/notifications.py deleted file mode 100644 index ac1e7fc2a..000000000 --- a/monasca_api/v2/reference/notifications.py +++ /dev/null @@ -1,277 +0,0 @@ -# (C) Copyright 2014-2017 Hewlett Packard Enterprise Development LP -# Copyright 2018 OP5 AB -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import falcon -from monasca_common.simport import simport -from oslo_config import cfg -from oslo_log import log -import six - -from monasca_api.api import notifications_api_v2 -from monasca_api.common.repositories import exceptions -from monasca_api.v2.common.exceptions import HTTPUnprocessableEntityError -from monasca_api.v2.common.schemas import ( - notifications_request_body_schema as schemas_notifications) -from monasca_api.v2.common.schemas import exceptions as schemas_exceptions -from monasca_api.v2.common import validation -from monasca_api.v2.reference import helpers -from monasca_api.v2.reference import resource - -LOG = log.getLogger(__name__) - - -class Notifications(notifications_api_v2.NotificationsV2API): - def __init__(self): - - super(Notifications, self).__init__() - - self._region = cfg.CONF.region - self._notifications_repo = simport.load( - cfg.CONF.repositories.notifications_driver)() - self._notification_method_type_repo = simport.load( - cfg.CONF.repositories.notification_method_type_driver)() - self.valid_periods = cfg.CONF.valid_notification_periods - - def _parse_and_validate_notification(self, notification, require_all=False): - """Validates the notification - - :param notification: An event object. 
- :raises falcon.HTTPBadRequest - """ - try: - schemas_notifications.parse_and_validate( - notification, self.valid_periods, require_all=require_all) - except schemas_exceptions.ValidationException as ex: - LOG.exception(ex) - raise falcon.HTTPBadRequest(title='Bad Request', description=str(ex)) - - def _validate_name_not_conflicting(self, tenant_id, name, expected_id=None): - try: - notification = self._notifications_repo.find_notification_by_name(tenant_id, name) - except exceptions.DoesNotExistException: - notification = None - - if notification: - if not expected_id: - LOG.warning( - "Found existing notification method for {} with tenant_id {}" - .format(name, tenant_id)) - raise exceptions.AlreadyExistsException( - "A notification method with the name {} already exists".format(name)) - - found_notification_id = notification['id'] - if found_notification_id != expected_id: - LOG.warning( - "Found existing notification method for {} " - "with tenant_id {} with unexpected id {}" - .format(name, tenant_id, found_notification_id)) - raise exceptions.AlreadyExistsException( - "A notification method with name {} already exists with id {}" - .format(name, found_notification_id)) - - def _validate_notification_method_type_exist(self, nmt): - notification_methods = self._notification_method_type_repo.list_notification_method_types() - exists = nmt.upper() in notification_methods - - if not exists: - LOG.warning( - "Found no notification method type {}." - "Did you install/enable the plugin for that type?" - .format(nmt)) - raise falcon.HTTPBadRequest( - title='Bad Request', - description="Not a valid notification method type {} ".format(nmt)) - - def _create_notification(self, tenant_id, notification, uri): - - name = notification['name'] - notification_type = notification['type'].upper() - address = notification['address'] - period = notification['period'] - - self._validate_name_not_conflicting(tenant_id, name) - self._validate_notification_method_type_exist(notification_type) - - notification_id = self._notifications_repo.create_notification( - tenant_id, - name, - notification_type, - address, - period) - - return self._create_notification_response(notification_id, - name, - notification_type, - address, - period, - uri) - - def _update_notification(self, notification_id, tenant_id, notification, uri): - - name = notification['name'] - notification_type = notification['type'].upper() - address = notification['address'] - period = notification['period'] - - self._validate_name_not_conflicting(tenant_id, name, expected_id=notification_id) - self._validate_notification_method_type_exist(notification_type) - - self._notifications_repo.update_notification(notification_id, tenant_id, name, - notification_type, - address, - period) - - return self._create_notification_response(notification_id, - name, - notification_type, - address, - period, - uri) - - def _create_notification_response(self, id, name, type, - address, period, uri): - - response = { - 'id': id, - 'name': name, - 'type': type, - 'address': address, - 'period': period - } - - return helpers.add_links_to_resource(response, uri) - - def _list_notifications(self, tenant_id, uri, sort_by, offset, limit): - - rows = self._notifications_repo.list_notifications(tenant_id, sort_by, - offset, limit) - - result = [self._build_notification_result(row, - uri) for row in rows] - - return helpers.paginate(result, uri, limit) - - def _list_notification(self, tenant_id, notification_id, uri): - - row = self._notifications_repo.list_notification( - 
tenant_id, - notification_id) - - return self._build_notification_result(row, uri) - - def _build_notification_result(self, notification_row, uri): - - result = { - u'id': notification_row['id'], - u'name': notification_row['name'], - u'type': notification_row['type'], - u'address': notification_row['address'], - u'period': notification_row['period'] - } - - helpers.add_links_to_resource(result, uri) - - return result - - def _delete_notification(self, tenant_id, notification_id): - - self._notifications_repo.delete_notification(tenant_id, - notification_id) - - def _patch_get_notification(self, tenant_id, notification_id, notification): - original_notification = self._notifications_repo.list_notification( - tenant_id, notification_id) - if 'name' not in notification: - notification['name'] = original_notification['name'] - if 'type' not in notification: - notification['type'] = original_notification['type'] - if 'address' not in notification: - notification['address'] = original_notification['address'] - if 'period' not in notification: - notification['period'] = original_notification['period'] - - @resource.resource_try_catch_block - def on_post(self, req, res): - helpers.validate_json_content_type(req) - helpers.validate_authorization(req, ['api:notifications:post']) - notification = helpers.from_json(req) - self._parse_and_validate_notification(notification) - result = self._create_notification(req.project_id, notification, req.uri) - res.text = helpers.to_json(result) - res.status = falcon.HTTP_201 - - @resource.resource_try_catch_block - def on_get(self, req, res, notification_method_id=None): - helpers.validate_authorization(req, ['api:notifications:get']) - if notification_method_id is None: - sort_by = helpers.get_query_param(req, 'sort_by', default_val=None) - if sort_by is not None: - if isinstance(sort_by, six.string_types): - sort_by = sort_by.split(',') - - allowed_sort_by = {'id', 'name', 'type', 'address', - 'updated_at', 'created_at'} - - validation.validate_sort_by(sort_by, allowed_sort_by) - - offset = helpers.get_query_param(req, 'offset') - if offset is not None and not isinstance(offset, int): - try: - offset = int(offset) - except Exception: - raise HTTPUnprocessableEntityError('Unprocessable Entity', - 'Offset value {} must be an integer' - .format(offset)) - - result = self._list_notifications(req.project_id, req.uri, sort_by, - offset, req.limit) - - else: - - result = self._list_notification(req.project_id, - notification_method_id, - req.uri) - - res.text = helpers.to_json(result) - res.status = falcon.HTTP_200 - - @resource.resource_try_catch_block - def on_delete(self, req, res, notification_method_id): - helpers.validate_authorization(req, ['api:notifications:delete']) - self._delete_notification(req.project_id, notification_method_id) - res.status = falcon.HTTP_204 - - @resource.resource_try_catch_block - def on_put(self, req, res, notification_method_id): - helpers.validate_json_content_type(req) - helpers.validate_authorization(req, ['api:notifications:put']) - notification = helpers.from_json(req) - self._parse_and_validate_notification(notification, require_all=True) - result = self._update_notification(notification_method_id, req.project_id, - notification, req.uri) - res.text = helpers.to_json(result) - res.status = falcon.HTTP_200 - - @resource.resource_try_catch_block - def on_patch(self, req, res, notification_method_id): - helpers.validate_json_content_type(req) - helpers.validate_authorization(req, ['api:notifications:patch']) - notification = 
helpers.from_json(req) - self._patch_get_notification(req.project_id, notification_method_id, notification) - self._parse_and_validate_notification(notification, require_all=True) - result = self._update_notification(notification_method_id, req.project_id, - notification, req.uri) - res.text = helpers.to_json(result) - res.status = falcon.HTTP_200 diff --git a/monasca_api/v2/reference/notificationstype.py b/monasca_api/v2/reference/notificationstype.py deleted file mode 100644 index 5d6f9b1f0..000000000 --- a/monasca_api/v2/reference/notificationstype.py +++ /dev/null @@ -1,44 +0,0 @@ -# (C) Copyright 2016-2017 Hewlett Packard Enterprise Development LP -# Copyright 2018 OP5 AB -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import falcon -from monasca_common.simport import simport -from oslo_config import cfg - -from monasca_api.api import notificationstype_api_v2 -from monasca_api.v2.reference import helpers -from monasca_api.v2.reference import resource - - -class NotificationsType(notificationstype_api_v2.NotificationsTypeV2API): - def __init__(self): - super(NotificationsType, self).__init__() - self._notification_method_type_repo = simport.load( - cfg.CONF.repositories.notification_method_type_driver)() - - def _list_notifications(self, uri, limit): - rows = self._notification_method_type_repo.list_notification_method_types() - result = [dict(type=row) for row in rows] - return helpers.paginate(result, uri, limit) - - @resource.resource_try_catch_block - def on_get(self, req, res): - helpers.validate_authorization(req, ['api:notifications:type']) - # This is to provide consistency. Pagination is not really supported here as there - # are not that many rows - result = self._list_notifications(req.uri, req.limit) - - res.text = helpers.to_json(result) - res.status = falcon.HTTP_200 diff --git a/monasca_api/v2/reference/resource.py b/monasca_api/v2/reference/resource.py deleted file mode 100644 index 8ecb22a25..000000000 --- a/monasca_api/v2/reference/resource.py +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright 2014 Hewlett-Packard -# (C) Copyright 2015 Hewlett Packard Enterprise Development Company LP -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
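# The notifications on_patch handler above merges the incoming partial
# document with the stored notification (via _patch_get_notification) and
# then re-validates with require_all=True. A hedged sketch of that
# read-merge-validate pattern (field list taken from the handler above):
def merge_patch(original, patch):
    # Any field missing from the PATCH body keeps its stored value, so
    # the merged document can be validated as if it were a full PUT.
    merged = dict(patch)
    for field in ('name', 'type', 'address', 'period'):
        merged.setdefault(field, original[field])
    return merged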
- -import falcon -from oslo_log import log - -from monasca_api.common.repositories import exceptions -from monasca_api.v2.common.exceptions import HTTPUnprocessableEntityError - -LOG = log.getLogger(__name__) - - -def resource_try_catch_block(fun): - def try_it(*args, **kwargs): - try: - return fun(*args, **kwargs) - - except falcon.HTTPError: - raise - - except exceptions.DoesNotExistException: - raise falcon.HTTPNotFound - - except exceptions.MultipleMetricsException as ex: - raise falcon.HTTPConflict(title="MultipleMetrics", description=str(ex)) - - except exceptions.AlreadyExistsException as ex: - raise falcon.HTTPConflict(title=ex.__class__.__name__, description=str(ex)) - - except exceptions.InvalidUpdateException as ex: - raise HTTPUnprocessableEntityError(title=ex.__class__.__name__, description=str(ex)) - - except exceptions.RepositoryException as ex: - LOG.exception(ex) - msg = " ".join(map(str, ex.args[0].args)) - raise falcon.HTTPInternalServerError( - title='The repository was unable to process your request', - description=msg - ) - - except Exception as ex: - LOG.exception(ex) - raise falcon.HTTPInternalServerError(title='Service unavailable', - description=str(ex)) - - return try_it diff --git a/monasca_api/v2/reference/version_2_0.py b/monasca_api/v2/reference/version_2_0.py deleted file mode 100644 index 8d83551c5..000000000 --- a/monasca_api/v2/reference/version_2_0.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright 2016 Hewlett Packard Enterprise Development Company, L.P. -# Copyright 2018 OP5 AB -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_utils import encodeutils - -from monasca_api.v2.reference import helpers - - -class Version2(object): - def __init__(self): - super(Version2, self).__init__() - - def on_get(self, req, res): - helpers.validate_authorization(req, - ['api:versions']) - result = { - 'id': 'v2.0', - 'links': [{ - 'rel': 'self', - 'href': encodeutils.safe_decode(req.uri, 'utf-8') - }], - 'status': 'CURRENT', - 'updated': "2013-03-06T00:00:00.000Z" - } - res.text = helpers.to_json(result) diff --git a/monasca_api/v2/reference/versions.py b/monasca_api/v2/reference/versions.py deleted file mode 100644 index 9b5fd99e7..000000000 --- a/monasca_api/v2/reference/versions.py +++ /dev/null @@ -1,65 +0,0 @@ -# Copyright 2014 Hewlett-Packard -# Copyright 2018 OP5 AB -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
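# resource_try_catch_block above maps repository exceptions onto HTTP
# errors: DoesNotExistException becomes 404, MultipleMetricsException and
# AlreadyExistsException become 409, InvalidUpdateException becomes 422,
# and anything unexpected becomes a logged 500. A condensed sketch of the
# same decorator shape (only the pass-through and catch-all cases shown):
import functools

import falcon


def map_repository_errors(fun):
    @functools.wraps(fun)
    def wrapper(*args, **kwargs):
        try:
            return fun(*args, **kwargs)
        except falcon.HTTPError:
            # Already an HTTP error; let falcon render it unchanged.
            raise
        except Exception as ex:
            # Hide stack traces from API clients behind a generic 500.
            raise falcon.HTTPInternalServerError(
                title='Service unavailable', description=str(ex))
    return wrapper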
- -import falcon -import six - -from monasca_api.api import versions_api -from monasca_api.v2.common.exceptions import HTTPUnprocessableEntityError -from monasca_api.v2.reference import helpers - -VERSIONS = { - 'v2.0': { - 'id': 'v2.0', - 'links': [{ - 'rel': 'self', - 'href': '' - }], - 'status': 'CURRENT', - 'updated': "2013-03-06T00:00:00.000Z" - } -} - - -class Versions(versions_api.VersionsAPI): - def __init__(self): - super(Versions, self).__init__() - - def on_get(self, req, res, version_id=None): - req_uri = req.uri.decode('utf8') if six.PY2 else req.uri - helpers.validate_authorization(req, - ['api:versions']) - result = { - 'links': [{ - 'rel': 'self', - 'href': req_uri - }], - 'elements': [] - } - if version_id is None: - for version in VERSIONS: - VERSIONS[version]['links'][0]['href'] = ( - req_uri + version) - result['elements'].append(VERSIONS[version]) - res.text = helpers.to_json(result) - res.status = falcon.HTTP_200 - else: - if version_id in VERSIONS: - VERSIONS[version_id]['links'][0]['href'] = req_uri - res.text = helpers.to_json(VERSIONS[version_id]) - res.status = falcon.HTTP_200 - else: - raise HTTPUnprocessableEntityError('Invalid version', - 'No versions found matching ' + version_id) diff --git a/monasca_api/version.py b/monasca_api/version.py deleted file mode 100644 index 4b04b7a37..000000000 --- a/monasca_api/version.py +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright 2017 FUJITSU LIMITED -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import pbr.version - -version_info = pbr.version.VersionInfo('monasca-api') -version_str = version_info.version_string() diff --git a/perf/README.md b/perf/README.md deleted file mode 100644 index 77ea62af5..000000000 --- a/perf/README.md +++ /dev/null @@ -1,34 +0,0 @@ - -monasca-api performance benchmarking -============= - -Recommended Configuration -============= - -Install -======= - -Install JMeter - -JMeter can be found at http://jmeter.apache.org/download_jmeter.cgi - -Add the JMeter bin directory to the path: PATH=$PATH:~/.../bin - -Monasca-query performance test -============================== - -This test is designed to work with data created by the persister-perf performance test, but -can work with any monasca-api/db configuration. -monasca-api will need to have its region configured to match the test data. -JMeter uses monasca-api to query the db backend. - -Load monasca_query_test.jmx into JMeter. -Set up the user-defined variables for your environment. - - keystone_server keystone server ip address - monasca-api_server monasca-api server ip address - keystone_user admin keystone user with monitoring permissions - keystone_password secretadmin password for keystone user - tenant_id tenant_1 tenant id set in monasca-api/keystone. - -Run tests.
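The JMeter plan below (monasca_query_test.jmx) first POSTs a password-scoped token request to Keystone and extracts the X-Subject-Token response header for the query threads. For readers who want to replay a single query outside JMeter, a hedged Python sketch of the same handshake, using only the standard library and the plan's default host and credentials:

import json
from urllib import request

# Token request body mirrors the JMeter plan's JSON, with the plan's
# default keystone_user/keystone_password substituted.
body = {"auth": {
    "scope": {"project": {"domain": {"id": "default"},
                          "name": "admin"}},
    "identity": {"methods": ["password"],
                 "password": {"user": {"domain": {"id": "default"},
                                       "name": "admin",
                                       "password": "secretadmin"}}}}}
req = request.Request("http://10.84.43.189/identity/v3/auth/tokens",
                      data=json.dumps(body).encode(),
                      headers={"Content-Type": "application/json"})
# Keystone returns the token in a response header, not the body.
token = request.urlopen(req).headers["X-Subject-Token"]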
diff --git a/perf/monasca_query_test.jmx b/perf/monasca_query_test.jmx deleted file mode 100644 index 9146ab05f..000000000 --- a/perf/monasca_query_test.jmx +++ /dev/null @@ -1,624 +0,0 @@ - - - - - - false - true - - - - - - - - continue - - false - 1 - - 1 - 1 - 1511277511000 - 1511277511000 - false - - - - - - - - keystone_server - 10.84.43.189 - = - keystone server ip address - - - monasca-api_server - 10.84.43.189 - = - monasca-api server ip address - - - keystone_user - admin - = - keystone user with monitoring permissions - - - keystone_password - secretadmin - = - password for keystone user - - - tenant_id - tenant_1 - = - tenant id set in monascas-api/keystone. - - - loop_count - 5 - = - - - - - - true - - - - false - {"auth": {"scope": {"project": {"domain": {"id": "default"}, "name": "${keystone_user}"}}, "identity": {"password": {"user": {"domain": {"id": "default"}, "password": "${keystone_password}", "name": "${keystone_user}"}}, "methods": ["password"]}}} - = - - - - ${keystone_server} - - - - /identity/v3/auth/tokens - POST - true - false - true - false - - HttpClient4 - - - - - - - - Content-type - application/json - - - - - - true - Token - X-Subject-Token: (.+?)\s - $1$ - Doah! - 1 - all - Token - - - - - true - ${loop_count} - - - - true - - - - false - - = - - - - ${monasca-api_server} - 8070 - - - /v2.0/metrics?tenant_id=${tenant_id}&limit=2 - GET - true - false - true - false - - - - - - - - - Accept - application/json - - - X-Auth-Token - ${Token} - - - - - - - - true - ${loop_count} - - - - true - - - - false - - = - - - - ${monasca-api_server} - 8070 - - - /v2.0/metrics?tenant_id=${tenant_id}&limit=9000 - GET - true - false - true - false - - - - - - - - - Accept - application/json - - - X-Auth-Token - ${Token} - - - - - - - - true - ${loop_count} - - - - true - - - - false - - = - - - - ${monasca-api_server} - 8070 - - - /v2.0/metrics?tenant_id=${tenant_id}&dimensions=${dimension_name_1}:${dimension_value}&name=${metric_name}&limit=9000 - GET - true - false - true - false - - - - - - - - - Accept - application/json - - - X-Auth-Token - ${Token} - - - - - - - - true - ${loop_count} - - - - true - - - - false - - = - - - - ${monasca-api_server} - 8070 - - - /v2.0/metrics/dimensions/names?tenant_id=${tenant_id} - GET - true - false - false - false - - - - - - - dimension_name - $..dimension_name - -1 - all - - - - - - Accept - application/json - - - X-Auth-Token - ${Token} - - - - - - - - true - ${loop_count} - - - - true - - - - false - - = - - - - ${monasca-api_server} - 8070 - - - /v2.0/metrics/dimensions/names/values?tenant_id=${tenant_id}&dimension_name=${dimension_name_1} - GET - true - false - false - false - - - - return all of the hostnames values. 
- - - - dimension_value - $..dimension_value - 0 - all - - - - - - Accept - application/json - - - X-Auth-Token - ${Token} - - - - - - - - true - ${loop_count} - - - - true - - - - false - - = - - - - ${monasca-api_server} - 8070 - - - /v2.0/metrics/names?tenant_id=${tenant_id} - GET - true - false - false - false - - - - - - - metric_name - $..name - 0 - all - - - - - - Accept - application/json - - - X-Auth-Token - ${Token} - - - - - - - - true - ${loop_count} - - - - true - - - - false - - = - - - - ${monasca-api_server} - 8070 - - - /v2.0/metrics/measurements?tenant_id=${tenant_id}&name=${metric_name}&dimensions=${dimension_name_1}:${dimension_value}&start_time=2014-07-18T00:00:01&group_by=dimension - GET - true - false - false - false - - - - - - - - - Accept - application/json - - - X-Auth-Token - ${Token} - - - - - - - - true - ${loop_count} - - - - true - - - - false - - = - - - - ${monasca-api_server} - 8070 - - - /v2.0/metrics/statistics?name=${metric_name}&tenant_id=${tenant_id}&dimensions=${dimension_name_1}:${dimension_value}&start_time=2014-07-18T00:00:01Z&statistics=avg,min,max,sum,count&end_time=${__time(yyyy-MM-dd)}T23:59:59Z&group_by=dimension - GET - true - false - false - false - - - - - - - - - Accept - application/json - - - X-Auth-Token - ${Token} - - - - - - - - false - - saveConfig - - - true - true - true - - true - true - true - true - false - true - true - false - false - false - true - false - false - false - true - 0 - true - true - true - true - true - - - - - - - false - - saveConfig - - - true - true - true - - true - true - true - true - false - true - true - false - false - false - true - false - false - false - true - 0 - true - true - true - true - true - - - - - - - false - - saveConfig - - - true - true - true - - true - true - true - true - false - true - true - false - false - false - true - false - false - false - true - 0 - true - true - true - true - true - - - - Response Time Graph - true - - - - - - true - - - - diff --git a/playbooks/docker-publish.yml b/playbooks/docker-publish.yml deleted file mode 100644 index 4ae4baa3b..000000000 --- a/playbooks/docker-publish.yml +++ /dev/null @@ -1,12 +0,0 @@ ---- -- hosts: all - tasks: - - name: Login to Dockerhub - command: "docker login -u {{ doker_hub_login_api.user }} -p {{ doker_hub_login_api.password }}" - no_log: true - - - name: List images - shell: "docker images --format '{% raw %}{{ .Repository }}:{{ .Tag }}{% endraw %}' | grep monasca" - - - name: Push to Docker Hub all tags - shell: "docker push monasca/api:{{ zuul.tag if zuul.pipeline == 'release' else 'master'}}" diff --git a/pom.xml b/pom.xml deleted file mode 100644 index 2a18ea97c..000000000 --- a/pom.xml +++ /dev/null @@ -1,53 +0,0 @@ - - 4.0.0 - - monasca - monasca-api-base - 1.2.1-SNAPSHOT - https://github.com/openstack/monasca-api - pom - - - - ${version} ${sun.java.command} - true - UTF-8 - UTF-8 - - - - scm:git:git@github.com:openstack/monasca-api - scm:git:git@github.com:openstack/monasca-api - - - - - - org.codehaus.mojo - exec-maven-plugin - 1.1.1 - - - package-execution - package - - exec - - - - clean-execution - clean - - exec - - - - - run_maven.sh - - - - - - diff --git a/releasenotes/locale/.gitkeep b/releasenotes/locale/.gitkeep deleted file mode 100644 index e69de29bb..000000000 diff --git a/releasenotes/notes/add-cassandra-loadbalancing-policy-4d5d0e7e8064a870.yaml b/releasenotes/notes/add-cassandra-loadbalancing-policy-4d5d0e7e8064a870.yaml deleted file mode 100644 index 330930ec5..000000000 --- 
a/releasenotes/notes/add-cassandra-loadbalancing-policy-4d5d0e7e8064a870.yaml +++ /dev/null @@ -1,5 +0,0 @@ ---- -features: - - | - Support Cassandra cluster configuration with the load balancing policy DCAwareRoundRobinPolicy. - The 'local_data_center' default value is ''. diff --git a/releasenotes/notes/add-monasca-status-upgrade-check-c37e6910c2eb0150.yaml b/releasenotes/notes/add-monasca-status-upgrade-check-c37e6910c2eb0150.yaml deleted file mode 100644 index f06e408e6..000000000 --- a/releasenotes/notes/add-monasca-status-upgrade-check-c37e6910c2eb0150.yaml +++ /dev/null @@ -1,12 +0,0 @@ ---- -features: - - | - Added the new tool ``monasca-status upgrade check``. - A new framework for the ``monasca-status upgrade check`` command has been added. - This framework allows adding various checks which can be run before a - Monasca upgrade to ensure the upgrade can be performed safely. -upgrade: - - | - Operators can now use the new CLI tool ``monasca-status upgrade check`` - to check if the Monasca deployment can be safely upgraded from - the N-1 to the N release. diff --git a/releasenotes/notes/add_legacy_kafka_client_enabled_option-7be9bc4e0fcecc70.yaml b/releasenotes/notes/add_legacy_kafka_client_enabled_option-7be9bc4e0fcecc70.yaml deleted file mode 100644 index d8d2cc0b0..000000000 --- a/releasenotes/notes/add_legacy_kafka_client_enabled_option-7be9bc4e0fcecc70.yaml +++ /dev/null @@ -1,7 +0,0 @@ ---- -upgrade: - - | - The configuration option `legacy_kafka_client_enabled` was added to allow working - with both the legacy kafka-python and the new Confluent Kafka client. Please set - the message format version for the Kafka brokers to 0.9.0.0 to avoid - performance issues until all consumers are upgraded. diff --git a/releasenotes/notes/alarms-count-dimensions-f746ca6c725335b2.yaml b/releasenotes/notes/alarms-count-dimensions-f746ca6c725335b2.yaml deleted file mode 100644 index 8f22806d9..000000000 --- a/releasenotes/notes/alarms-count-dimensions-f746ca6c725335b2.yaml +++ /dev/null @@ -1,8 +0,0 @@ ---- -features: - - | - Allow the metric_dimensions filter to filter on multiple dimension values - on the alarms count endpoint (e.g. metric_dimensions=hostname:host1|host2). - This brings the query parameters into parity between the alarms list - and alarms count endpoints. This also restores functionality that was - available in the deprecated Java API. diff --git a/releasenotes/notes/apache-kafka-101-d5f3454fd445c727.yaml b/releasenotes/notes/apache-kafka-101-d5f3454fd445c727.yaml deleted file mode 100644 index d772afb36..000000000 --- a/releasenotes/notes/apache-kafka-101-d5f3454fd445c727.yaml +++ /dev/null @@ -1,7 +0,0 @@ ---- -upgrade: - - | - Apache Kafka has been upgraded to version 1.0.1 in devstack. It is the - recommended version for Monasca. Because of existing old consumers, the Kafka - log message format option `log.message.format.version` should be set to - 0.9.0. diff --git a/releasenotes/notes/cassandra-connectiontimeout-option-13bc68e2fcd56580.yaml b/releasenotes/notes/cassandra-connectiontimeout-option-13bc68e2fcd56580.yaml deleted file mode 100644 index 916635936..000000000 --- a/releasenotes/notes/cassandra-connectiontimeout-option-13bc68e2fcd56580.yaml +++ /dev/null @@ -1,4 +0,0 @@ ---- -features: - - | - Support a Cassandra connection timeout option, which sets the timeout used when creating a new connection.
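The alarms-count note above filters one dimension key by several values with a `|` separator (metric_dimensions=hostname:host1|host2). A hedged sketch of parsing that query form (the function name is illustrative, not the API's own helper):

def parse_metric_dimensions(raw):
    # "hostname:host1|host2,service:api" ->
    # {"hostname": ["host1", "host2"], "service": ["api"]}
    dimensions = {}
    for pair in raw.split(','):
        name, _, values = pair.partition(':')
        dimensions[name] = values.split('|') if values else []
    return dimensions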
diff --git a/releasenotes/notes/config_gen-ead0282db82e6c0f.yaml b/releasenotes/notes/config_gen-ead0282db82e6c0f.yaml deleted file mode 100644 index 60fc7a6d2..000000000 --- a/releasenotes/notes/config_gen-ead0282db82e6c0f.yaml +++ /dev/null @@ -1,6 +0,0 @@ ---- -other: - - | - Stopped maintaining the example configuration file. The file was - removed from the tree; however, it can be generated using the oslo.config - generator feature. The Devstack plugin has also been migrated to use it. \ No newline at end of file diff --git a/releasenotes/notes/deprecate-json-formatted-policy-file-2f00c9efa9e274af.yaml b/releasenotes/notes/deprecate-json-formatted-policy-file-2f00c9efa9e274af.yaml deleted file mode 100644 index c9c530004..000000000 --- a/releasenotes/notes/deprecate-json-formatted-policy-file-2f00c9efa9e274af.yaml +++ /dev/null @@ -1,20 +0,0 @@ ---- -upgrade: - - | - The default value of the ``[oslo_policy] policy_file`` config option has - been changed from ``policy.json`` to ``policy.yaml``. - Operators who are utilizing customized or previously generated - static policy JSON files (which are not needed by default) should - generate new policy files or convert them to YAML format. Use the - `oslopolicy-convert-json-to-yaml - <https://docs.openstack.org/oslo.policy/latest/cli/oslopolicy-convert-json-to-yaml.html>`_ - tool to convert a JSON to YAML formatted policy file in - a backward compatible way. -deprecations: - - | - Use of JSON policy files was deprecated by the ``oslo.policy`` library - during the Victoria development cycle. As a result, this deprecation is - being noted in the Wallaby cycle with an anticipated future removal of support - by ``oslo.policy``. As such, operators will need to convert to YAML policy - files. Please see the upgrade notes for details on migration of any - custom policy files. diff --git a/releasenotes/notes/disable-legacy-kafka-client-16dd1ac1894fe7a0.yaml b/releasenotes/notes/disable-legacy-kafka-client-16dd1ac1894fe7a0.yaml deleted file mode 100644 index c8e4e4de5..000000000 --- a/releasenotes/notes/disable-legacy-kafka-client-16dd1ac1894fe7a0.yaml +++ /dev/null @@ -1,6 +0,0 @@ ---- -upgrade: - - | - The default value of ``kafka.legacy_kafka_client_enabled`` has changed - from ``True`` to ``False``. The use of the new Confluent Kafka client is - recommended. diff --git a/releasenotes/notes/drop-py-2-7-aee58a9afab0e4b9.yaml b/releasenotes/notes/drop-py-2-7-aee58a9afab0e4b9.yaml deleted file mode 100644 index 380500c16..000000000 --- a/releasenotes/notes/drop-py-2-7-aee58a9afab0e4b9.yaml +++ /dev/null @@ -1,6 +0,0 @@ ---- -upgrade: - - | - Python 2.7 support has been dropped. The last release of monasca-api - to support Python 2.7 is OpenStack Train. The minimum version of Python now - supported by monasca-api is Python 3.6.
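Per the policy-file note above, operators keeping a customized policy file should convert it with the oslopolicy-convert-json-to-yaml tool and then point the renamed option at the result; a minimal illustrative override in the API's config file:

[oslo_policy]
policy_file = policy.yaml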
diff --git a/releasenotes/notes/elasticsearch-cluster-upgrade-4b7bdc9c17e0169f.yaml b/releasenotes/notes/elasticsearch-cluster-upgrade-4b7bdc9c17e0169f.yaml deleted file mode 100644 index 2bb11523c..000000000 --- a/releasenotes/notes/elasticsearch-cluster-upgrade-4b7bdc9c17e0169f.yaml +++ /dev/null @@ -1,5 +0,0 @@ ---- -upgrade: - - | - A guide to upgrading to Elasticsearch 7.x can be found here: - https://www.elastic.co/guide/en/cloud/current/ec-upgrading-v7.html diff --git a/releasenotes/notes/enable-disk-tsi-40f29262a0301531.yaml b/releasenotes/notes/enable-disk-tsi-40f29262a0301531.yaml deleted file mode 100644 index 1fa3fb480..000000000 --- a/releasenotes/notes/enable-disk-tsi-40f29262a0301531.yaml +++ /dev/null @@ -1,8 +0,0 @@ ---- -upgrade: - - | - Changes InfluxDB data from in-memory to disk storage - (see https://docs.influxdata.com/influxdb/v1.7/concepts/time-series-index/). - If upgrading an existing InfluxDB install, please follow the instructions - for migrating existing data here: - https://docs.influxdata.com/influxdb/v1.7/administration/upgrading/#upgrading-influxdb-1-3-1-4-no-tsi-preview-to-1-7-x-tsi-enabled diff --git a/releasenotes/notes/fix-cassandra-cluster-port.yaml b/releasenotes/notes/fix-cassandra-cluster-port.yaml deleted file mode 100644 index de6928f8d..000000000 --- a/releasenotes/notes/fix-cassandra-cluster-port.yaml +++ /dev/null @@ -1,4 +0,0 @@ ---- -features: - - | - Users can configure the Cassandra cluster port via a configuration option. The default port number is ``9042``. \ No newline at end of file diff --git a/releasenotes/notes/fix-db-migration-issue-2006984-6676bd3a8a34c9ae.yaml b/releasenotes/notes/fix-db-migration-issue-2006984-6676bd3a8a34c9ae.yaml deleted file mode 100644 index a7f1a761c..000000000 --- a/releasenotes/notes/fix-db-migration-issue-2006984-6676bd3a8a34c9ae.yaml +++ /dev/null @@ -1,6 +0,0 @@ ---- -fixes: - - | - Fixed an issue with migrating the DB schema in the Stein release - which could cause an upgrade to fail. See `story - 2006984 <https://storyboard.openstack.org/#!/story/2006984>`__ diff --git a/releasenotes/notes/influx_1.3.3-1be2009139641336.yaml b/releasenotes/notes/influx_1.3.3-1be2009139641336.yaml deleted file mode 100644 index fd297e986..000000000 --- a/releasenotes/notes/influx_1.3.3-1be2009139641336.yaml +++ /dev/null @@ -1,5 +0,0 @@ ---- -upgrade: - - | - Upgrade InfluxDB to 1.3.3 from 1.3.1. It provides a couple of - bug fixes, as listed in https://docs.influxdata.com/influxdb/v1.3/about_the_project/releasenotes-changelog/#v1-3-3-2017-08-10. diff --git a/releasenotes/notes/influx_1.3.8-e6b0be63d7d7222f.yaml b/releasenotes/notes/influx_1.3.8-e6b0be63d7d7222f.yaml deleted file mode 100644 index 9ad67fa3b..000000000 --- a/releasenotes/notes/influx_1.3.8-e6b0be63d7d7222f.yaml +++ /dev/null @@ -1,5 +0,0 @@ ---- -upgrade: - - | - Upgrade InfluxDB to 1.3.8 from 1.3.5. It provides a couple of - bug fixes, as listed in https://docs.influxdata.com/influxdb/v1.3/about_the_project/releasenotes-changelog/#v1-3-9-2018-01-19 diff --git a/releasenotes/notes/influxdb-support-for-db-per-tenant-6ada0c3979de6df8.yaml b/releasenotes/notes/influxdb-support-for-db-per-tenant-6ada0c3979de6df8.yaml deleted file mode 100644 index 62ae51328..000000000 --- a/releasenotes/notes/influxdb-support-for-db-per-tenant-6ada0c3979de6df8.yaml +++ /dev/null @@ -1,6 +0,0 @@ ---- -features: - - | - The configuration option `db_per_tenant` was added for InfluxDB to allow - data points to be written to a dedicated per-tenant database, where the - `database_name` prefixes the tenant ID, e.g. monasca_tenantid.
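A hedged sketch of the naming rule the `db_per_tenant` note above describes, with `database_name` as the configured prefix (the function name is illustrative):

def database_for(tenant_id, database_name='monasca', db_per_tenant=True):
    # With db_per_tenant enabled, each tenant's points land in a dedicated
    # database, e.g. "monasca_tenantid"; otherwise all tenants share one.
    return '%s_%s' % (database_name, tenant_id) if db_per_tenant else database_name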
diff --git a/releasenotes/notes/influxdb_1.3.9-666fc98a5357890d.yaml b/releasenotes/notes/influxdb_1.3.9-666fc98a5357890d.yaml deleted file mode 100644 index cdb2e9fd4..000000000 --- a/releasenotes/notes/influxdb_1.3.9-666fc98a5357890d.yaml +++ /dev/null @@ -1,6 +0,0 @@ ---- -upgrade: - - | - Upgrade InfluxDB to 1.3.9 from 1.3.8. It provides a bug fix which improves - performance when writes exceed `max-values-per-tag` or `max-series`: - https://docs.influxdata.com/influxdb/v1.3/about_the_project/releasenotes-changelog/#v1-3-9-2018-01-19 diff --git a/releasenotes/notes/mergeapis-baa6905c7b8fd070.yaml b/releasenotes/notes/mergeapis-baa6905c7b8fd070.yaml deleted file mode 100644 index 44fa51e27..000000000 --- a/releasenotes/notes/mergeapis-baa6905c7b8fd070.yaml +++ /dev/null @@ -1,11 +0,0 @@ - -features: - - | - Merge the monasca-log-api source code into monasca-api and enable the logs endpoints. - - | - Introduce configuration options that allow enabling/disabling the metrics and logs endpoints. - -other: - - | - Unified the response when the content type is incorrect. Changed the response for the log part from 415 to - 400 (Bad request, message - Bad content type. Must be application/json). \ No newline at end of file diff --git a/releasenotes/notes/os-docs-550ce9ad68a4a29e.yaml b/releasenotes/notes/os-docs-550ce9ad68a4a29e.yaml deleted file mode 100644 index 2574c80d3..000000000 --- a/releasenotes/notes/os-docs-550ce9ad68a4a29e.yaml +++ /dev/null @@ -1,7 +0,0 @@ ---- - -upgrade: - - Documentation handling of monasca-api has been migrated - to match the OpenStack process. Note that this is just the - initial migration; the entire transition will be completed - in the future. diff --git a/releasenotes/notes/oslo-policy-345ff286820badc6.yaml b/releasenotes/notes/oslo-policy-345ff286820badc6.yaml deleted file mode 100644 index 92bece622..000000000 --- a/releasenotes/notes/oslo-policy-345ff286820badc6.yaml +++ /dev/null @@ -1,6 +0,0 @@ ---- -upgrade: - - | - The ``agent_authorized_roles`` default value changed to ``monasca-agent`` to be - consistent with the rest of Monasca; it was previously configured to ``agent``, so - old policy configuration should be checked and adjusted accordingly. diff --git a/releasenotes/notes/oslo-policy-aebaebd218b9d2ff.yaml b/releasenotes/notes/oslo-policy-aebaebd218b9d2ff.yaml deleted file mode 100644 index bc5e79e8d..000000000 --- a/releasenotes/notes/oslo-policy-aebaebd218b9d2ff.yaml +++ /dev/null @@ -1,5 +0,0 @@ ---- -features: - - | - Use of oslo mechanisms for defining and enforcing policy. - A command line entry point allows the user to generate a sample policy file. diff --git a/releasenotes/notes/relax-constraints-for-setting-periodic-notifications-68f6c2ed6f89ebc0.yaml b/releasenotes/notes/relax-constraints-for-setting-periodic-notifications-68f6c2ed6f89ebc0.yaml deleted file mode 100644 index e65efdbe4..000000000 --- a/releasenotes/notes/relax-constraints-for-setting-periodic-notifications-68f6c2ed6f89ebc0.yaml +++ /dev/null @@ -1,4 +0,0 @@ ---- -features: - - | - Adds support for configuring periodic notifications for all notification types. diff --git a/releasenotes/notes/remove-database-url-option-efd6c09b771063c0.yaml b/releasenotes/notes/remove-database-url-option-efd6c09b771063c0.yaml deleted file mode 100644 index 9164ec5ef..000000000 --- a/releasenotes/notes/remove-database-url-option-efd6c09b771063c0.yaml +++ /dev/null @@ -1,5 +0,0 @@ ---- -upgrade: - - | - Deprecated configuration option ``database.url`` has been removed. Please - use ``database.connection`` instead.
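The last note above removes `database.url` in favour of `database.connection`; a minimal illustrative setting (credentials, host, and schema name are placeholders):

[database]
connection = mysql+pymysql://monasca:password@127.0.0.1/mon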
diff --git a/releasenotes/notes/stop_pre_populating_built_in_monasca_notification_plugins_in_db-140ece49106b4a5a.yaml b/releasenotes/notes/stop_pre_populating_built_in_monasca_notification_plugins_in_db-140ece49106b4a5a.yaml deleted file mode 100644 index 74dfb0d14..000000000 --- a/releasenotes/notes/stop_pre_populating_built_in_monasca_notification_plugins_in_db-140ece49106b4a5a.yaml +++ /dev/null @@ -1,6 +0,0 @@ ---- -upgrade: - - The concept of built-in monasca-notification plugins has been removed - and the built-in plugins are no longer pre-populated in the database. If - you were using the PAGERDUTY, EMAIL or WEBHOOK notification plugin, you - should explicitly enable it in the monasca-notification config file. diff --git a/releasenotes/notes/storm_1.1.3-4aa16a9c648cd89b.yaml b/releasenotes/notes/storm_1.1.3-4aa16a9c648cd89b.yaml deleted file mode 100644 index 9f3ca1848..000000000 --- a/releasenotes/notes/storm_1.1.3-4aa16a9c648cd89b.yaml +++ /dev/null @@ -1,5 +0,0 @@ ---- -upgrade: - - | - Upgrade Storm to 1.1.3 from 1.1.1. It provides a security fix for - http://www.securityfocus.com/bid/104399 diff --git a/releasenotes/notes/support-configuring-kafka-post-size-4baa10353e859b8a.yaml b/releasenotes/notes/support-configuring-kafka-post-size-4baa10353e859b8a.yaml deleted file mode 100644 index f941cda09..000000000 --- a/releasenotes/notes/support-configuring-kafka-post-size-4baa10353e859b8a.yaml +++ /dev/null @@ -1,4 +0,0 @@ ---- -features: - - A new config option, queue_buffering_max_messages, has been added to - support controlling the size of posts to Kafka from the Monasca API. diff --git a/releasenotes/notes/support-timerange-for-dimension-names-and-values-e5a2ba64700dcd0b.yaml b/releasenotes/notes/support-timerange-for-dimension-names-and-values-e5a2ba64700dcd0b.yaml deleted file mode 100644 index 65369a78f..000000000 --- a/releasenotes/notes/support-timerange-for-dimension-names-and-values-e5a2ba64700dcd0b.yaml +++ /dev/null @@ -1,7 +0,0 @@ ---- -features: - - | - Dimension names and values can be scoped by a time range, which can make - dimension-related queries to large databases much faster because only the - relevant shards are searched. Users that upgrade their Monasca Grafana - Datasource plugin to version 1.3.0 will benefit from this feature. diff --git a/releasenotes/notes/upgrade-influxdb-3fa94ef4b15c8217.yaml b/releasenotes/notes/upgrade-influxdb-3fa94ef4b15c8217.yaml deleted file mode 100644 index 55908e8be..000000000 --- a/releasenotes/notes/upgrade-influxdb-3fa94ef4b15c8217.yaml +++ /dev/null @@ -1,10 +0,0 @@ ---- -upgrade: - - | - Upgrade InfluxDB to 1.7.6 from 1.3.9. This provides a number of stability, - performance and bug fix improvements. Full release notes are available here: - https://docs.influxdata.com/influxdb/v1.7/about_the_project/releasenotes-changelog/#v1-7-6-2019-04-16 -security: - - | - InfluxDB 1.7.6 fixes a security issue in which monasca-api leaks - dimensions across projects. diff --git a/releasenotes/notes/upgrade-storm-7b4f262d3783d589.yaml b/releasenotes/notes/upgrade-storm-7b4f262d3783d589.yaml deleted file mode 100644 index 0b9578ede..000000000 --- a/releasenotes/notes/upgrade-storm-7b4f262d3783d589.yaml +++ /dev/null @@ -1,5 +0,0 @@ ---- -upgrade: - - | - Upgrade Storm to 1.2.2 from 1.1.3.
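Several notes above adjust Kafka behaviour; the options they reference live in the API's `[kafka]` section. An illustrative snippet combining them (the buffer value is an example, not a recommended default):

[kafka]
legacy_kafka_client_enabled = false
queue_buffering_max_messages = 1000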
- diff --git a/releasenotes/notes/upgrade_kafka_2.0.1-b53f180d751e47f5.yaml b/releasenotes/notes/upgrade_kafka_2.0.1-b53f180d751e47f5.yaml deleted file mode 100644 index 9b9dfb72b..000000000 --- a/releasenotes/notes/upgrade_kafka_2.0.1-b53f180d751e47f5.yaml +++ /dev/null @@ -1,6 +0,0 @@ ---- -upgrade: - - | - Upgrade Apache Kafka from version 1.0.1 to 2.0.1. Please consult official - `upgrading notes `_ - for complete information on upgrading from previous versions. diff --git a/releasenotes/notes/use-standard-config-file-path-a4c1a29d9d3fcc07.yaml b/releasenotes/notes/use-standard-config-file-path-a4c1a29d9d3fcc07.yaml deleted file mode 100644 index c5af8a873..000000000 --- a/releasenotes/notes/use-standard-config-file-path-a4c1a29d9d3fcc07.yaml +++ /dev/null @@ -1,7 +0,0 @@ ---- -deprecations: - - | - Configuration file path /etc/monasca/api-config.conf and - /etc/monasca/api.conf are deprecated. Use the standard path - /etc/monasca/monasca-api.conf or the configuration dir (supported via - oslo.config) /etc/monasca/monasca-api.conf.d/any_config_name.conf diff --git a/releasenotes/source/2023.1.rst b/releasenotes/source/2023.1.rst deleted file mode 100644 index d1238479b..000000000 --- a/releasenotes/source/2023.1.rst +++ /dev/null @@ -1,6 +0,0 @@ -=========================== -2023.1 Series Release Notes -=========================== - -.. release-notes:: - :branch: stable/2023.1 diff --git a/releasenotes/source/2023.2.rst b/releasenotes/source/2023.2.rst deleted file mode 100644 index a4838d7d0..000000000 --- a/releasenotes/source/2023.2.rst +++ /dev/null @@ -1,6 +0,0 @@ -=========================== -2023.2 Series Release Notes -=========================== - -.. release-notes:: - :branch: stable/2023.2 diff --git a/releasenotes/source/conf.py b/releasenotes/source/conf.py deleted file mode 100644 index a40c2cb8b..000000000 --- a/releasenotes/source/conf.py +++ /dev/null @@ -1,212 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = '1.6' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'openstackdocstheme', - 'reno.sphinxext' -] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix of source filenames. -source_suffix = '.rst' - -# The encoding of source files. -source_encoding = 'utf-8' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -openstackdocs_repo_name = u'openstack/monasca-api' -openstackdocs_use_storyboard = True - -# Release notes do not need a version number in the title, they -# cover multiple releases. -version = '' -release = '' -copyright = u'2014-present, OpenStack Foundation' -author = u'OpenStack Foundation' - -# The language for content autogenerated by Sphinx. 
Refer to documentation -# for a list of supported languages. -# language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = [] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'native' - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = 'openstackdocs' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -# html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -# html_static_path = [] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 
-# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a <link> tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Output file base name for HTML help builder. -htmlhelp_basename = 'MonitoringApiReleaseNotesDoc' - -# -- Options for LaTeX output --------------------------------------------- - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [( - master_doc, 'MonitoringApiReleaseNotes.tex', - u'Monitoring API Release Notes', [author], - 'manual' -)] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, 'monitoringapireleasenotes', - u'Monitoring API Release Notes', [author], - 1) -] - -# -- Options for Internationalization output ------------------------------ -locale_dirs = ['locale/'] diff --git a/releasenotes/source/index.rst b/releasenotes/source/index.rst deleted file mode 100644 index e1cc1c9eb..000000000 --- a/releasenotes/source/index.rst +++ /dev/null @@ -1,23 +0,0 @@ -============================ -Monitoring API Release Notes -============================ - -Contents: - -.. toctree:: - :maxdepth: 1 - - unreleased - 2023.2 - 2023.1 - zed - yoga - xena - wallaby - victoria - ussuri - train - stein - rocky - queens - pike diff --git a/releasenotes/source/pike.rst b/releasenotes/source/pike.rst deleted file mode 100644 index e43bfc0ce..000000000 --- a/releasenotes/source/pike.rst +++ /dev/null @@ -1,6 +0,0 @@ -=================================== - Pike Series Release Notes -=================================== - -.. release-notes:: - :branch: stable/pike diff --git a/releasenotes/source/queens.rst b/releasenotes/source/queens.rst deleted file mode 100644 index 36ac6160c..000000000 --- a/releasenotes/source/queens.rst +++ /dev/null @@ -1,6 +0,0 @@ -=================================== - Queens Series Release Notes -=================================== - -.. release-notes:: - :branch: stable/queens diff --git a/releasenotes/source/rocky.rst b/releasenotes/source/rocky.rst deleted file mode 100644 index 40dd517b7..000000000 --- a/releasenotes/source/rocky.rst +++ /dev/null @@ -1,6 +0,0 @@ -=================================== - Rocky Series Release Notes -=================================== - -.. 
release-notes:: - :branch: stable/rocky diff --git a/releasenotes/source/stein.rst b/releasenotes/source/stein.rst deleted file mode 100644 index efaceb667..000000000 --- a/releasenotes/source/stein.rst +++ /dev/null @@ -1,6 +0,0 @@ -=================================== - Stein Series Release Notes -=================================== - -.. release-notes:: - :branch: stable/stein diff --git a/releasenotes/source/train.rst b/releasenotes/source/train.rst deleted file mode 100644 index 583900393..000000000 --- a/releasenotes/source/train.rst +++ /dev/null @@ -1,6 +0,0 @@ -========================== -Train Series Release Notes -========================== - -.. release-notes:: - :branch: stable/train diff --git a/releasenotes/source/unreleased.rst b/releasenotes/source/unreleased.rst deleted file mode 100644 index cd22aabcc..000000000 --- a/releasenotes/source/unreleased.rst +++ /dev/null @@ -1,5 +0,0 @@ -============================== - Current Series Release Notes -============================== - -.. release-notes:: diff --git a/releasenotes/source/ussuri.rst b/releasenotes/source/ussuri.rst deleted file mode 100644 index e21e50e0c..000000000 --- a/releasenotes/source/ussuri.rst +++ /dev/null @@ -1,6 +0,0 @@ -=========================== -Ussuri Series Release Notes -=========================== - -.. release-notes:: - :branch: stable/ussuri diff --git a/releasenotes/source/victoria.rst b/releasenotes/source/victoria.rst deleted file mode 100644 index 8ce933419..000000000 --- a/releasenotes/source/victoria.rst +++ /dev/null @@ -1,6 +0,0 @@ -============================= -Victoria Series Release Notes -============================= - -.. release-notes:: - :branch: unmaintained/victoria diff --git a/releasenotes/source/wallaby.rst b/releasenotes/source/wallaby.rst deleted file mode 100644 index bcf35c5f8..000000000 --- a/releasenotes/source/wallaby.rst +++ /dev/null @@ -1,6 +0,0 @@ -============================ -Wallaby Series Release Notes -============================ - -.. release-notes:: - :branch: unmaintained/wallaby diff --git a/releasenotes/source/xena.rst b/releasenotes/source/xena.rst deleted file mode 100644 index d19eda488..000000000 --- a/releasenotes/source/xena.rst +++ /dev/null @@ -1,6 +0,0 @@ -========================= -Xena Series Release Notes -========================= - -.. release-notes:: - :branch: unmaintained/xena diff --git a/releasenotes/source/yoga.rst b/releasenotes/source/yoga.rst deleted file mode 100644 index 43cafdea8..000000000 --- a/releasenotes/source/yoga.rst +++ /dev/null @@ -1,6 +0,0 @@ -========================= -Yoga Series Release Notes -========================= - -.. release-notes:: - :branch: unmaintained/yoga diff --git a/releasenotes/source/zed.rst b/releasenotes/source/zed.rst deleted file mode 100644 index 9608c05e4..000000000 --- a/releasenotes/source/zed.rst +++ /dev/null @@ -1,6 +0,0 @@ -======================== -Zed Series Release Notes -======================== - -.. release-notes:: - :branch: stable/zed diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 2efc64060..000000000 --- a/requirements.txt +++ /dev/null @@ -1,31 +0,0 @@ -# Requirements lower bounds listed here are our best effort to keep them up to -# date but we do not test them so no guarantee of having them all correct. If -# you find any incorrect lower bounds, let us know or propose a fix. - -# The order of packages is significant, because pip processes them in the order -# of appearance. 
Changing the order has an impact on the overall integration -# process, which may cause wedges in the gate later. -oslo.db>=6.0.0 # Apache-2.0 -oslo.config>=6.8.0 # Apache-2.0 -oslo.context>=2.22.0 # Apache-2.0 -oslo.log>=3.36.0 # Apache-2.0 -oslo.middleware>=3.31.0 # Apache-2.0 -oslo.policy>=3.6.0 # Apache-2.0 -oslo.serialization!=2.19.1,>=2.18.0 # Apache-2.0 -oslo.upgradecheck>=1.3.0 # Apache-2.0 -oslo.utils>=4.5.0 # Apache-2.0 - -python-keystoneclient>=3.8.0 # Apache-2.0 - -falcon>=2.0.0 # Apache-2.0 -keystonemiddleware>=4.17.0 # Apache-2.0 -Paste>=2.0.2 # MIT -PasteDeploy>=1.5.0 # MIT -pbr!=2.1.0,>=2.0.0 # Apache-2.0 -six>=1.12.0 # MIT -pyparsing>=3.0.6 # MIT -voluptuous>=0.11.1 # BSD License -eventlet!=0.18.3,!=0.20.1,!=0.21.0,!=0.23.0,!=0.25.0,>=0.18.2 # MIT -monasca-common>=2.16.0 # Apache-2.0 -SQLAlchemy>=1.3.0 # MIT -simplejson>=3.8.1 # MIT diff --git a/run_maven.sh b/run_maven.sh deleted file mode 100755 index 5c8b80083..000000000 --- a/run_maven.sh +++ /dev/null @@ -1,59 +0,0 @@ -#!/bin/bash -set -x -env -# Download maven 3 if the system maven isn't maven 3 -VERSION=`mvn -v | grep "Apache Maven 3"` -if [ -z "${VERSION}" ]; then - curl https://dlcdn.apache.org/maven/maven-3/3.9.9/binaries/apache-maven-3.9.9-bin.tar.gz > apache-maven-3.9.9-bin.tar.gz - tar -xvzf apache-maven-3.9.9-bin.tar.gz - MVN=${PWD}/apache-maven-3.9.9/bin/mvn -else - MVN=mvn -fi - -# Get the expected common version -COMMON_VERSION=$1 -# Get rid of the version argument -shift - -# Get rid of the java property name containing the args -shift - -RUN_BUILD=false -for ARG in $*; do - if [ "$ARG" = "package" ]; then - RUN_BUILD=true - fi - if [ "$ARG" = "install" ]; then - RUN_BUILD=true - fi -done - -if [ $RUN_BUILD = "true" ]; then - if [ ! -z "$ZUUL_BRANCH" ]; then - BRANCH=${ZUUL_BRANCH} - else - BRANCH=${ZUUL_REF} - fi - - ( cd common; ./build_common.sh ${MVN} ${COMMON_VERSION} ${BRANCH} ) - RC=$? - if [ $RC != 0 ]; then - exit $RC - fi -fi - -# Invoke the maven 3 on the real pom.xml -( cd java; ${MVN} -DgitRevision=`git rev-list HEAD --max-count 1 --abbrev=0 --abbrev-commit` $* ) - -RC=$? - -# Copy the jars where the publisher will find them -if [ $RUN_BUILD = "true" ]; then - if [ ! 
-L target ]; then - ln -sf java/target target - fi -fi - -rm -fr apache-maven-3.9.9* -exit $RC diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 1ad0306f0..000000000 --- a/setup.cfg +++ /dev/null @@ -1,62 +0,0 @@ -[metadata] -name = monasca-api - -summary = OpenStack Monitoring as a Service -description_file = - README.rst -author = OpenStack -author_email = openstack-discuss@lists.openstack.org -home_page = https://docs.openstack.org/monasca-api/latest/ -python_requires = >=3.8 -classifier = - Environment :: OpenStack - Intended Audience :: Information Technology - Intended Audience :: System Administrators - License :: OSI Approved :: Apache Software License - Operating System :: POSIX :: Linux - Programming Language :: Python - Programming Language :: Python :: Implementation :: CPython - Programming Language :: Python :: 3 :: Only - Programming Language :: Python :: 3.8 - Programming Language :: Python :: 3.9 - Programming Language :: Python :: 3.10 - Programming Language :: Python :: 3.11 - -[files] -packages = - monasca_api - -data_files = - /etc/monasca = - etc/api-logging.conf - etc/api-config.ini - -[extras] -influxdb = - influxdb>=2.9.2 # MIT -cassandra = - cassandra-driver!=3.6.0,>=3.3.0 # Apache-2.0 - -[entry_points] -console_scripts = - monasca-api = monasca_api.api.server:launch - monasca_db = monasca_api.cmd.monasca_db:main - monasca-status = monasca_api.cmd.status:main - -wsgi_scripts = - monasca-api-wsgi = monasca_api.api.wsgi:main - -oslo.config.opts = - monasca_api = monasca_api.conf:list_opts - -oslo.policy.policies = - monasca_api = monasca_api.policies:list_rules - -[pbr] -autodoc_index_modules = True -autodoc_exclude_modules = - monasca_api.api.wsgi* - monasca_api.conf.* - monasca_api.tests.* - monasca_tempest_tests.* -api_doc_dir = contributor/api diff --git a/setup.py b/setup.py deleted file mode 100644 index cd35c3c35..000000000 --- a/setup.py +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import setuptools - -setuptools.setup( - setup_requires=['pbr>=2.0.0'], - pbr=True) diff --git a/test-requirements.txt b/test-requirements.txt deleted file mode 100644 index 5c0c1e741..000000000 --- a/test-requirements.txt +++ /dev/null @@ -1,26 +0,0 @@ -# The order of packages is significant, because pip processes them in the order -# of appearance. Changing the order has an impact on the overall integration -# process, which may cause wedges in the gate later. 
-# Hacking already pins down pep8, pyflakes and flake8 -bandit>=1.1.0 # Apache-2.0 -bashate>=0.5.1 # Apache-2.0 -hacking>=3.0.1,<3.1.0 # Apache-2.0 -Babel!=2.4.0,>=2.3.4 # BSD -coverage>=4.4.1 # Apache-2.0 -cassandra-driver!=3.6.0,>=3.3.0 # Apache-2.0 -fixtures>=3.0.0 # Apache-2.0/BSD -httplib2>=0.9.1 # MIT -influxdb>=2.9.2 # MIT -oslotest>=3.2.0 # Apache-2.0 -requests-mock>=1.2.0 # Apache-2.0 -stestr>=1.0.0 # Apache-2.0 -python-subunit>=1.0.0 # Apache-2.0/BSD - -# tempest -testrepository>=0.0.18 # Apache-2.0/BSD -testscenarios>=0.4 # Apache-2.0/BSD -testtools>=2.2.0 # MIT -tempest>=17.1.0 # Apache-2.0 - -doc8>=0.6.0 # Apache-2.0 -oslo.config>=6.8.0 # Apache-2.0 \ No newline at end of file diff --git a/tools/bashate.sh b/tools/bashate.sh deleted file mode 100755 index 9f8b4eb13..000000000 --- a/tools/bashate.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/env bash - -# Ignore too long lines error E006 from bashate and treat -# E005, E042 as errors. -SH_FILES=$(find ./devstack -type d -name files -prune -o -type f -name '*.sh' -print) -bashate -v -iE006 -eE005,E042 ${SH_FILES:-''} diff --git a/tox.ini b/tox.ini deleted file mode 100644 index cf70c0535..000000000 --- a/tox.ini +++ /dev/null @@ -1,179 +0,0 @@ -[tox] -envlist = py3,pep8,cover -minversion = 2.7 -skipsdist = True -ignore_basepython_conflict = True - -[testenv] -basepython = python3 -setenv = - VIRTUAL_ENV={envdir} - OS_TEST_PATH=monasca_api/tests -passenv = *_proxy - *_PROXY -usedevelop = True -install_command = pip install {opts} {packages} -deps = - -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} - -r{toxinidir}/test-requirements.txt - -r{toxinidir}/requirements.txt - .[influxdb,cassandra] -allowlist_externals = bash - find - rm - make -commands = - find . -type f -name "*.pyc" -delete - stestr run {posargs} - -[testenv:cover] -description = Calculates code coverage -setenv = - PYTHON=coverage run --source monasca_api --parallel-mode -commands = - stestr run {posargs} - coverage combine - coverage html -d cover - coverage xml -o cover/coverage.xml - -[testenv:debug] -commands = - oslo_debug_helper -t ./monasca_api/tests {posargs} - -[testenv:flake8] -skip_install = True -usedevelop = False -commands = - flake8 monasca_api - -[testenv:bandit] -skip_install = True -usedevelop = False -commands = - # B101(assert_used) - API uses asserts for performance reasons - # B303 - Fingerprint class uses SHA256 to map fingerprints to alembic revisions.
- bandit -r monasca_api -n5 -s B101,B303 -x monasca_api/tests - -[testenv:bashate] -skip_install = True -usedevelop = False -commands = bash {toxinidir}/tools/bashate.sh - -[testenv:pep8] -commands = - {[testenv:flake8]commands} - {[testenv:bandit]commands} - {[testenv:bashate]commands} - {[testenv:checkniceness]commands} - -[testenv:docs] -deps = -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} - -r{toxinidir}/doc/requirements.txt -description = Builds api-ref, api-guide, releasenotes and devdocs -commands = - {[testenv:devdocs]commands} - {[testenv:api-ref]commands} - {[testenv:releasenotes]commands} - -[testenv:pdf-docs] -deps = {[testenv:docs]deps} -envdir = {toxworkdir}/docs -allowlist_externals = - make - rm -commands = - rm -rf doc/build/pdf - sphinx-build -W -b latex doc/source doc/build/pdf - make -C doc/build/pdf - -[testenv:api-ref] -deps = {[testenv:docs]deps} -description = Called from CI scripts to test and publish the API Ref -commands = - rm -rf api-ref/build - {[testenv:checkjson]commands} - sphinx-build -W -b html -d api-ref/build/doctrees \ - api-ref/source api-ref/build/html - -[testenv:releasenotes] -deps = {[testenv:docs]deps} -description = Called from CI script to test and publish the Release Notes -commands = - rm -rf releasenotes/build - sphinx-build -a -E -W -d releasenotes/build/doctrees -b html \ - releasenotes/source releasenotes/build/html - -[testenv:devdocs] -deps = {[testenv:docs]deps} -description = Builds developer documentation -commands = - rm -rf doc/build doc/source/contributor/api - {[testenv:checkjson]commands} - sphinx-build -W -b html doc/source/ doc/build/html - -[testenv:checkniceness] -skip_install = True -usedevelop = False -description = Validates (pep-like) documentation -commands = - rm -rf {toxinidir}/doc/source/contributor/api {toxinidir}/doc/build \ - {toxinidir}/api-ref/build {toxinidir}/releasenotes/build - doc8 --file-encoding utf-8 {toxinidir}/doc - doc8 --file-encoding utf-8 {toxinidir}/api-ref - doc8 --file-encoding utf-8 {toxinidir}/releasenotes - -[testenv:checkjson] -description = Validates all json samples inside the doc folder -deps = -skip_install = True -usedevelop = False -whitelist_externals = - python - bash -commands = - bash -c "! find doc/ -type f -name *.json | xargs grep -U -n $'\r'" - bash -c '! find doc/ -type f -name *.json | \ - xargs -t -n1 python -m json.tool 2>&1 > /dev/null | grep -B1 -v ^python' - -[testenv:genconfig] -description = Generates sample configuration file for monasca-api -commands = oslo-config-generator --config-file=config-generator/monasca-api.conf - -[testenv:genpolicy] -description = Generates sample policy.yaml file for monasca-api -commands = oslopolicy-sample-generator --config-file=config-generator/policy.conf - -[testenv:venv] -commands = {posargs} - -[testenv:bindep] -deps = bindep -commands = bindep test - -[flake8] -# TODO: ignored checks should be enabled in the future -# H201 no 'except:' at least use 'except Exception:' -# H302 import only modules -# H405 multi line docstring summary not separated with an empty line -# W504 line break after binary operator -ignore = F821,H201,H302,H405,W504 -# H106: Don't put vim configuration in source files -# H203: Use assertIs(Not)None to check for None -# H204: Use assert(Not)Equal to check for equality. -# H205: Use assert(Greater|Less)(Equal) for comparison.
-enable-extensions=H106,H203,H204,H205 -max-complexity = 50 -max-line-length = 100 -builtins = _ -exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,tools,build -show-source = True - -[hacking] -local-check-factory = monasca_api.hacking.checks.factory -import_exceptions = - six.moves - -[flake8:local-plugins] -extension = - B319 = checks:no_xrange -paths = ./monasca_api/hacking
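
For anyone carrying this setup into another project: the [flake8:local-plugins] table above maps the project-specific code B319 to the function no_xrange in a checks module, which flake8 loads from ./monasca_api/hacking via the paths entry; the older [hacking] local-check-factory hook points at the same package. Below is a minimal sketch of such a check, assuming the flake8ext decorator from hacking>=3.0 as pinned in test-requirements.txt; the function name and message mirror the registration above, but this is illustrative, not the retired module's exact code:

    # checks.py: hypothetical minimal local flake8 plugin matching the
    # "B319 = checks:no_xrange" registration above.
    from hacking import core

    @core.flake8ext
    def no_xrange(logical_line):
        """B319 - xrange() was removed in Python 3; use range() instead."""
        # flake8 invokes the check once per logical line and expects a
        # generator of (column_offset, message) tuples for each violation.
        if 'xrange(' in logical_line:
            yield (0, "B319: do not use xrange(), use range() instead")

With that in place, running flake8 monasca_api (the [testenv:flake8] command) reports any remaining xrange() usage under code B319 alongside the built-in H-series checks.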