fix: migrate CI to jammy
fix: change tox.ini
fix: change queries for list_dimension_names and list_dimension_values because of influxdb time filter problem
fix: remove build_sphinx group from setup.cfg
fix: handle hashlib security problem

Change-Id: I0d31a8db5ed71c70e7b878ce5e7940e041d0fa43
Change-Id: I6f7066da10e834550cbf0c053c7bf425ac0ead93
Change-Id: If9575aee73d600bbc84fcdf58deb1c57b508d9c2
Change-Id: If515eaeee7539da3ca49997e88785dc65572b334
parent 1421da749b
commit a24fd834d0
@@ -1,7 +1,8 @@
-openjdk-8-jdk # dist:xenial,bionic,focal
-openjdk-8-jre-headless # dist:bionic,focal
-maven # dist:xenial,bionic,focal
-jq # dist:xenial,bionic,focal
+openjdk-8-jdk # dist:xenial,bionic,focal,jammy
+openjdk-8-jre-headless # dist:bionic,focal,jammy
+maven # dist:xenial,bionic,focal,jammy
+jq # dist:xenial,bionic,focal,jammy
 python-dev # dist:xenial,bionic,focal
-build-essential # dist:xenial,bionic,focal
-mailutils # dist:xenial,bionic,focal
+build-essential # dist:xenial,bionic,focal,jammy
+mailutils # dist:xenial,bionic,focal,jammy
+python-is-python3 # dist:focal,jammy
@@ -309,7 +309,7 @@ function install_logstash {
     tar xzf ${logstash_dest} -C $DEST

     sudo chown -R $STACK_USER $DEST/logstash-${LOGSTASH_VERSION}
-    ln -sf $DEST/logstash-${LOGSTASH_VERSION} $LOGSTASH_DIR
+    sudo ln -sf $DEST/logstash-${LOGSTASH_VERSION} $LOGSTASH_DIR

     sudo mkdir -p $LOGSTASH_DATA_DIR
     sudo chown $STACK_USER:monasca $LOGSTASH_DATA_DIR
@@ -339,7 +339,7 @@ function install_elasticsearch {
         tar xzf ${es_dest} -C $DEST

         sudo chown -R $STACK_USER $DEST/elasticsearch-${ELASTICSEARCH_VERSION}
-        ln -sf $DEST/elasticsearch-${ELASTICSEARCH_VERSION} $ELASTICSEARCH_DIR
+        sudo ln -sf $DEST/elasticsearch-${ELASTICSEARCH_VERSION} $ELASTICSEARCH_DIR
     fi
 }

@@ -364,7 +364,7 @@ function configure_elasticsearch {
         s|%ES_LOG_DIR%|$ELASTICSEARCH_LOG_DIR|g;
     " -i $ELASTICSEARCH_CFG_DIR/elasticsearch.yml

-    ln -sf $ELASTICSEARCH_CFG_DIR/elasticsearch.yml $GATE_CONFIGURATION_DIR/elasticsearch.yml
+    sudo ln -sf $ELASTICSEARCH_CFG_DIR/elasticsearch.yml $GATE_CONFIGURATION_DIR/elasticsearch.yml

     echo "[Service]" | sudo tee --append /etc/systemd/system/devstack\@elasticsearch.service > /dev/null
     echo "LimitNOFILE=$LIMIT_NOFILE" | sudo tee --append /etc/systemd/system/devstack\@elasticsearch.service > /dev/null
@@ -420,7 +420,7 @@ function install_kibana {
         local kibana_version_name
         kibana_version_name=`_get_kibana_version_name`
         sudo chown -R $STACK_USER $DEST/${kibana_version_name}
-        ln -sf $DEST/${kibana_version_name} $KIBANA_DIR
+        sudo ln -sf $DEST/${kibana_version_name} $KIBANA_DIR
     fi
 }

@@ -443,7 +443,7 @@ function configure_kibana {
             s|%KEYSTONE_AUTH_URI%|$KEYSTONE_AUTH_URI|g;
         " -i $KIBANA_CFG_DIR/kibana.yml

-        ln -sf $KIBANA_CFG_DIR/kibana.yml $GATE_CONFIGURATION_DIR/kibana.yml
+        sudo ln -sf $KIBANA_CFG_DIR/kibana.yml $GATE_CONFIGURATION_DIR/kibana.yml
     fi
 }

@@ -522,7 +522,7 @@ function build_kibana_plugin {
     yarn --cwd $KIBANA_DEV_DIR kbn bootstrap
     yarn --cwd $plugin_dir build

-    local get_version_script="import json; obj = json.load(open('$plugin_dir/package.json')); print obj['version']"
+    local get_version_script="import json; obj = json.load(open('$plugin_dir/package.json')); print(obj['version'])"
     local monasca_kibana_plugin_version
     monasca_kibana_plugin_version=$(python -c "$get_version_script")
     local pkg="$plugin_dir/build/monasca-kibana-plugin-$monasca_kibana_plugin_version.zip"
@@ -1154,7 +1154,7 @@ function install_monasca_agent {
     if is_service_enabled monasca-agent; then
         echo_summary "Install Monasca monasca_agent"

-        apt_get install python-yaml libxml2-dev libxslt1-dev
+        apt_get install python3-yaml libxml2-dev libxslt1-dev

         MONASCA_AGENT_EXTRAS="kafka_plugin"
         if is_service_enabled nova && [ "$VIRT_DRIVER" = "libvirt" ]; then
@@ -1262,7 +1262,7 @@ function clean_monasca_agent {

         apt_get purge libxslt1-dev
         apt_get purge libxml2-dev
-        apt_get purge python-yaml
+        apt_get purge python3-yaml
     fi
 }

@@ -1271,7 +1271,7 @@ function clean_monasca_agent {
 function install_nodejs {

     echo_summary "Install Node.js"
-    curl -sL https://deb.nodesource.com/setup_10.x | sudo bash -
+    curl -sL https://deb.nodesource.com/setup_18.x | sudo bash -

     apt_get install nodejs
     npm config set registry "http://registry.npmjs.org/"; \
@@ -4,3 +4,4 @@ os-api-ref>=1.4.0 # Apache-2.0
 reno>=3.1.0 # Apache-2.0
 openstackdocstheme>=2.2.1 # Apache-2.0
 SQLAlchemy>=1.3.0 # MIT
+oslo.config>=6.8.0 # Apache-2.0
@@ -14,5 +14,3 @@ Modules

 .. toctree::
    :maxdepth: 2
-
-   api/autoindex.rst
@@ -113,6 +113,29 @@ Administrating

    admin/index

+Glossary
+-------------
+.. toctree::
+   :maxdepth: 2
+
+   glossary
+
+Installation
+------------
+
+.. toctree::
+   :maxdepth: 2
+
+   install/index
+
+User
+------------
+
+.. toctree::
+   :maxdepth: 2
+
+   user/index
+
 Configuration
 -------------

@@ -124,7 +147,3 @@ Configuration
    admin/index
    cli/index
    configuration/sample
-.. only:: html
-   glossary
-   install/index
-   user/index
@@ -8,7 +8,7 @@ ARG CONSTRAINTS_BRANCH=master
 # Extra Python3 dependencies.
 # gevent is not in upper constrains and v1.3.6 is not working with
 # older greenlet.
-ARG EXTRA_DEPS="gunicorn gevent==1.5.0 python-memcached influxdb"
+ARG EXTRA_DEPS="gunicorn gevent>=21.12.0 python-memcached influxdb"

 # Always start from `monasca-base` image and use specific tag of it.
 ARG BASE_TAG=master
|
@ -41,7 +41,7 @@ def do_detect_revision():
|
|||||||
fingerprint = Fingerprint(sql_repository.get_engine())
|
fingerprint = Fingerprint(sql_repository.get_engine())
|
||||||
|
|
||||||
if fingerprint.revision is None:
|
if fingerprint.revision is None:
|
||||||
print(_FP_NOREVISION % fingerprint.sha1)
|
print(_FP_NOREVISION % fingerprint.sha256)
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
else:
|
else:
|
||||||
print(fingerprint.revision)
|
print(fingerprint.revision)
|
||||||
@@ -52,7 +52,7 @@ def do_fingerprint():
     if CONF.command.raw:
         print(fingerprint.schema_raw, end="")
     else:
-        print(fingerprint.sha1)
+        print(fingerprint.sha256)


 def do_stamp():
@@ -71,7 +71,7 @@ def do_stamp():
     else:
         fp = Fingerprint(engine)
         if fp.revision is None:
-            print(_FP_NOREVISION % fp.sha1)
+            print(_FP_NOREVISION % fp.sha256)
             sys.exit(1)
         rev = fp.revision

@@ -110,7 +110,7 @@ def do_version():

 def add_command_parsers(subparsers):
     parser = subparsers.add_parser('fingerprint',
-                                   help="Compute SHA1 fingerprint of "
+                                   help="Compute SHA256 fingerprint of "
                                         "current database schema ")
     parser.add_argument('-r', '--raw', action='store_true',
                         help='Print raw schema dump used for '
@@ -214,6 +214,20 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):

         return query

+    def _build_select_all_query(self, dimensions, name, tenant_id,
+                                region, start_timestamp, end_timestamp,
+                                no_record_check_dim):
+
+        from_clause = self._build_from_clause(dimensions, name, tenant_id,
+                                              region, start_timestamp,
+                                              end_timestamp)
+        if no_record_check_dim is not None:
+            query = 'select *' + from_clause + " and {} != ''".format(no_record_check_dim)
+        else:
+            query = 'select *' + from_clause
+
+        return query
+
     def _build_statistics_query(self, dimensions, name, tenant_id,
                                 region, start_timestamp, end_timestamp,
                                 statistics, period, offset, group_by, limit):
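Note: the _build_select_all_query helper added above only prefixes the existing from-clause with select * and, when no_record_check_dim is given, appends a non-empty check for that dimension. A rough sketch of the two query shapes it produces, with made-up metric, region and tenant values (the real from-clause text comes from _build_from_clause):

    # illustrative only -- metric, region and tenant values are hypothetical
    # no_record_check_dim=None:
    #     select * from "cpu.idle_perc" where _region = 'useast' and _tenant_id = 'abc'
    # no_record_check_dim='hostname':
    #     select * from "cpu.idle_perc" where _region = 'useast' and _tenant_id = 'abc' and hostname != ''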
@@ -263,7 +277,7 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
         # replace ' with \' to make query parsable
         clean_name = name.replace("'", "\\'") if PY3 \
             else name.replace("'", "\\'").encode('utf-8')
-        where_clause += ' from "{}" '.format(clean_name)
+        where_clause += ' from "{}"'.format(clean_name)

         # region
         where_clause += " where _region = '{}'".format(region)
@@ -889,9 +903,25 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
                 start_timestamp,
                 end_timestamp)
             result = self.query_tenant_db(query, tenant_id)
-            json_dim_name_list = self._build_serie_dimension_values(
+            json_dim_value_list = self._build_serie_dimension_values(
                 result, dimension_name)
-            return json_dim_name_list
+            json_dim_value_list_filtered = list()
+            for serie in result.raw['series']:
+                for dim_value_dict in json_dim_value_list:
+                    query = self._build_select_all_query(
+                        dimensions={dimension_name: dim_value_dict['dimension_value']},
+                        name=serie['name'],
+                        tenant_id=tenant_id,
+                        region=region,
+                        start_timestamp=start_timestamp,
+                        end_timestamp=end_timestamp,
+                        no_record_check_dim=None)
+                    result = self.query_tenant_db(query, tenant_id)
+                    if len(result.raw['series']) > 0:
+                        json_dim_value_list_filtered.append(dim_value_dict)
+            json_dim_value_list_filtered = sorted(json_dim_value_list_filtered, key=lambda
+                dim_value_dict: dim_value_dict['dimension_value'])
+            return json_dim_value_list_filtered
         except Exception as ex:
             LOG.exception(ex)
             raise exceptions.RepositoryException(ex)
@@ -905,6 +935,25 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
                 end_timestamp)
             result = self.query_tenant_db(query, tenant_id)
             json_dim_name_list = self._build_serie_dimension_names(result)
+            if metric_name is not None:
+                json_dim_name_list_filtered = list()
+                for dim_name_dict in json_dim_name_list:
+                    query = self._build_select_all_query(
+                        dimensions=None,
+                        name=metric_name,
+                        tenant_id=tenant_id,
+                        region=region,
+                        start_timestamp=start_timestamp,
+                        end_timestamp=end_timestamp,
+                        no_record_check_dim=dim_name_dict['dimension_name'])
+                    result = self.query_tenant_db(query, tenant_id)
+                    if len(result.raw['series']) > 0:
+                        json_dim_name_list_filtered.append(dim_name_dict)
+
+                json_dim_name_list_filtered = sorted(json_dim_name_list_filtered, key=lambda
+                    dim_name_dict: dim_name_dict['dimension_name'])
+                return json_dim_name_list_filtered
+            else:
                 return json_dim_name_list
         except Exception as ex:
             LOG.exception(ex)
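Note: both list_dimension_values and list_dimension_names now take the route above because of the "influxdb time filter problem" named in the commit message: the previous show tag values / show tag keys metadata queries did not restrict their results to the requested time range, so dimensions could be reported even when no points exist in the queried window. Each candidate returned by the metadata scan is therefore re-checked with a time-bounded select * query and kept only if it returns at least one series. A minimal sketch of such a verification query, with hypothetical metric, dimension and timestamp values (the shape matches the expectations in the unit tests further down):

    # illustrative only -- metric, region, hostname and timestamps are made up
    verify = ('select * from "cpu.idle_perc"'
              " where _region = 'useast'"
              " and \"hostname\" = 'devstack'"
              ' and time >= 1500000000000000u and time < 1500000060000000u')
    # the candidate dimension value is kept only if this query returns a series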
@@ -915,7 +964,7 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
         uri = 'http://{0}:{1}/ping'.format(CONF.influxdb.ip_address,
                                            CONF.influxdb.port)
         try:
-            resp = requests.head(url=uri)
+            resp = requests.head(url=uri, timeout=5)
         except Exception as ex:
             LOG.exception(str(ex))
             return False, str(ex)
@@ -21,7 +21,7 @@ from sqlalchemy.orm import sessionmaker

 LOG = log.getLogger(__name__)

-# Map of SHA1 fingerprints to alembic revisions. Note that this is
+# Map of SHA256 fingerprints to alembic revisions. Note that this is
 # used in the pre-alembic case and does not need to be updated if a
 # new revision is introduced.
 _REVS = {"43e5913b0272077321ab6f25ffbcda7149b6284b": "00597b5c8325",
@@ -43,8 +43,8 @@ class Fingerprint(object):
     def __init__(self, engine):
         metadata = self._get_metadata(engine)
         self.schema_raw = self._get_schema_raw(metadata)
-        self.sha1 = self._get_schema_sha1(self.schema_raw)
-        self.revision = self._get_revision(metadata, engine, self.sha1)
+        self.sha256 = self._get_schema_sha256(self.schema_raw)
+        self.revision = self._get_revision(metadata, engine, self.sha256)

     @staticmethod
     def _get_metadata(engine):
@@ -75,18 +75,18 @@ class Fingerprint(object):
         return "\n".join(schema_strings)

     @staticmethod
-    def _get_schema_sha1(schema_raw):
-        return hashlib.sha1(encodeutils.to_utf8(schema_raw)).hexdigest()
+    def _get_schema_sha256(schema_raw):
+        return hashlib.sha256(encodeutils.to_utf8(schema_raw)).hexdigest()

     @staticmethod
-    def _get_revision(metadata, engine, sha1):
+    def _get_revision(metadata, engine, sha256):
         # Alembic stores the current version in the DB so check that first
         # and fall back to the lookup table for the pre-alembic case.
         versions_table = metadata.tables.get('alembic_version')
         if versions_table is not None:
             return Fingerprint._lookup_version_from_db(versions_table, engine)
-        elif sha1:
-            return Fingerprint._lookup_version_from_table(sha1)
+        elif sha256:
+            return Fingerprint._lookup_version_from_table(sha256)

     @staticmethod
     def _get_db_session(engine):
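Note: the sha1 -> sha256 rename above is the "hashlib security problem" fix from the commit message: the schema fingerprint is now the SHA-256 hex digest of the raw schema dump, and that digest is what gets looked up in the module-level _REVS table for pre-alembic databases. A minimal sketch of the idea (stand-in schema text and lookup table; the real code uses oslo's encodeutils.to_utf8 rather than str.encode):

    import hashlib

    _REVS = {}                                  # stand-in for the module-level lookup table
    schema_raw = 'dummy_schema_raw'             # stand-in for _get_schema_raw(metadata)
    sha256 = hashlib.sha256(schema_raw.encode('utf-8')).hexdigest()
    revision = _REVS.get(sha256)                # None when the fingerprint is unknown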
@@ -102,9 +102,9 @@ class Fingerprint(object):
         return session.query(versions_table).one()[0]

     @staticmethod
-    def _lookup_version_from_table(sha1):
-        revision = _REVS.get(sha1)
+    def _lookup_version_from_table(sha256):
+        revision = _REVS.get(sha256)
         if not revision:
             LOG.warning("Fingerprint: {} does not match any revisions."
-                        .format(sha1))
+                        .format(sha256))
         return revision
@@ -35,11 +35,11 @@ class TestFingerprint(base.BaseTestCase):
         # table of fingerprints. Since we use a dummy schema, we insert a dummy
         # entry into the lookup table.
         fingerprint._REVS[
-            hashlib.sha1(b'dummy_schema_raw').hexdigest()] = 'dummy_revision'
+            hashlib.sha256(b'dummy_schema_raw').hexdigest()] = 'dummy_revision'

         f = fingerprint.Fingerprint('mock_engine')
         self.assertEqual(f.schema_raw, 'dummy_schema_raw')
-        self.assertEqual(f.sha1, hashlib.sha1(b'dummy_schema_raw').hexdigest())
+        self.assertEqual(f.sha256, hashlib.sha256(b'dummy_schema_raw').hexdigest())
         self.assertEqual(f.revision, 'dummy_revision')

     @mock.patch('monasca_api.db.fingerprint.Fingerprint._get_db_session')
@@ -61,5 +61,5 @@ class TestFingerprint(base.BaseTestCase):

         f = fingerprint.Fingerprint('mock_engine')
         self.assertEqual(f.schema_raw, 'dummy_schema_raw')
-        self.assertEqual(f.sha1, hashlib.sha1(b'dummy_schema_raw').hexdigest())
+        self.assertEqual(f.sha256, hashlib.sha256(b'dummy_schema_raw').hexdigest())
         self.assertEqual(f.revision, 'dummy_revision')
@@ -221,18 +221,20 @@ class TestRepoMetricsInfluxDB(base.BaseTestCase):

         self.assertEqual(result, [{u'dimension_value': hostname}])

-        query = ('show tag values from "{metric}"'
-                 ' with key = "{column}"'
+        query = ('select * from "{metric}"'
                  ' where _region = \'{region}\''
-                 .format(region=region, metric=metric, column=column))
+                 .format(region=region, metric=metric))
         query += ('' if db_per_tenant else ' and _tenant_id = \'{tenant_id}\''
                   .format(tenant_id=tenant_id))
+        query += (' and "{column}" = \'{hostname}\''
+                  .format(column=column,
+                          hostname=hostname))
         query += (' and time >= {start_timestamp}000000u'
                   ' and time < {end_timestamp}000000u'
                   .format(start_timestamp=start_timestamp,
                           end_timestamp=end_timestamp)
                   if timestamp else '')
-        mock_client.query.assert_called_once_with(query, database=database)
+        mock_client.query.assert_called_with(query, database=database)

     def test_list_dimension_values_with_timestamp(self):
         self.test_list_dimension_values(timestamp=True)
@@ -275,18 +277,19 @@ class TestRepoMetricsInfluxDB(base.BaseTestCase):
             {u'dimension_name': u'service'}
         ])

-        query = ('show tag keys from "{metric}"'
+        query_last = ('select * from "{metric}"'
                  ' where _region = \'{region}\''
                  .format(region=region, metric=metric))
-        query += ('' if db_per_tenant else ' and _tenant_id = \'{tenant_id}\''
+        query_last += ('' if db_per_tenant else ' and _tenant_id = \'{tenant_id}\''
                   .format(tenant_id=tenant_id))
-        query += (' and time >= {start_timestamp}000000u'
+        query_last += (' and time >= {start_timestamp}000000u'
                   ' and time < {end_timestamp}000000u'
                   .format(start_timestamp=start_timestamp,
                           end_timestamp=end_timestamp)
                   if timestamp else '')
+        query_last += (' and service != \'\'')

-        mock_client.query.assert_called_once_with(query, database=database)
+        mock_client.query.assert_called_with(query_last, database=database)

     def test_list_dimension_names_with_timestamp(self):
         self.test_list_dimension_names(timestamp=True)
@@ -3,4 +3,4 @@ Yoga Series Release Notes
 =========================

 .. release-notes::
-   :branch: stable/yoga
+   :branch: unmaintained/yoga
setup.cfg (12 lines changed)
@@ -7,7 +7,7 @@ description_file =
 author = OpenStack
 author_email = openstack-discuss@lists.openstack.org
 home_page = https://docs.openstack.org/monasca-api/latest/
-python_requires = >=3.6
+python_requires = >=3.8
 classifier =
     Environment :: OpenStack
     Intended Audience :: Information Technology
@@ -17,9 +17,10 @@ classifier =
     Programming Language :: Python
     Programming Language :: Python :: Implementation :: CPython
     Programming Language :: Python :: 3 :: Only
-    Programming Language :: Python :: 3.6
-    Programming Language :: Python :: 3.7
     Programming Language :: Python :: 3.8
+    Programming Language :: Python :: 3.9
+    Programming Language :: Python :: 3.10
+    Programming Language :: Python :: 3.11

 [files]
 packages =
@@ -65,8 +66,3 @@ autodoc_exclude_modules =
     monasca_tempest_tests.*
 api_doc_dir = contributor/api

-[build_sphinx]
-all_files = 1
-build-dir = doc/build
-source-dir = doc/source
-warning-is-error = 1
@@ -23,3 +23,4 @@ testtools>=2.2.0 # MIT
 tempest>=17.1.0 # Apache-2.0

 doc8>=0.6.0 # Apache-2.0
+oslo.config>=6.8.0 # Apache-2.0
tox.ini (12 lines changed)
@@ -1,7 +1,8 @@
 [tox]
-envlist = py38,pep8,cover
+envlist = py3,pep8,cover
 minversion = 2.7
 skipsdist = True
+ignore_basepython_conflict = True

 [testenv]
 basepython = python3
@@ -17,9 +18,10 @@ deps =
   -r{toxinidir}/test-requirements.txt
   -r{toxinidir}/requirements.txt
   .[influxdb,cassandra]
-whitelist_externals = bash
+allowlist_externals = bash
   find
   rm
+  make
 commands =
   find . -type f -name "*.pyc" -delete
   stestr run {posargs}
@@ -49,7 +51,7 @@ skip_install = True
 usedevelop = False
 commands =
   # B101(assert_ussed) - API uses asserts because of performance reasons
-  # B303 - Fingerprint class uses SHA1 to map fingerprints to alembic revisions.
+  # B303 - Fingerprint class uses SHA256 to map fingerprints to alembic revisions.
   bandit -r monasca_api -n5 -s B101,B303 -x monasca_api/tests

 [testenv:bashate]
@@ -76,7 +78,7 @@ commands =
 [testenv:pdf-docs]
 deps = {[testenv:docs]deps}
 envdir = {toxworkdir}/docs
-whitelist_externals =
+allowlist_externals =
   make
   rm
 commands =
||||||
@ -107,7 +109,7 @@ description = Builds developer documentation
|
|||||||
commands =
|
commands =
|
||||||
rm -rf doc/build doc/source/contributor/api
|
rm -rf doc/build doc/source/contributor/api
|
||||||
{[testenv:checkjson]commands}
|
{[testenv:checkjson]commands}
|
||||||
python setup.py build_sphinx
|
sphinx-build -W -b html doc/source/ doc/build/html
|
||||||
|
|
||||||
[testenv:checkniceness]
|
[testenv:checkniceness]
|
||||||
skip_install = True
|
skip_install = True
|
||||||