We shouldn't show filtered stats in the web UI
Stats filtered by the application are marked via the is_filtered field in the installation_structure. Filtering by is_filtered was added to the UI queries to Elasticsearch. The queries are tested in the migration unit tests, and the Elasticsearch query construction in the migration tests was refactored. Change-Id: Icdaa65f856aa7ea8349c39aa1bc5b8aca7368aca Closes-Bug: #1443347
This commit is contained in:
parent
a53f6740cb
commit
59f945ee66
|
@ -47,11 +47,28 @@ function($, d3, D3pie, d3tip, nv, elasticsearch) {
|
|||
}
|
||||
};
|
||||
}
|
||||
// adding filtering by is_filtered
|
||||
result = {
|
||||
aggs: {
|
||||
is_filtered: {
|
||||
filter: {
|
||||
bool: {
|
||||
should: [
|
||||
{term: {is_filtered: false}},
|
||||
{missing: {field: 'is_filtered'}}
|
||||
]
|
||||
}
|
||||
},
|
||||
aggs: result.aggs
|
||||
}
|
||||
}
|
||||
};
|
||||
return result;
|
||||
};
|
||||
|
||||
var getRootData = function(resp) {
|
||||
return currentRelease ? resp.aggregations.releases : resp.aggregations;
|
||||
var root = resp.aggregations.is_filtered;
|
||||
return currentRelease ? root.releases : root;
|
||||
};
|
||||
|
||||
var elasticSearchHost = function() {
|
||||
|
@ -75,7 +92,25 @@ function($, d3, D3pie, d3tip, nv, elasticsearch) {
|
|||
|
||||
var installationsCount = function() {
|
||||
var client = new elasticsearch.Client(elasticSearchHost());
|
||||
var request = {query: currentRelease ? {terms: {'fuel_release.release': [currentRelease]}} : {match_all: {}}};
|
||||
var request = {
|
||||
query: {
|
||||
filtered: {
|
||||
filter: {
|
||||
bool: {
|
||||
should: [
|
||||
{term: {'is_filtered': false}},
|
||||
{missing: {'field': 'is_filtered'}},
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (currentRelease) {
|
||||
request.query.filtered.filter.bool['must'] = {
|
||||
terms: {'fuel_release.release': [currentRelease]}
|
||||
}
|
||||
}
|
||||
|
||||
client.count({
|
||||
index: 'fuel',
|
||||
|
|
|
@ -86,6 +86,7 @@ MAPPING_FUEL = {
|
|||
"unallocated_nodes_num": {"type": "long"},
|
||||
"creation_date": {"type": "date"},
|
||||
"modification_date": {"type": "date"},
|
||||
"is_filtered": {"type": "boolean"},
|
||||
"fuel_release": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
|
|
|
@ -138,6 +138,7 @@ class Migrator(object):
|
|||
('master_node_uid',),
|
||||
json_fields=('structure',),
|
||||
mixed_fields_mapping=(
|
||||
NameMapping(source='is_filtered', dest='is_filtered'),
|
||||
NameMapping(source='creation_date', dest='creation_date'),
|
||||
NameMapping(source='modification_date',
|
||||
dest='modification_date')
|
||||
|
|
|
@ -12,6 +12,7 @@
|
|||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from sqlalchemy import Boolean
|
||||
from sqlalchemy import Column
|
||||
from sqlalchemy import DateTime
|
||||
from sqlalchemy import Integer
|
||||
|
@ -46,3 +47,4 @@ class InstallationStructure(Base):
|
|||
structure = Column(JSON, nullable=False)
|
||||
creation_date = Column(DateTime)
|
||||
modification_date = Column(DateTime)
|
||||
is_filtered = Column(Boolean)
|
||||
|
|
|
@ -145,7 +145,8 @@ class ElasticTest(TestCase):
|
|||
def generate_structure(
|
||||
self,
|
||||
clusters_num_range=(0, 10),
|
||||
unallocated_nodes_num_range=(0, 20)
|
||||
unallocated_nodes_num_range=(0, 20),
|
||||
is_filtered_values=(True, False, None)
|
||||
):
|
||||
mn_uid = '{}'.format(uuid.uuid4())
|
||||
clusters_num = random.randint(*clusters_num_range)
|
||||
|
@ -166,7 +167,8 @@ class ElasticTest(TestCase):
|
|||
'clusters': [],
|
||||
'unallocated_nodes_num_range': random.randint(
|
||||
*unallocated_nodes_num_range),
|
||||
'allocated_nodes_num': 0
|
||||
'allocated_nodes_num': 0,
|
||||
'is_filtered': random.choice(is_filtered_values)
|
||||
}
|
||||
|
||||
for _ in xrange(clusters_num):
|
||||
|
@ -175,10 +177,12 @@ class ElasticTest(TestCase):
|
|||
structure['allocated_nodes_num'] += cluster['nodes_num']
|
||||
return structure
|
||||
|
||||
def generate_data(self, installations_num=100):
|
||||
def generate_data(self, installations_num=100,
|
||||
is_filtered_values=(True, False, None)):
|
||||
structures = []
|
||||
for _ in xrange(installations_num):
|
||||
structure = self.generate_structure()
|
||||
structure = self.generate_structure(
|
||||
is_filtered_values=is_filtered_values)
|
||||
self.es.index(config.INDEX_FUEL, config.DOC_TYPE_STRUCTURE,
|
||||
body=structure, id=structure['master_node_uid'])
|
||||
structures.append(structure)
|
||||
|
@ -191,7 +195,8 @@ class MigrationTest(ElasticTest, DbTest):
|
|||
def setUp(self):
|
||||
super(MigrationTest, self).setUp()
|
||||
|
||||
def create_dumb_structure(self, set_md=True):
|
||||
def create_dumb_structure(self, set_md=True,
|
||||
is_filtered_values=(True, False, None)):
|
||||
mn_uid = '{}'.format(uuid.uuid4())
|
||||
structure = {
|
||||
'master_node_uid': mn_uid,
|
||||
|
@ -214,11 +219,13 @@ class MigrationTest(ElasticTest, DbTest):
|
|||
m_date = now
|
||||
else:
|
||||
m_date = None
|
||||
is_filtered = random.choice(is_filtered_values)
|
||||
db_session.add(InstallationStructure(master_node_uid=mn_uid,
|
||||
structure=structure,
|
||||
creation_date=now,
|
||||
modification_date=m_date))
|
||||
db_session.commit()
|
||||
modification_date=m_date,
|
||||
is_filtered=is_filtered))
|
||||
db_session.flush()
|
||||
return mn_uid
|
||||
|
||||
def _action_name(self):
|
||||
|
|
|
@ -69,13 +69,34 @@ class Reports(ElasticTest):
|
|||
self.assertDictEqual(expected_distr, actual_distr)
|
||||
|
||||
def test_installations_number(self):
|
||||
# Generating not filtered data
|
||||
installations_num = 150
|
||||
installations = self.generate_data(installations_num=installations_num)
|
||||
installations = self.generate_data(
|
||||
installations_num=installations_num,
|
||||
is_filtered_values=(None, False)
|
||||
)
|
||||
# Generating filtered data
|
||||
self.generate_data(
|
||||
installations_num=70,
|
||||
is_filtered_values=(True,)
|
||||
)
|
||||
release = "6.0-ga"
|
||||
query = {
|
||||
"query": {
|
||||
"terms": {
|
||||
"fuel_release.release": [release]
|
||||
"filtered": {
|
||||
"filter": {
|
||||
"bool": {
|
||||
"should": [
|
||||
{"term": {"is_filtered": False}},
|
||||
{"missing": {"field": "is_filtered"}},
|
||||
],
|
||||
"must": {
|
||||
"terms": {
|
||||
"fuel_release.release": [release]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -143,11 +164,8 @@ class Reports(ElasticTest):
|
|||
sum(d['doc_count'] for d in libvirt_types['buckets'])
|
||||
)
|
||||
|
||||
def test_filtration_by_releases(self):
|
||||
installations_num = 100
|
||||
self.generate_data(installations_num=installations_num)
|
||||
|
||||
# Fetching available releases
|
||||
def _get_releases_data(self):
|
||||
# Fetching available releases info
|
||||
query = {
|
||||
"size": 0,
|
||||
"aggs": {
|
||||
|
@ -161,44 +179,30 @@ class Reports(ElasticTest):
|
|||
resp = self.es.search(index=config.INDEX_FUEL,
|
||||
doc_type=config.DOC_TYPE_STRUCTURE,
|
||||
body=query)
|
||||
releases_data = dict([(d['key'], d['doc_count']) for d in
|
||||
resp['aggregations']['releases']['buckets']])
|
||||
return dict([(d['key'], d['doc_count']) for d in
|
||||
resp['aggregations']['releases']['buckets']])
|
||||
|
||||
# Adding filtration by the releases into libvirt distribution query
|
||||
def _query_libvirt_distribution(self):
|
||||
statuses = ["operational", "error"]
|
||||
filter_by_release = six.iterkeys(releases_data).next()
|
||||
query = {
|
||||
"size": 0,
|
||||
"aggs": {
|
||||
"releases": {
|
||||
"filter": {
|
||||
"terms": {
|
||||
"fuel_release.release": [filter_by_release]
|
||||
}
|
||||
},
|
||||
|
||||
"aggs": {
|
||||
"clusters": {
|
||||
"nested": {
|
||||
"path": "clusters"
|
||||
},
|
||||
"aggs": {
|
||||
"statuses": {
|
||||
"filter": {
|
||||
"terms": {"status": statuses}
|
||||
},
|
||||
"aggs": {
|
||||
"attributes": {
|
||||
"nested": {
|
||||
"path": "clusters.attributes"
|
||||
},
|
||||
"aggs": {
|
||||
"libvirt_types": {
|
||||
"terms": {
|
||||
"field": "libvirt_type"
|
||||
}
|
||||
}
|
||||
}
|
||||
return {
|
||||
"clusters": {
|
||||
"nested": {
|
||||
"path": "clusters"
|
||||
},
|
||||
"aggs": {
|
||||
"statuses": {
|
||||
"filter": {
|
||||
"terms": {"status": statuses}
|
||||
},
|
||||
"aggs": {
|
||||
"attributes": {
|
||||
"nested": {
|
||||
"path": "clusters.attributes"
|
||||
},
|
||||
"aggs": {
|
||||
"libvirt_types": {
|
||||
"terms": {
|
||||
"field": "libvirt_type"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -208,6 +212,46 @@ class Reports(ElasticTest):
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
def _query_filter_by_release(self, releases, query):
|
||||
return {
|
||||
"releases": {
|
||||
"filter": {
|
||||
"terms": {
|
||||
"fuel_release.release": releases
|
||||
}
|
||||
},
|
||||
"aggs": query
|
||||
}
|
||||
}
|
||||
|
||||
def _query_filter_by_is_filtered(self, query):
|
||||
return {
|
||||
"is_filtered": {
|
||||
"filter": {
|
||||
"bool": {
|
||||
"should": [
|
||||
{"term": {"is_filtered": False}},
|
||||
{"missing": {"field": "is_filtered"}},
|
||||
]
|
||||
}
|
||||
},
|
||||
"aggs": query
|
||||
}
|
||||
}
|
||||
|
||||
def test_filtration_by_releases(self):
|
||||
installations_num = 100
|
||||
self.generate_data(installations_num=installations_num)
|
||||
|
||||
# Adding filtration by the releases into libvirt distribution query
|
||||
releases_data = self._get_releases_data()
|
||||
filter_by_release = six.iterkeys(releases_data).next()
|
||||
query = {
|
||||
"size": 0,
|
||||
"aggs": self._query_filter_by_release(
|
||||
[filter_by_release], self._query_libvirt_distribution())
|
||||
}
|
||||
resp = self.es.search(index=config.INDEX_FUEL,
|
||||
doc_type=config.DOC_TYPE_STRUCTURE,
|
||||
body=query)
|
||||
|
@ -216,3 +260,47 @@ class Reports(ElasticTest):
|
|||
# checking releases are filtered
|
||||
self.assertEquals(releases_data[filter_by_release],
|
||||
filtered_releases['doc_count'])
|
||||
|
||||
def test_filtration_by_is_filtered(self):
|
||||
# Query for fetching libvirt distribution
|
||||
libvirt_query = self._query_libvirt_distribution()
|
||||
|
||||
# Query for filtration by is_filtered
|
||||
query = {
|
||||
"size": 0,
|
||||
"aggs": self._query_filter_by_is_filtered(libvirt_query)
|
||||
}
|
||||
|
||||
# Checking filtered docs aren't fetched
|
||||
filtered_num = 15
|
||||
self.generate_data(installations_num=filtered_num,
|
||||
is_filtered_values=(True,))
|
||||
|
||||
resp = self.es.search(index=config.INDEX_FUEL,
|
||||
doc_type=config.DOC_TYPE_STRUCTURE,
|
||||
body=query)
|
||||
docs = resp['aggregations']['is_filtered']
|
||||
self.assertEqual(0, docs['doc_count'])
|
||||
|
||||
# Checking false filtered docs are fetched
|
||||
not_filtered_num = 20
|
||||
self.generate_data(installations_num=not_filtered_num,
|
||||
is_filtered_values=(False,))
|
||||
|
||||
resp = self.es.search(index=config.INDEX_FUEL,
|
||||
doc_type=config.DOC_TYPE_STRUCTURE,
|
||||
body=query)
|
||||
docs = resp['aggregations']['is_filtered']
|
||||
self.assertEqual(not_filtered_num, docs['doc_count'])
|
||||
|
||||
# Checking None filtered docs are fetched
|
||||
none_filtered_num = 25
|
||||
self.generate_data(installations_num=none_filtered_num,
|
||||
is_filtered_values=(None,))
|
||||
|
||||
resp = self.es.search(index=config.INDEX_FUEL,
|
||||
doc_type=config.DOC_TYPE_STRUCTURE,
|
||||
body=query)
|
||||
docs = resp['aggregations']['is_filtered']
|
||||
self.assertEqual(not_filtered_num + none_filtered_num,
|
||||
docs['doc_count'])
|
||||
|
|
|
@ -20,7 +20,8 @@ from mock import patch
|
|||
from migration.test.base import MigrationTest
|
||||
|
||||
from migration.migrator import Migrator
|
||||
from migration.model import ActionLog
|
||||
from migration.model import ActionLog as AL
|
||||
from migration.model import InstallationStructure as IS
|
||||
|
||||
|
||||
class MigratorTest(MigrationTest):
|
||||
|
@ -155,6 +156,25 @@ class MigratorTest(MigrationTest):
|
|||
self.assertIsNotNone(source['creation_date'])
|
||||
self.assertIsNone(source['modification_date'])
|
||||
|
||||
def test_filtered_installation_structure_migration(self):
|
||||
docs_num = 100
|
||||
for _ in xrange(docs_num):
|
||||
self.create_dumb_structure()
|
||||
|
||||
migrator = Migrator()
|
||||
is_filtered_variants = set(obj.is_filtered for obj in
|
||||
migrator.db_session.query(IS).all())
|
||||
self.assertEqual(set((True, False, None)), is_filtered_variants)
|
||||
|
||||
sync_info = migrator.get_sync_info(config.STRUCTURES_DB_TABLE_NAME)
|
||||
indexed_docs_before = self.get_indexed_docs_num(sync_info)
|
||||
|
||||
migrator.migrate_installation_structure()
|
||||
self.es.indices.refresh(index=config.INDEX_FUEL)
|
||||
|
||||
indexed_docs_after = self.get_indexed_docs_num(sync_info)
|
||||
self.assertEqual(indexed_docs_before + docs_num, indexed_docs_after)
|
||||
|
||||
@patch('migration.config.DB_SYNC_CHUNK_SIZE', 2)
|
||||
def test_null_modification_date_migration(self):
|
||||
docs_num = 5
|
||||
|
@ -209,8 +229,8 @@ class MigratorTest(MigrationTest):
|
|||
new_sync_info = migrator.get_sync_info(
|
||||
config.ACTION_LOGS_DB_TABLE_NAME)
|
||||
time_of_sync = parser.parse(new_sync_info.last_sync_time)
|
||||
last_obj = migrator.db_session.query(ActionLog).order_by(
|
||||
ActionLog.id.desc()).first()
|
||||
last_obj = migrator.db_session.query(AL).order_by(
|
||||
AL.id.desc()).first()
|
||||
|
||||
# checking sync time is updated
|
||||
self.assertLessEqual(time_before, time_of_sync)
|
||||
|
|
Loading…
Reference in New Issue