Delete outdated entities by the consistency service

As part of Rocky fast-failover support,
vitrage-graph is now reloaded from the
database. This causes an issue for
datasources that use caches, which can
become outdated in two cases:

- After vitrage-graph restart. This is
handled by the consistency service.

- If more than one vitrage-collector is used.

The solution/workaround for Rocky is to let
the consistency enforcer delete outdated
entities for datasources that specifically
request it.

For Stein, we should move this handling to
the processor (so the entities will be
deleted immediately) and delete all the
caches.
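
For illustration, a datasource opts into this behavior by overriding the
should_delete_outdated_entities() hook that this change adds to DriverBase.
A minimal sketch, assuming a hypothetical driver (the class name and the
elided methods are illustrative only):

    from vitrage.datasources.driver_base import DriverBase


    class ExampleDriver(DriverBase):
        """Hypothetical driver, shown only to illustrate the opt-in.

        get_all(), get_changes() and the rest of the driver are elided;
        only the new hook is relevant here.
        """

        @staticmethod
        def should_delete_outdated_entities():
            # Opt in: the consistency enforcer will mark as deleted any
            # vertex of this datasource whose sample timestamp is older
            # than twice the snapshots interval.
            return True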

Change-Id: I953137dc870f48ed42acba93789bb947809c41cc
Ifat Afek 2018-08-02 11:31:46 +00:00
parent fe8e50721b
commit cf842b7723
21 changed files with 260 additions and 37 deletions

View File

@ -0,0 +1,8 @@
---
issues:
- As part of Rocky fast-failover support, vitrage-graph is now reloaded from
the database. This causes an issue for datasources that use caches, which can
become outdated in two cases:
- After vitrage-graph restart. This is handled by the consistency service.
- If more than one vitrage-collector is used.
Please avoid running multiple vitrage-collector services.

View File

@ -47,6 +47,7 @@ class VertexProperties(ElementProperties):
VITRAGE_RESOURCE_TYPE = 'vitrage_resource_type'
RESOURCE = 'resource'
IS_REAL_VITRAGE_ID = 'is_real_vitrage_id'
VITRAGE_DATASOURCE_NAME = 'vitrage_datasource_name'
class EdgeProperties(ElementProperties):

View File

@ -20,6 +20,7 @@ from oslo_log import log
from vitrage.common.constants import DatasourceAction
from vitrage.common.constants import DatasourceProperties as DSProps
from vitrage.common.constants import GraphAction
from vitrage.common.constants import VertexProperties as VProps
from vitrage.utils import datetime as datetime_utils
LOG = log.getLogger(__name__)
@ -28,6 +29,8 @@ LOG = log.getLogger(__name__)
@six.add_metaclass(abc.ABCMeta)
class DriverBase(object):
_datasource_name = None
def __init__(self):
pass
@ -71,6 +74,7 @@ class DriverBase(object):
cls._add_entity_type(entity, entity_type)
cls._add_datasource_action(entity, datasource_action)
cls._add_sampling_time(entity)
entity[VProps.VITRAGE_DATASOURCE_NAME] = cls._datasource_name
pickleable_entities.append(entity)
return pickleable_entities
@ -136,3 +140,16 @@ class DriverBase(object):
def properties_to_filter_out():
"""Return a list of properties to be removed from the event"""
return []
@staticmethod
def should_delete_outdated_entities():
"""Should the processor delete entities when become outdated
An entity that was not updated in the last get_all is considered
outdated. If this method returns true, then it will be automatically
deleted when outdated.
Note that this behavior does not suit all datasources - datasources
that are based only on notifications do not update their entities in
get_all, so they should return False.
"""
return False

View File

@ -99,3 +99,7 @@ class NagiosDriver(AlarmDriverBase):
def _is_valid(self, alarm):
return alarm[NagiosProps.RESOURCE_TYPE] is not None and \
alarm[NagiosProps.RESOURCE_NAME] is not None
@staticmethod
def should_delete_outdated_entities():
return True

View File

@ -61,6 +61,10 @@ class StaticDriver(DriverBase):
STATIC_DATASOURCE,
datasource_action)
@staticmethod
def should_delete_outdated_entities():
return True
def _get_and_cache_all_entities(self):
self.entities_cache = self._get_all_entities()
return self.entities_cache

View File

@ -20,10 +20,12 @@ from vitrage.utils import opt_exists
drivers = {}
# noinspection PyProtectedMember
def get_drivers_by_name(conf, driver_names):
for d_name in driver_names:
if not drivers.get(d_name):
drivers[d_name] = utils.import_object(conf[d_name].driver, conf)
drivers[d_name].__class__._datasource_name = d_name
return [drivers[d_name] for d_name in driver_names]
@ -36,3 +38,7 @@ def get_pull_drivers_names(conf):
def get_push_drivers_names(conf):
return [name for name in conf.datasources.types
if conf[name].update_method.lower() == UpdateMethod.PUSH]
def get_driver_class(conf, driver_name):
return utils.import_class(conf[driver_name].driver)
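
As an illustration of how the name stamped on the driver class above flows
into the events, a rough sketch (assuming 'example' is a datasource
registered in the configuration and conf is the loaded configuration
object; these names are illustrative, not part of this change):

    from vitrage.common.constants import DatasourceAction
    from vitrage.datasources import utils

    # get_drivers_by_name() stores the configured name on the driver class,
    # so every event built by make_pickleable() carries
    # 'vitrage_datasource_name' and the consistency enforcer can map a
    # vertex back to the datasource that created it.
    driver = utils.get_drivers_by_name(conf, ['example'])[0]
    events = driver.make_pickleable([{'id': 'resource-1'}],
                                    'example',
                                    DatasourceAction.SNAPSHOT)
    assert events[0]['vitrage_datasource_name'] == 'example'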

View File

@ -24,6 +24,7 @@ from vitrage.common.constants import GraphAction
from vitrage.common.constants import VertexProperties as VProps
from vitrage.datasources.consistency import CONSISTENCY_DATASOURCE
from vitrage.datasources import OPENSTACK_CLUSTER
from vitrage.datasources import utils
from vitrage.entity_graph import EVALUATOR_TOPIC
from vitrage.evaluator.actions.evaluator_event_transformer \
import VITRAGE_DATASOURCE
@ -43,7 +44,9 @@ class ConsistencyEnforcer(object):
self.actions_callback = actions_callback or VitrageNotifier(
conf, 'vitrage_consistency', [EVALUATOR_TOPIC]).notify
self.graph = entity_graph
self._init_datasources_to_mark_deleted()
# noinspection PyBroadException
def periodic_process(self):
try:
LOG.info('Periodic consistency check..')
@ -54,16 +57,16 @@ class ConsistencyEnforcer(object):
self._push_events_to_queue(old_deleted_entities,
GraphAction.REMOVE_DELETED_ENTITY)
stale_entities = self._find_placeholder_entities()
LOG.debug('Found %s vertices to be marked as deleted by '
'consistency service: %s', len(stale_entities),
stale_entities = self._find_outdated_entities_to_mark_as_deleted()
LOG.debug('Found %s outdated vertices to be marked as deleted '
'by the consistency service: %s', len(stale_entities),
stale_entities)
self._push_events_to_queue(stale_entities,
GraphAction.DELETE_ENTITY)
except Exception:
LOG.exception('Error in deleting vertices from entity_graph.')
def _find_placeholder_entities(self):
def _find_outdated_entities_to_mark_as_deleted(self):
vitrage_sample_tstmp = str(utcnow() - timedelta(
seconds=2 * self.conf.datasources.snapshots_interval))
query = {
@ -71,13 +74,11 @@ class ConsistencyEnforcer(object):
{'!=': {VProps.VITRAGE_TYPE: VITRAGE_DATASOURCE}},
{'<': {VProps.VITRAGE_SAMPLE_TIMESTAMP: vitrage_sample_tstmp}},
{'==': {VProps.VITRAGE_IS_DELETED: False}},
{'==': {VProps.VITRAGE_IS_PLACEHOLDER: True}},
]
}
vertices = self.graph.get_vertices(query_dict=query)
return set(self._filter_vertices_to_be_deleted(vertices))
return set(self._filter_vertices_to_be_marked_as_deleted(vertices))
def _find_old_deleted_entities(self):
vitrage_sample_tstmp = str(utcnow() - timedelta(
@ -115,6 +116,20 @@ class ConsistencyEnforcer(object):
not (ver[VProps.VITRAGE_CATEGORY] == EntityCategory.RESOURCE and
ver[VProps.VITRAGE_TYPE] == OPENSTACK_CLUSTER), vertices))
def _filter_vertices_to_be_marked_as_deleted(self, vertices):
return list(filter(self._should_delete_vertex, vertices))
def _should_delete_vertex(self, vertex):
"""Decide which vertices should be deleted by the consistency
- delete all placeholder vertices, except from the openstack.cluster
- delete vertices that their datasource is in the list
"""
return (vertex.get(VProps.VITRAGE_IS_PLACEHOLDER) and
not vertex[VProps.VITRAGE_TYPE] == OPENSTACK_CLUSTER) or \
(vertex.get(VProps.VITRAGE_DATASOURCE_NAME) in
self.datasources_to_mark_deleted)
def _wait_for_action(self, function):
count_retries = 0
while True:
@ -127,3 +142,14 @@ class ConsistencyEnforcer(object):
count_retries += 1
time.sleep(self.conf.consistency.initialization_interval)
def _init_datasources_to_mark_deleted(self):
self.datasources_to_mark_deleted = []
for driver_name in self.conf.datasources.types:
driver_class = utils.get_driver_class(self.conf, driver_name)
if driver_class.should_delete_outdated_entities():
self.datasources_to_mark_deleted.append(driver_name)
LOG.info('Vertices of the following datasources will be deleted if '
'they become outdated: %s', self.datasources_to_mark_deleted)

View File

@ -39,18 +39,21 @@ class DatasourceInfoMapper(object):
self.category_normalizer = self._init_category_normalizer()
self.datasources_value_confs = self._load_value_configurations()
def vitrage_operational_value(self, datasource_name, value):
return self._get_value_data(datasource_name,
def vitrage_operational_value(self, vitrage_type, value):
return self._get_value_data(vitrage_type,
value,
self.OPERATIONAL_VALUES)
def value_priority(self, datasource_name, value):
return self._get_value_data(datasource_name,
def value_priority(self, vitrage_type, value):
return self._get_value_data(vitrage_type,
value,
self.PRIORITY_VALUES)
def vitrage_aggregate_values(self, new_vertex, graph_vertex):
datasource_name = new_vertex[VProps.VITRAGE_TYPE] if \
LOG.debug('new_vertex: %s', new_vertex)
LOG.debug('graph_vertex: %s', graph_vertex)
vitrage_type = new_vertex[VProps.VITRAGE_TYPE] if \
VProps.VITRAGE_TYPE in new_vertex.properties else \
graph_vertex[VProps.VITRAGE_TYPE]
@ -58,15 +61,15 @@ class DatasourceInfoMapper(object):
VProps.VITRAGE_CATEGORY in new_vertex.properties else \
graph_vertex[VProps.VITRAGE_CATEGORY]
if datasource_name in self.datasources_value_confs or \
datasource_name not in self.conf.datasources.types:
if vitrage_type in self.datasources_value_confs or \
vitrage_type not in self.conf.datasources.types:
value_properties = \
self.category_normalizer[vitrage_category].value_properties()
vitrage_operational_value, vitrage_aggregated_value, value_priority = \
self._find_operational_value_and_priority(new_vertex,
graph_vertex,
value_properties[0],
datasource_name)
vitrage_type)
value_properties.pop(0)
for property_ in value_properties:
@ -74,7 +77,7 @@ class DatasourceInfoMapper(object):
self._find_operational_value_and_priority(new_vertex,
graph_vertex,
property_,
datasource_name)
vitrage_type)
if t_value_priority > value_priority:
vitrage_operational_value = t_operational_value
vitrage_aggregated_value = t_aggregated_value
@ -90,9 +93,9 @@ class DatasourceInfoMapper(object):
self.category_normalizer[vitrage_category].set_operational_value(
new_vertex, self.UNDEFINED_DATASOURCE)
def get_datasource_priorities(self, datasource_name=None):
if datasource_name:
datasource_info = self.datasources_value_confs[datasource_name]
def get_datasource_priorities(self, vitrage_type=None):
if vitrage_type:
datasource_info = self.datasources_value_confs[vitrage_type]
return datasource_info[self.PRIORITY_VALUES]
else:
priorities_dict = \
@ -176,13 +179,13 @@ class DatasourceInfoMapper(object):
operational_value, full_path,
state_class_instance.__class__.__name__)
def _get_value_data(self, datasource_name, value, data_type):
def _get_value_data(self, vitrage_type, value, data_type):
try:
upper_value = value if not value else value.upper()
if datasource_name in self.datasources_value_confs:
if vitrage_type in self.datasources_value_confs:
values_conf = self.datasources_value_confs[
datasource_name][data_type]
vitrage_type][data_type]
return values_conf[upper_value] if upper_value in values_conf \
else values_conf[None]
@ -193,14 +196,14 @@ class DatasourceInfoMapper(object):
return values_conf[upper_value] if upper_value in values_conf \
else values_conf[None]
except Exception:
LOG.error('Exception in datasource: %s', datasource_name)
LOG.error('Exception in datasource: %s', vitrage_type)
raise
def _find_operational_value_and_priority(self,
new_vertex,
graph_vertex,
property_,
datasource_name):
vitrage_type):
state = self._get_updated_property(new_vertex,
graph_vertex,
property_)
@ -208,9 +211,9 @@ class DatasourceInfoMapper(object):
upper_state = state if not state else state.upper()
vitrage_operational_state = self.vitrage_operational_value(
datasource_name, upper_state)
vitrage_type, upper_state)
value_priority = self.value_priority(datasource_name,
value_priority = self.value_priority(vitrage_type,
upper_state)
return vitrage_operational_state, upper_state, value_priority

View File

@ -54,11 +54,13 @@ class Processor(processor.ProcessorBase):
self._enrich_event(event)
entity = self.transformer_manager.transform(event)
if entity.action not in self.actions.keys():
LOG.debug('deprecated or unknown entity %s ignored', str(entity))
return
self._calculate_vitrage_aggregated_values(entity.vertex, entity.action)
self._set_datasource_name(entity, event)
self.actions[entity.action](entity.vertex, entity.neighbors)
def create_entity(self, new_vertex, neighbors):
@ -360,3 +362,10 @@ class Processor(processor.ProcessorBase):
alarm[VProps.VITRAGE_RESOURCE_ID] = r_id
alarm[VProps.VITRAGE_RESOURCE_TYPE] = r_type
alarm[VProps.VITRAGE_RESOURCE_PROJECT_ID] = r_project_id
@staticmethod
def _set_datasource_name(entity, event):
if entity.vertex and entity.action == GraphAction.CREATE_ENTITY:
datasource_name = event.get(VProps.VITRAGE_DATASOURCE_NAME)
entity.vertex.properties[VProps.VITRAGE_DATASOURCE_NAME] = \
datasource_name

View File

@ -30,7 +30,8 @@ def create_vertex(vitrage_id,
update_timestamp=None,
project_id=None,
vitrage_resource_project_id=None,
metadata=None):
metadata=None,
datasource_name=None):
"""A builder to create a vertex
:param vitrage_id:
@ -55,6 +56,8 @@ def create_vertex(vitrage_id,
:type vitrage_is_placeholder: boolean
:param project_id:
:type project_id: str
:param datasource_name:
:type datasource_name: str
:return:
:rtype: Vertex
"""
@ -71,6 +74,7 @@ def create_vertex(vitrage_id,
VConst.VITRAGE_ID: vitrage_id,
VConst.PROJECT_ID: project_id,
VConst.VITRAGE_RESOURCE_PROJECT_ID: vitrage_resource_project_id,
VConst.VITRAGE_DATASOURCE_NAME: datasource_name,
}
if metadata:
properties.update(metadata)

View File

@ -39,6 +39,7 @@ from vitrage.tests.mocks import utils
from vitrage.utils.datetime import utcnow
# noinspection PyProtectedMember
class TestConsistencyFunctional(TestFunctionalBase, TestConfiguration):
CONSISTENCY_OPTS = [
@ -124,12 +125,82 @@ class TestConsistencyFunctional(TestFunctionalBase, TestConfiguration):
})
self.assertThat(instance_vertices,
matchers.HasLength(self.NUM_INSTANCES - 3))
# expected number of vertices:
#   total vertices - 3 (deleted instances)
#   + 3 nics - 1 (deleted nic)
#   + 3 cinder.volumes - 1 (deleted volume)
self.assertThat(self.processor.entity_graph.get_vertices(),
matchers.HasLength(
self._num_total_expected_vertices() - 3)
# 3 instances deleted
self._num_total_expected_vertices() - 3 +
3 - 1 + # one nic deleted
3 - 1) # one cinder.volume deleted
)
self.assertThat(deleted_instance_vertices, matchers.HasLength(3))
# one nic was deleted, one marked as deleted, one untouched
self._assert_vertices_status('nic', 2, 1)
# one cinder.volume deleted, other two are untouched
# cinder.volume vertices should not be marked as deleted, since the
# datasource did not ask to delete outdated vertices.
self._assert_vertices_status('cinder.volume', 2, 0)
def test_should_delete_vertex(self):
# should be deleted because the static datasource asks to delete its
# outdated vertices
static_vertex = {VProps.VITRAGE_DATASOURCE_NAME: 'static'}
self.assertTrue(
self.consistency_enforcer._should_delete_vertex(static_vertex))
# should not be deleted because the cinder datasource does not ask to
# delete its outdated vertices
volume_vertex = {VProps.VITRAGE_DATASOURCE_NAME: 'cinder.volume'}
self.assertFalse(
self.consistency_enforcer._should_delete_vertex(volume_vertex))
# should be deleted because it is a placeholder
placeholder_vertex = {VProps.VITRAGE_IS_PLACEHOLDER: True,
VProps.VITRAGE_TYPE: 'cinder.volume'}
self.assertTrue(self.consistency_enforcer.
_should_delete_vertex(placeholder_vertex))
# should not be deleted, because the openstack.cluster placeholder is excluded
cluster_vertex = {VProps.VITRAGE_IS_PLACEHOLDER: True,
VProps.VITRAGE_TYPE: 'openstack.cluster'}
self.assertFalse(self.consistency_enforcer._should_delete_vertex(
cluster_vertex))
vertices = \
[static_vertex, volume_vertex, placeholder_vertex, cluster_vertex]
vertices_to_mark_deleted = self.consistency_enforcer.\
_filter_vertices_to_be_marked_as_deleted(vertices)
self.assertThat(vertices_to_mark_deleted, matchers.HasLength(2))
self.assertTrue(static_vertex in vertices_to_mark_deleted)
self.assertTrue(placeholder_vertex in vertices_to_mark_deleted)
self.assertFalse(volume_vertex in vertices_to_mark_deleted)
self.assertFalse(cluster_vertex in vertices_to_mark_deleted)
def _assert_vertices_status(self, vitrage_type,
num_vertices, num_marked_deleted):
vertices = \
self.processor.entity_graph.get_vertices({
VProps.VITRAGE_CATEGORY: EntityCategory.RESOURCE,
VProps.VITRAGE_TYPE: vitrage_type,
})
self.assertThat(vertices, matchers.HasLength(num_vertices))
marked_deleted_vertices = \
self.processor.entity_graph.get_vertices({
VProps.VITRAGE_CATEGORY: EntityCategory.RESOURCE,
VProps.VITRAGE_TYPE: vitrage_type,
VProps.VITRAGE_IS_DELETED: True
})
self.assertThat(marked_deleted_vertices,
matchers.HasLength(num_marked_deleted))
def _periodic_process_setup_stage(self, consistency_interval):
self._create_processor_with_graph(self.conf, processor=self.processor)
current_time = utcnow()
@ -162,6 +233,9 @@ class TestConsistencyFunctional(TestFunctionalBase, TestConfiguration):
current_time + timedelta(seconds=2 * consistency_interval + 1))
self.processor.entity_graph.update_vertex(instance_vertices[i])
self._add_static_resources(consistency_interval)
self._add_cinder_volume_resources(consistency_interval)
def _set_end_messages(self):
self.initialization_status.end_messages[NOVA_ZONE_DATASOURCE] = True
self.initialization_status.end_messages[NOVA_HOST_DATASOURCE] = True
@ -227,3 +301,50 @@ class TestConsistencyFunctional(TestFunctionalBase, TestConfiguration):
num_retries += 1
if num_retries == 30:
return
def _add_static_resources(self, consistency_interval):
self._add_resources_with_different_timestamps(
consistency_interval=consistency_interval,
datasource_name='static', resource_type='nic')
def _add_cinder_volume_resources(self, consistency_interval):
self._add_resources_with_different_timestamps(
consistency_interval=consistency_interval,
datasource_name='cinder.volume', resource_type='cinder.volume')
def _add_resources_with_different_timestamps(self, consistency_interval,
datasource_name,
resource_type):
# add resources to the graph:
# - updated_resource
# - outdated_resource with an old timestamp
# - deleted_resource with an old timestamp and is_deleted==true
future_timestamp = \
str(utcnow() + timedelta(seconds=2 * consistency_interval))
past_timestamp = \
str(utcnow() - timedelta(seconds=2 * consistency_interval - 1))
updated_resource = self._create_resource(
vitrage_id=resource_type + '1234', resource_type=resource_type,
datasource_name=datasource_name, sample_timestamp=future_timestamp)
outdated_resource = self._create_resource(
vitrage_id=resource_type + '5678', resource_type=resource_type,
datasource_name=datasource_name, sample_timestamp=past_timestamp)
deleted_resource = self._create_resource(
vitrage_id=resource_type + '9999', resource_type=resource_type,
datasource_name=datasource_name, sample_timestamp=past_timestamp,
is_deleted=True)
self.graph.add_vertex(updated_resource)
self.graph.add_vertex(outdated_resource)
self.graph.add_vertex(deleted_resource)
# get the list of vertices
resource_vertices = self.processor.entity_graph.get_vertices({
VProps.VITRAGE_CATEGORY: EntityCategory.RESOURCE,
VProps.VITRAGE_TYPE: resource_type
})
self.assertThat(resource_vertices, matchers.HasLength(3),
'Wrong number of vertices of type %s' % resource_type)

View File

@ -56,8 +56,8 @@ class TestActionExecutor(TestFunctionalBase, TestConfiguration):
cls.conf.register_opts(cls.DATASOURCES_OPTS, group='datasources')
cls.add_db(cls.conf)
for datasource_name in cls.conf.datasources.types:
register_opts(cls.conf, datasource_name, cls.conf.datasources.path)
for vitrage_type in cls.conf.datasources.types:
register_opts(cls.conf, vitrage_type, cls.conf.datasources.path)
def _init_executer(self):
event_queue = queue.Queue()

View File

@ -91,3 +91,10 @@ class MockDriver(StaticDriver):
del node[VProps.GRAPH_INDEX]
if VProps.VITRAGE_TYPE in node:
del node[VProps.VITRAGE_TYPE]
@staticmethod
def should_delete_outdated_entities():
# Unlike the static driver (its base class), the mock datasource
# pretends to create real entities that should not be deleted by the
# consistency service
return False

View File

@ -14,6 +14,7 @@
"resource_id": "3dcee183-ca42-4ccb-84af-9f0196b2e160",
"vitrage_event_type": "alarm.creation",
"vitrage_entity_type": "aodh",
"vitrage_datasource_name": "aodh",
"vitrage_datasource_action": "snapshot",
"vitrage_sample_date": "2016-11-29T06:31:50.094836",
"graph_query_result": [

View File

@ -14,6 +14,7 @@
"resource_id": "3dcee183-ca42-4ccb-84af-9f0196b2e160",
"vitrage_event_type": "alarm.creation",
"vitrage_entity_type": "aodh",
"vitrage_datasource_name": "aodh",
"vitrage_datasource_action": "update",
"vitrage_sample_date": "2016-11-29T06:31:50.094836",
"graph_query_result": [

View File

@ -2,6 +2,7 @@
"host": "compute-1",
"plugin": "ovs_events",
"vitrage_entity_type" : "collectd",
"vitrage_datasource_name": "collectd",
"vitrage_datasource_action" : "update",
"resource_type": "nova.host",
"resource_name": "compute-1",

View File

@ -2,6 +2,7 @@
"time": "2016-04-12T08:00:00.12345",
"type": "compute.host.down",
"vitrage_entity_type" : "doctor",
"vitrage_datasource_name": "doctor",
"vitrage_datasource_action" : "update",
"vitrage_sample_date": "2016-11-29T06:31:50.094836",
"details": {

View File

@ -10,6 +10,7 @@
"name": "vm[0-9]{3}",
"vitrage_event_type": "update",
"vitrage_entity_type": "nova.instance",
"vitrage_datasource_name": "nova.instance",
"vitrage_sample_date": "2015-12-01T12:46:41Z"
}

View File

@ -1,5 +1,6 @@
{
"vitrage_entity_type" : "prometheus",
"vitrage_datasource_name": "prometheus",
"vitrage_datasource_action" : "update",
"vitrage_sample_date": "2018-05-06T06:31:50.094836",
"status": "firing",

View File

@ -24,6 +24,7 @@ from vitrage.datasources.neutron.port import NEUTRON_PORT_DATASOURCE
from vitrage.datasources.nova.host import NOVA_HOST_DATASOURCE
from vitrage.datasources.nova.instance import NOVA_INSTANCE_DATASOURCE
from vitrage.datasources.nova.zone import NOVA_ZONE_DATASOURCE
from vitrage.datasources.static import STATIC_DATASOURCE
from vitrage.entity_graph.processor import processor as proc
from vitrage.graph.driver.networkx_graph import NXGraph
import vitrage.graph.utils as graph_utils
@ -49,7 +50,8 @@ class TestEntityGraphUnitBase(base.BaseTest):
NOVA_ZONE_DATASOURCE,
NEUTRON_NETWORK_DATASOURCE,
NEUTRON_PORT_DATASOURCE,
CINDER_VOLUME_DATASOURCE],
CINDER_VOLUME_DATASOURCE,
STATIC_DATASOURCE],
help='Names of supported data sources'),
cfg.ListOpt('path',
@ -175,18 +177,23 @@ class TestEntityGraphUnitBase(base.BaseTest):
)
@staticmethod
def _create_resource(vitrage_id, resource_type, project_id=None):
def _create_resource(vitrage_id, resource_type, project_id=None,
datasource_name=None, sample_timestamp=None,
is_deleted=False):
if not datasource_name:
datasource_name = resource_type
return graph_utils.create_vertex(
vitrage_id,
vitrage_category=EntityCategory.RESOURCE,
vitrage_type=resource_type,
vitrage_sample_timestamp=None,
vitrage_sample_timestamp=sample_timestamp,
update_timestamp=str(utcnow()),
vitrage_is_deleted=False,
vitrage_is_deleted=is_deleted,
vitrage_is_placeholder=False,
entity_id=vitrage_id,
entity_state='active',
project_id=project_id
project_id=project_id,
datasource_name=datasource_name,
)
def _num_total_expected_vertices(self):

View File

@ -55,8 +55,8 @@ class TestDatasourceInfoMapper(base.BaseTest):
@staticmethod
def _load_datasources(conf):
for datasource_name in conf.datasources.types:
register_opts(conf, datasource_name, conf.datasources.path)
for vitrage_type in conf.datasources.types:
register_opts(conf, vitrage_type, conf.datasources.path)
# noinspection PyAttributeOutsideInit,PyPep8Naming
@classmethod