From a5be14f2fee59622b104b7797a449e21f4319978 Mon Sep 17 00:00:00 2001
From: Dawid Deja
Date: Fri, 2 Dec 2016 11:14:17 +0100
Subject: [PATCH] Fix for failing kombu gate

MySQL rounds microseconds to the nearest second, which leads to a time
inconsistency between what is returned to the client and what is stored
in the DB. This patch changes the behaviour so that no microseconds are
generated.

Closes-bug: 1644881
Change-Id: I514c1d5154b3c658ec74c88b800d2a3ded1fdad9
---
 mistral/db/sqlalchemy/model_base.py           |  2 ++
 .../unit/db/v2/test_sqlalchemy_db_api.py      | 29 ++++++++++-------
 .../unit/engine/test_direct_workflow_rerun.py | 16 ++++++----
 .../engine/test_reverse_workflow_rerun.py     | 31 ++++++++++++++-----
 mistral/tests/unit/engine/test_state_info.py  | 21 +++++++++++--
 mistral/utils/__init__.py                     |  8 +++++
 6 files changed, 80 insertions(+), 27 deletions(-)

diff --git a/mistral/db/sqlalchemy/model_base.py b/mistral/db/sqlalchemy/model_base.py
index 7b053c4f..7975b027 100644
--- a/mistral/db/sqlalchemy/model_base.py
+++ b/mistral/db/sqlalchemy/model_base.py
@@ -122,6 +122,8 @@ class MistralSecureModelBase(MistralModelBase):
     scope = sa.Column(sa.String(80), default='private')
     project_id = sa.Column(sa.String(80), default=security.get_project_id)
+    created_at = sa.Column(sa.DateTime, default=lambda: utils.utc_now_sec())
+    updated_at = sa.Column(sa.DateTime, onupdate=lambda: utils.utc_now_sec())
 
 
 def _set_project_id(target, value, oldvalue, initiator):
diff --git a/mistral/tests/unit/db/v2/test_sqlalchemy_db_api.py b/mistral/tests/unit/db/v2/test_sqlalchemy_db_api.py
index 5fc0a3a6..c8549f8f 100644
--- a/mistral/tests/unit/db/v2/test_sqlalchemy_db_api.py
+++ b/mistral/tests/unit/db/v2/test_sqlalchemy_db_api.py
@@ -40,7 +40,8 @@ WORKBOOKS = [
         'scope': 'public',
         'updated_at': None,
         'project_id': '1233',
-        'trust_id': '1234'
+        'trust_id': '1234',
+        'created_at': datetime.datetime(2016, 12, 1, 15, 0, 0)
     },
     {
         'name': 'my_workbook2',
@@ -51,7 +52,8 @@
         'scope': 'private',
         'updated_at': None,
         'project_id': '1233',
-        'trust_id': '12345'
+        'trust_id': '12345',
+        'created_at': datetime.datetime(2016, 12, 1, 15, 1, 0)
     },
 ]
@@ -368,7 +370,8 @@ WF_DEFINITIONS = [
         'tags': ['mc'],
         'scope': 'public',
         'project_id': '1233',
-        'trust_id': '1234'
+        'trust_id': '1234',
+        'created_at': datetime.datetime(2016, 12, 1, 15, 0, 0)
     },
     {
         'name': 'my_wf2',
@@ -377,7 +380,8 @@
         'tags': ['mc'],
         'scope': 'private',
         'project_id': '1233',
-        'trust_id': '12345'
+        'trust_id': '12345',
+        'created_at': datetime.datetime(2016, 12, 1, 15, 1, 0)
     },
 ]
@@ -830,7 +834,8 @@ ACTION_DEFINITIONS = [
         'is_system': True,
         'action_class': 'mypackage.my_module.Action1',
         'attributes': None,
-        'project_id': ''
+        'project_id': '',
+        'created_at': datetime.datetime(2016, 12, 1, 15, 0, 0)
     },
     {
         'name': 'action2',
@@ -838,7 +843,8 @@
         'is_system': True,
         'action_class': 'mypackage.my_module.Action2',
         'attributes': None,
-        'project_id': ''
+        'project_id': '',
+        'created_at': datetime.datetime(2016, 12, 1, 15, 1, 0)
     },
     {
         'name': 'action3',
@@ -847,7 +853,8 @@
         'tags': ['mc', 'abc'],
         'action_class': 'mypackage.my_module.Action3',
         'attributes': None,
-        'project_id': ''
+        'project_id': '',
+        'created_at': datetime.datetime(2016, 12, 1, 15, 2, 0)
     },
 ]
@@ -1308,7 +1315,7 @@ WF_EXECS = [
         'start_params': {'task': 'my_task1'},
         'state': 'IDLE',
         'state_info': "Running...",
-        'created_at': None,
+        'created_at': datetime.datetime(2016, 12, 1, 15, 0, 0),
         'updated_at': None,
         'context': None,
         'task_id': None,
@@ -1321,7 +1328,7 @@ WF_EXECS = [
         'start_params': {'task': 'my_task1'},
         'state': 'RUNNING',
         'state_info': "Running...",
-        'created_at': None,
+        'created_at': datetime.datetime(2016, 12, 1, 15, 1, 0),
         'updated_at': None,
         'context': {'image_id': '123123'},
         'task_id': None,
@@ -1647,7 +1654,7 @@ TASK_EXECS = [
         'tags': ['deployment'],
         'in_context': None,
         'runtime_context': None,
-        'created_at': None,
+        'created_at': datetime.datetime(2016, 12, 1, 15, 0, 0),
         'updated_at': None
     },
     {
@@ -1660,7 +1667,7 @@
         'tags': ['deployment'],
         'in_context': {'image_id': '123123'},
         'runtime_context': None,
-        'created_at': None,
+        'created_at': datetime.datetime(2016, 12, 1, 15, 1, 0),
         'updated_at': None
     },
 ]
diff --git a/mistral/tests/unit/engine/test_direct_workflow_rerun.py b/mistral/tests/unit/engine/test_direct_workflow_rerun.py
index 1736d468..7d097791 100644
--- a/mistral/tests/unit/engine/test_direct_workflow_rerun.py
+++ b/mistral/tests/unit/engine/test_direct_workflow_rerun.py
@@ -759,13 +759,17 @@ class DirectWorkflowRerunTest(base.EngineTestCase):
             'Task 1.2 [%s]' % updated_env['var1']  # Task 1 item 2 (rerun).
         ]
 
-        result = zip(task_1_action_exs, expected_inputs)
+        # Assert that every expected input is in the actual task input.
+        for action_ex in task_1_action_exs:
+            self.assertIn(action_ex.input['output'], expected_inputs)
 
-        for (action_ex, expected_input) in result:
-            self.assertDictEqual(
-                {'output': expected_input},
-                action_ex.input
-            )
+        # Assert there is the same number of unique inputs as action execs.
+        self.assertEqual(
+            len(task_1_action_exs),
+            len(set(
+                [action_ex.input['output'] for action_ex in task_1_action_exs]
+            ))
+        )
 
         # Check action executions of task 2.
         self.assertEqual(states.SUCCESS, task_2_ex.state)
diff --git a/mistral/tests/unit/engine/test_reverse_workflow_rerun.py b/mistral/tests/unit/engine/test_reverse_workflow_rerun.py
index 555bb24d..a9d8e88a 100644
--- a/mistral/tests/unit/engine/test_reverse_workflow_rerun.py
+++ b/mistral/tests/unit/engine/test_reverse_workflow_rerun.py
@@ -269,16 +269,33 @@ class ReverseWorkflowRerunTest(base.EngineTestCase):
         )
 
         self.assertEqual(2, len(task_2_action_exs))
 
-        self.assertEqual(states.ERROR, task_2_action_exs[0].state)
-        self.assertEqual(states.SUCCESS, task_2_action_exs[1].state)
 
-        self.assertDictEqual(
-            {'output': env['var1']},
-            task_2_action_exs[0].input
+        # Assert that one action ex is in the ERROR state and one in SUCCESS.
+        self.assertIn(
+            task_2_action_exs[0].state,
+            [states.ERROR, states.SUCCESS]
+        )
+        self.assertIn(
+            task_2_action_exs[1].state,
+            [states.ERROR, states.SUCCESS]
+        )
+        self.assertNotEqual(
+            task_2_action_exs[0].state,
+            task_2_action_exs[1].state
         )
 
-        self.assertDictEqual(
-            {'output': updated_env['var1']},
+        # Assert that one action ex got the first env and one got the second.
+        self.assertIn(
+            task_2_action_exs[0].input['output'],
+            [env['var1'], updated_env['var1']]
+        )
+        self.assertIn(
+            task_2_action_exs[1].input['output'],
+            [env['var1'], updated_env['var1']]
+        )
+
+        self.assertNotEqual(
+            task_2_action_exs[0].input,
             task_2_action_exs[1].input
         )
diff --git a/mistral/tests/unit/engine/test_state_info.py b/mistral/tests/unit/engine/test_state_info.py
index b7339003..f92a62ac 100644
--- a/mistral/tests/unit/engine/test_state_info.py
+++ b/mistral/tests/unit/engine/test_state_info.py
@@ -151,6 +151,21 @@ class ExecutionStateInfoTest(base.EngineTestCase):
         )
 
         self.assertEqual(3, len(task_1_action_exs))
-        self.assertIn(task_1_action_exs[0].id, wf_ex.state_info)
-        self.assertNotIn(task_1_action_exs[1].id, wf_ex.state_info)
-        self.assertIn(task_1_action_exs[2].id, wf_ex.state_info)
+
+        error_actions = [
+            action_ex for action_ex in task_1_action_exs if
+            action_ex.state == states.ERROR
+        ]
+        self.assertEqual(2, len(error_actions))
+
+        success_actions = [
+            action_ex for action_ex in task_1_action_exs if
+            action_ex.state == states.SUCCESS
+        ]
+        self.assertEqual(1, len(success_actions))
+
+        for action_ex in error_actions:
+            self.assertIn(action_ex.id, wf_ex.state_info)
+
+        for action_ex in success_actions:
+            self.assertNotIn(action_ex.id, wf_ex.state_info)
diff --git a/mistral/utils/__init__.py b/mistral/utils/__init__.py
index 25022548..8662f87d 100644
--- a/mistral/utils/__init__.py
+++ b/mistral/utils/__init__.py
@@ -31,6 +31,7 @@ import threading
 import eventlet
 from eventlet import corolocal
 from oslo_concurrency import processutils
+from oslo_utils import timeutils
 from oslo_utils import uuidutils
 import pkg_resources as pkg
 import random
@@ -364,3 +365,10 @@ def generate_key_pair(key_length=2048):
     public_key = open(public_key_path).read()
 
     return private_key, public_key
+
+
+def utc_now_sec():
+    """Returns current time and drops microseconds."""
+
+    d = timeutils.utcnow()
+    return d.replace(microsecond=0)
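A minimal, self-contained sketch (not part of the patch) of the behaviour the
commit message describes: a MySQL DATETIME column without fractional seconds
rounds sub-second values, so the timestamp handed back to the client can
differ from the row that ends up in the DB unless microseconds are dropped
before the write. The sketch uses the standard library's datetime instead of
oslo_utils.timeutils, which the patch itself relies on, and the sample
timestamp is made up for illustration.

import datetime


def utc_now_sec():
    """Return the current UTC time with microseconds dropped."""
    return datetime.datetime.utcnow().replace(microsecond=0)


# A timestamp carrying microseconds, e.g. 11:14:17.900000.
with_usec = datetime.datetime(2016, 12, 2, 11, 14, 17, 900000)

# MySQL would round this to 11:14:18 on INSERT, so the stored row would no
# longer match the value already returned to the client. Truncating before
# the write removes the discrepancy.
truncated = with_usec.replace(microsecond=0)

print(with_usec)                  # 2016-12-02 11:14:17.900000
print(truncated)                  # 2016-12-02 11:14:17
print(utc_now_sec().microsecond)  # always 0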