From 037a71f6ed841a45d5c478a6d49171e2dfaccb15 Mon Sep 17 00:00:00 2001
From: Joshua Harlow
Date: Thu, 6 Feb 2014 18:20:56 -0800
Subject: [PATCH] Unify usage of storage error exception type

- Instead of formatting the cause into each message at the call site,
  let the StorageError constructor append the attached cause to the
  message itself.
- Handle cases in impl_dir that were not catching backend errors and
  re-raising them as storage errors.
- Unify usage of 'backend internal error' messaging.
- Pass the cause to storage errors where applicable.

Change-Id: Ieb14256b202ccbfa3a96f68fa35db7e40f92114d
---
 taskflow/exceptions.py                        |  2 +
 taskflow/persistence/backends/impl_dir.py     | 49 +++++++++++++------
 .../persistence/backends/impl_sqlalchemy.py   | 22 +++------
 .../persistence/backends/impl_zookeeper.py    |  2 +-
 4 files changed, 44 insertions(+), 31 deletions(-)

diff --git a/taskflow/exceptions.py b/taskflow/exceptions.py
index a6a05082..d8f64d9b 100644
--- a/taskflow/exceptions.py
+++ b/taskflow/exceptions.py
@@ -41,6 +41,8 @@ class StorageError(TaskFlowException):
     """Raised when logbook can not be read/saved/deleted."""

     def __init__(self, message, cause=None):
+        if cause is not None:
+            message += ": %s" % (cause)
         super(StorageError, self).__init__(message)
         self.cause = cause

diff --git a/taskflow/persistence/backends/impl_dir.py b/taskflow/persistence/backends/impl_dir.py
index 4db10229..8314ee43 100644
--- a/taskflow/persistence/backends/impl_dir.py
+++ b/taskflow/persistence/backends/impl_dir.py
@@ -109,8 +109,7 @@ class Connection(base.Connection):
         except Exception as e:
             LOG.exception("Failed running locking file based session")
             # NOTE(harlowja): trap all other errors as storage errors.
-            raise exc.StorageError("Failed running locking file based "
-                                   "session: %s" % e, e)
+            raise exc.StorageError("Storage backend internal error", e)

     def _get_logbooks(self):
         lb_uuids = []
@@ -127,8 +126,13 @@
             pass

     def get_logbooks(self):
-        for b in list(self._get_logbooks()):
-            yield b
+        try:
+            books = list(self._get_logbooks())
+        except EnvironmentError as e:
+            raise exc.StorageError("Unable to fetch logbooks", e)
+        else:
+            for b in books:
+                yield b

     @property
     def backend(self):
@@ -286,11 +290,20 @@

     def upgrade(self):
         def _step_create():
-            for d in (self._book_path, self._flow_path, self._task_path):
-                misc.ensure_tree(d)
+            for path in (self._book_path, self._flow_path, self._task_path):
+                try:
+                    misc.ensure_tree(path)
+                except EnvironmentError as e:
+                    raise exc.StorageError("Unable to create logbooks"
+                                           " required child path %s" % path, e)
+
+            for path in (self._backend.base_path, self._backend.lock_path):
+                try:
+                    misc.ensure_tree(path)
+                except EnvironmentError as e:
+                    raise exc.StorageError("Unable to create logbooks required"
+                                           " path %s" % path, e)

-            misc.ensure_tree(self._backend.base_path)
-            misc.ensure_tree(self._backend.lock_path)
         self._run_with_process_lock("init", _step_create)

     def clear_all(self):
@@ -316,32 +329,36 @@

         def _destroy_tasks(task_details):
             for task_detail in task_details:
+                task_path = os.path.join(self._task_path, task_detail.uuid)
                 try:
-                    shutil.rmtree(os.path.join(self._task_path,
-                                               task_detail.uuid))
+                    shutil.rmtree(task_path)
                 except EnvironmentError as e:
                     if e.errno != errno.ENOENT:
-                        raise
+                        raise exc.StorageError("Unable to remove task"
+                                               " directory %s" % task_path, e)

         def _destroy_flows(flow_details):
             for flow_detail in flow_details:
+                flow_path = os.path.join(self._flow_path, flow_detail.uuid)
                 self._run_with_process_lock("task", _destroy_tasks,
                                             list(flow_detail))
                 try:
-                    shutil.rmtree(os.path.join(self._flow_path,
-                                               flow_detail.uuid))
+                    shutil.rmtree(flow_path)
                 except EnvironmentError as e:
                     if e.errno != errno.ENOENT:
-                        raise
+                        raise exc.StorageError("Unable to remove flow"
+                                               " directory %s" % flow_path, e)

         def _destroy_book():
             book = self._get_logbook(book_uuid)
+            book_path = os.path.join(self._book_path, book.uuid)
             self._run_with_process_lock("flow", _destroy_flows, list(book))
             try:
-                shutil.rmtree(os.path.join(self._book_path, book.uuid))
+                shutil.rmtree(book_path)
             except EnvironmentError as e:
                 if e.errno != errno.ENOENT:
-                    raise
+                    raise exc.StorageError("Unable to remove book"
+                                           " directory %s" % book_path, e)

         # Acquire all locks by going through this little hierarchy.
         self._run_with_process_lock("book", _destroy_book)
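Note on the impl_dir changes: they all follow one wrapping pattern. Catch
EnvironmentError at the filesystem call, tolerate a missing path (ENOENT),
and re-raise anything else as a storage error carrying the original
exception as its cause. A minimal standalone sketch of that pattern follows;
the StorageError here mirrors the constructor change above, and the
remove_dir helper is illustrative, not part of taskflow:

import errno
import shutil


class StorageError(Exception):
    """Mirrors taskflow's StorageError after the change above."""

    def __init__(self, message, cause=None):
        if cause is not None:
            message += ": %s" % (cause)
        super(StorageError, self).__init__(message)
        self.cause = cause


def remove_dir(path):
    """Remove a directory tree, wrapping filesystem failures.

    A path that is already gone (ENOENT) is not an error; anything
    else surfaces as a StorageError with the cause attached.
    """
    try:
        shutil.rmtree(path)
    except EnvironmentError as e:
        if e.errno != errno.ENOENT:
            raise StorageError("Unable to remove directory %s" % path, e)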
diff --git a/taskflow/persistence/backends/impl_sqlalchemy.py b/taskflow/persistence/backends/impl_sqlalchemy.py
index b9d0c63f..23b34a99 100644
--- a/taskflow/persistence/backends/impl_sqlalchemy.py
+++ b/taskflow/persistence/backends/impl_sqlalchemy.py
@@ -308,16 +308,14 @@ class Connection(base.Connection):
             return functor(session, *args, **kwargs)
         except sa_exc.SQLAlchemyError as e:
             LOG.exception('Failed running database session')
-            raise exc.StorageError("Failed running database session: %s" % e,
-                                   e)
+            raise exc.StorageError("Storage backend internal error", e)

     def _make_session(self):
         try:
             return self._session_maker()
         except sa_exc.SQLAlchemyError as e:
             LOG.exception('Failed creating database session')
-            raise exc.StorageError("Failed creating database session: %s"
-                                   % e, e)
+            raise exc.StorageError("Failed creating database session", e)

     def upgrade(self):
         try:
@@ -334,8 +332,7 @@
                 migration.db_sync(conn)
         except sa_exc.SQLAlchemyError as e:
             LOG.exception('Failed upgrading database version')
-            raise exc.StorageError("Failed upgrading database version: %s" % e,
-                                   e)
+            raise exc.StorageError("Failed upgrading database version", e)

     def _clear_all(self, session):
         # NOTE(harlowja): due to how we have our relationship setup and
@@ -345,7 +342,7 @@
             return session.query(models.LogBook).delete()
         except sa_exc.DBAPIError as e:
             LOG.exception('Failed clearing all entries')
-            raise exc.StorageError("Failed clearing all entries: %s" % e, e)
+            raise exc.StorageError("Failed clearing all entries", e)

     def clear_all(self):
         return self._run_in_session(self._clear_all)
@@ -380,8 +377,7 @@
             session.delete(lb)
         except sa_exc.DBAPIError as e:
             LOG.exception('Failed destroying logbook')
-            raise exc.StorageError("Failed destroying"
-                                   " logbook %s: %s" % (lb_id, e), e)
+            raise exc.StorageError("Failed destroying logbook %s" % lb_id, e)

     def destroy_logbook(self, book_uuid):
         return self._run_in_session(self._destroy_logbook, lb_id=book_uuid)
@@ -402,8 +398,7 @@
             return _convert_lb_to_external(lb_m)
         except sa_exc.DBAPIError as e:
             LOG.exception('Failed saving logbook')
-            raise exc.StorageError("Failed saving logbook %s: %s" %
-                                   (lb.uuid, e), e)
+            raise exc.StorageError("Failed saving logbook %s" % lb.uuid, e)

     def save_logbook(self, book):
         return self._run_in_session(self._save_logbook, lb=book)
@@ -415,8 +410,7 @@
             return _convert_lb_to_external(lb)
         except sa_exc.DBAPIError as e:
             LOG.exception('Failed getting logbook')
-            raise exc.StorageError("Failed getting logbook %s: %s"
-                                   % (book_uuid, e), e)
+            raise exc.StorageError("Failed getting logbook %s" % book_uuid, e)

     def get_logbooks(self):
         session = self._make_session()
@@ -425,7 +419,7 @@
             books = [_convert_lb_to_external(lb) for lb in raw_books]
         except sa_exc.DBAPIError as e:
             LOG.exception('Failed getting logbooks')
-            raise exc.StorageError("Failed getting logbooks: %s" % e, e)
+            raise exc.StorageError("Failed getting logbooks", e)

         for lb in books:
             yield lb
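Note on the impl_sqlalchemy changes: because the constructor now appends
the cause, each call site only supplies a short operation-specific message,
while the generic translation of SQLAlchemyError into StorageError stays in
one wrapper. A rough sketch of that wrapper shape follows, assuming
taskflow and SQLAlchemy are importable; run_in_session is a simplified
stand-in for the Connection method above, not the module's actual code:

import logging

from sqlalchemy import exc as sa_exc

from taskflow import exceptions as exc

LOG = logging.getLogger(__name__)


def run_in_session(session_maker, functor, *args, **kwargs):
    """Run functor(session, ...), translating database errors.

    Any SQLAlchemyError escaping the functor is logged and re-raised
    as exc.StorageError; the resulting message reads
    "Storage backend internal error: <original error>".
    """
    session = session_maker()
    try:
        result = functor(session, *args, **kwargs)
        session.commit()
        return result
    except sa_exc.SQLAlchemyError as e:
        session.rollback()
        LOG.exception('Failed running database session')
        raise exc.StorageError("Storage backend internal error", e)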
exc.StorageError("Failed getting logbook %s" % book_uuid, e) def get_logbooks(self): session = self._make_session() @@ -425,7 +419,7 @@ class Connection(base.Connection): books = [_convert_lb_to_external(lb) for lb in raw_books] except sa_exc.DBAPIError as e: LOG.exception('Failed getting logbooks') - raise exc.StorageError("Failed getting logbooks: %s" % e, e) + raise exc.StorageError("Failed getting logbooks", e) for lb in books: yield lb diff --git a/taskflow/persistence/backends/impl_zookeeper.py b/taskflow/persistence/backends/impl_zookeeper.py index 382bbdff..73401d9a 100644 --- a/taskflow/persistence/backends/impl_zookeeper.py +++ b/taskflow/persistence/backends/impl_zookeeper.py @@ -138,7 +138,7 @@ class ZkConnection(base.Connection): except k_exc.NodeExistsError as e: raise exc.AlreadyExists("Storage backend duplicate node: %s" % e) except (k_exc.KazooException, k_exc.ZookeeperError) as e: - raise exc.StorageError("Storage backend internal error: %s" % e) + raise exc.StorageError("Storage backend internal error", e) def update_task_details(self, td): """Update a task_detail transactionally."""