# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of SQLAlchemy backend."""
import sys
from oslo_config import cfg
from oslo_db import exception as common_db_exc
from oslo_db.sqlalchemy import session as db_session
from oslo_log import log as logging
import sqlalchemy as sa
from fenix.db import exceptions as db_exc
from fenix.db.sqlalchemy import facade_wrapper
from fenix.db.sqlalchemy import models
LOG = logging.getLogger(__name__)
get_engine = facade_wrapper.get_engine
get_session = facade_wrapper.get_session
def get_backend():
"""The backend is this module itself."""
return sys.modules[__name__]
def model_query(model, session=None):
"""Query helper.
:param model: base model to query
"""
session = session or get_session()
return session.query(model)
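# Illustrative sketch (not part of the API): model_query() returns an ordinary
# SQLAlchemy Query, so call sites typically chain filter_by(), e.g.:
#
#     hosts = model_query(models.MaintenanceHost).filter_by(
#         session_id=session_id).all()
#
# Column names used in such filters must match the model definitions in
# fenix.db.sqlalchemy.models.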
def setup_db():
try:
engine = db_session.EngineFacade(cfg.CONF.database.connection,
sqlite_fk=True).get_engine()
models.MaintenanceSession.metadata.create_all(engine)
models.MaintenanceActionPlugin.metadata.create_all(engine)
models.MaintenanceActionPluginInstance.metadata.create_all(engine)
models.MaintenanceDownload.metadata.create_all(engine)
models.MaintenanceHost.metadata.create_all(engine)
models.MaintenanceProject.metadata.create_all(engine)
models.MaintenanceInstance.metadata.create_all(engine)
models.ProjectInstance.metadata.create_all(engine)
models.InstanceGroup.metadata.create_all(engine)
except sa.exc.OperationalError as e:
LOG.error("Database registration exception: %s", e)
return False
return True
def drop_db():
try:
db_session.EngineFacade(cfg.CONF.database.connection,
sqlite_fk=True).get_engine()
except Exception as e:
LOG.error("Database shutdown exception: %s", e)
return False
return True
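# Illustrative sketch (assumed service bootstrap, not defined in this module):
# callers are expected to have loaded oslo.config, including the
# [database]/connection option, before creating the schema, roughly:
#
#     from oslo_config import cfg
#     cfg.CONF(project='fenix')   # parses config files / CLI options
#     if not setup_db():
#         raise RuntimeError('maintenance DB schema creation failed')
#
# Note that drop_db() above only re-creates the engine facade; it does not
# remove any tables.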
# Helpers for building constraints / equality checks
def constraint(**conditions):
return Constraint(conditions)
def equal_any(*values):
return EqualityCondition(values)
def not_equal(*values):
return InequalityCondition(values)
def _selected_from_dict(selected, dict_source):
return {x: dict_source[x] for x in selected}
class Constraint(object):
def __init__(self, conditions):
self.conditions = conditions
def apply(self, model, query):
for key, condition in self.conditions.items():
for clause in condition.clauses(getattr(model, key)):
query = query.filter(clause)
return query
class EqualityCondition(object):
def __init__(self, values):
self.values = values
    def clauses(self, field):
        # Return a single OR clause so that Constraint.apply(), which filters
        # on each yielded clause, keeps the "equal any" (OR) semantics.
        return [sa.or_(*[field == value for value in self.values])]
class InequalityCondition(object):
def __init__(self, values):
self.values = values
def clauses(self, field):
return [field != value for value in self.values]
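# Illustrative sketch (helper usage only; the model/column choice is just an
# example): Constraint.apply() turns each condition into query.filter() calls,
# equal_any() producing an OR over its values and not_equal() one inequality
# per value:
#
#     cons = constraint(hostname=not_equal('compute-0', 'compute-1'))
#     query = cons.apply(models.MaintenanceHost,
#                        model_query(models.MaintenanceHost))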
# Maintenance session
def _maintenance_session_get(session, session_id):
query = model_query(models.MaintenanceSession, session)
return query.filter_by(session_id=session_id).first()
def maintenance_session_get(session_id):
return _maintenance_session_get(get_session(), session_id)
def _maintenance_session_get_all(session):
query = model_query(models.MaintenanceSession, session)
return query
def maintenance_session_get_all():
return _maintenance_session_get_all(get_session())
def create_session(values):
values = values.copy()
msession = models.MaintenanceSession()
msession.update(values)
session = get_session()
with session.begin():
try:
msession.save(session=session)
except common_db_exc.DBDuplicateEntry as e:
# raise exception about duplicated columns (e.columns)
raise db_exc.FenixDBDuplicateEntry(
model=msession.__class__.__name__, columns=e.columns)
return maintenance_session_get(msession.session_id)
def update_session(values):
session = get_session()
session_id = values.session_id
with session.begin():
msession = _maintenance_session_get(session,
session_id)
msession.update(values)
msession.save(session=session)
return maintenance_session_get(session_id)
def remove_session(session_id):
session = get_session()
with session.begin():
action_plugin_instances = _action_plugin_instances_get_all(session,
session_id)
if action_plugin_instances:
for action in action_plugin_instances:
session.delete(action)
action_plugins = _action_plugins_get_all(session, session_id)
if action_plugins:
for action in action_plugins:
session.delete(action)
downloads = _download_get_all(session, session_id)
if downloads:
for download in downloads:
session.delete(download)
hosts = _hosts_get(session, session_id)
if hosts:
for host in hosts:
session.delete(host)
projects = _projects_get(session, session_id)
if projects:
for project in projects:
session.delete(project)
instances = _instances_get(session, session_id)
if instances:
for instance in instances:
session.delete(instance)
msession = _maintenance_session_get(session, session_id)
if not msession:
# raise not found error
raise db_exc.FenixDBNotFound(model="MaintenanceSession",
id=session_id)
session.delete(msession)
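# Illustrative sketch of the session lifecycle (state values are placeholders;
# the real state machine lives outside this module):
#
#     msession = create_session({'session_id': session_id,
#                                'state': 'MAINTENANCE'})
#     msession.state = 'MAINTENANCE_DONE'
#     update_session(msession)
#     remove_session(session_id)   # also deletes dependent plugin, download,
#                                  # host, project and instance rows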
# Action Plugin
def _action_plugin_get(session, session_id, plugin):
query = model_query(models.MaintenanceActionPlugin, session)
return query.filter_by(session_id=session_id, plugin=plugin).first()
def action_plugin_get(session_id, plugin):
return _action_plugin_get(get_session(), session_id, plugin)
def _action_plugins_get_all(session, session_id):
query = model_query(models.MaintenanceActionPlugin, session)
return query.filter_by(session_id=session_id).all()
def action_plugins_get_all(session_id):
return _action_plugins_get_all(get_session(), session_id)
def create_action_plugin(values):
values = values.copy()
ap = models.MaintenanceActionPlugin()
ap.update(values)
if action_plugin_get(ap.session_id, ap.plugin):
raise db_exc.FenixDBDuplicateEntry(
model=ap.__class__.__name__, columns=ap.plugin)
session = get_session()
with session.begin():
try:
ap.save(session=session)
except common_db_exc.DBDuplicateEntry as e:
# raise exception about duplicated columns (e.columns)
raise db_exc.FenixDBDuplicateEntry(
model=ap.__class__.__name__, columns=e.columns)
return action_plugin_get(ap.session_id, ap.plugin)
def create_action_plugins(values_list):
for values in values_list:
vals = values.copy()
session = get_session()
with session.begin():
ap = models.MaintenanceActionPlugin()
ap.update(vals)
if action_plugin_get(ap.session_id, ap.plugin):
raise db_exc.FenixDBDuplicateEntry(
model=ap.__class__.__name__, columns=ap.plugin)
try:
ap.save(session=session)
except common_db_exc.DBDuplicateEntry as e:
# raise exception about duplicated columns (e.columns)
raise db_exc.FenixDBDuplicateEntry(
model=ap.__class__.__name__, columns=e.columns)
return action_plugins_get_all(ap.session_id)
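# Illustrative sketch (plugin names are placeholders; see
# models.MaintenanceActionPlugin for the actual columns):
#
#     create_action_plugins([
#         {'session_id': session_id, 'plugin': 'dummy'},
#         {'session_id': session_id, 'plugin': 'os-upgrade'}])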
# Action Plugin Instance
def _action_plugin_instance_get(session, session_id, plugin, hostname):
query = model_query(models.MaintenanceActionPluginInstance, session)
return query.filter_by(session_id=session_id, plugin=plugin,
hostname=hostname).first()
def action_plugin_instance_get(session_id, plugin, hostname):
return _action_plugin_instance_get(get_session(), session_id, plugin,
hostname)
def _action_plugin_instances_get_all(session, session_id):
query = model_query(models.MaintenanceActionPluginInstance, session)
return query.filter_by(session_id=session_id).all()
def action_plugin_instances_get_all(session_id):
return _action_plugin_instances_get_all(get_session(), session_id)
def update_action_plugin_instance(values):
session = get_session()
session_id = values.session_id
plugin = values.plugin
hostname = values.hostname
with session.begin():
ap_instance = _action_plugin_instance_get(session,
session_id,
plugin,
hostname)
ap_instance.update(values)
ap_instance.save(session=session)
return action_plugin_instance_get(session_id, plugin, hostname)
def create_action_plugin_instance(values):
values = values.copy()
ap_instance = models.MaintenanceActionPluginInstance()
ap_instance.update(values)
session = get_session()
if _action_plugin_instance_get(session,
ap_instance.session_id,
ap_instance.plugin,
ap_instance.hostname):
selected = ['session_id', 'plugin', 'hostname']
raise db_exc.FenixDBDuplicateEntry(
model=ap_instance.__class__.__name__,
columns=str(_selected_from_dict(selected,
ap_instance.to_dict())))
with session.begin():
try:
ap_instance.save(session=session)
except common_db_exc.DBDuplicateEntry as e:
# raise exception about duplicated columns (e.columns)
raise db_exc.FenixDBDuplicateEntry(
model=ap_instance.__class__.__name__, columns=e.columns)
return action_plugin_instance_get(ap_instance.session_id,
ap_instance.plugin,
ap_instance.hostname)
def remove_action_plugin_instance(ap_instance):
session = get_session()
if not _action_plugin_instance_get(session,
ap_instance.session_id,
ap_instance.plugin,
ap_instance.hostname):
selected = ['session_id', 'plugin', 'hostname']
raise db_exc.FenixDBNotFound(model=ap_instance.__class__.__name__,
id=str(_selected_from_dict(selected,
ap_instance.to_dict())))
with session.begin():
session.delete(ap_instance)
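# Illustrative sketch (the 'state' values are placeholders): one row tracks a
# plugin's execution on a single host, so the typical flow is create, update
# and finally remove:
#
#     api = create_action_plugin_instance({'session_id': session_id,
#                                          'plugin': 'os-upgrade',
#                                          'hostname': 'compute-1',
#                                          'state': None})
#     api.state = 'DONE'
#     update_action_plugin_instance(api)
#     remove_action_plugin_instance(api)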
# Download
def _download_get(session, session_id, local_file):
query = model_query(models.MaintenanceDownload, session)
return query.filter_by(session_id=session_id,
local_file=local_file).first()
def download_get(session_id, local_file):
return _download_get(get_session(), session_id, local_file)
def _download_get_all(session, session_id):
query = model_query(models.MaintenanceDownload, session)
return query.filter_by(session_id=session_id).all()
def download_get_all(session_id):
return _download_get_all(get_session(), session_id)
def create_downloads(values_list):
for values in values_list:
vals = values.copy()
session = get_session()
with session.begin():
d = models.MaintenanceDownload()
d.update(vals)
if _download_get(session, d.session_id, d.local_file):
selected = ['local_file']
raise db_exc.FenixDBDuplicateEntry(
model=d.__class__.__name__,
columns=str(_selected_from_dict(selected, vals)))
try:
d.save(session=session)
except common_db_exc.DBDuplicateEntry as e:
# raise exception about duplicated columns (e.columns)
raise db_exc.FenixDBDuplicateEntry(
model=d.__class__.__name__, columns=e.columns)
    return download_get_all(d.session_id)
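# Illustrative sketch (the file path is a placeholder): download rows record
# per-session software downloads keyed by 'local_file':
#
#     create_downloads([{'session_id': session_id,
#                        'local_file': '/tmp/fenix/sw.tar.gz'}])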
# Host
def _host_get(session, session_id, hostname):
query = model_query(models.MaintenanceHost, session)
return query.filter_by(session_id=session_id, hostname=hostname).first()
def host_get(session_id, hostname):
return _host_get(get_session(), session_id, hostname)
def _hosts_get(session, session_id):
query = model_query(models.MaintenanceHost, session)
return query.filter_by(session_id=session_id).all()
def hosts_get(session_id):
return _hosts_get(get_session(), session_id)
def create_host(values):
values = values.copy()
mhost = models.MaintenanceHost()
mhost.update(values)
session = get_session()
if _host_get(session, mhost.session_id, mhost.hostname):
selected = ['hostname']
raise db_exc.FenixDBDuplicateEntry(
model=mhost.__class__.__name__,
columns=str(_selected_from_dict(selected,
mhost.to_dict())))
with session.begin():
try:
mhost.save(session=session)
except common_db_exc.DBDuplicateEntry as e:
# raise exception about duplicated columns (e.columns)
raise db_exc.FenixDBDuplicateEntry(
model=mhost.__class__.__name__, columns=e.columns)
return host_get(mhost.session_id, mhost.hostname)
def update_host(values):
session = get_session()
session_id = values.session_id
hostname = values.hostname
with session.begin():
mhost = _host_get(session, session_id, hostname)
mhost.update(values)
mhost.save(session=session)
return host_get(session_id, hostname)
def create_hosts(values_list):
for values in values_list:
vals = values.copy()
session = get_session()
with session.begin():
mhost = models.MaintenanceHost()
mhost.update(vals)
if _host_get(session, mhost.session_id, mhost.hostname):
selected = ['hostname']
raise db_exc.FenixDBDuplicateEntry(
model=mhost.__class__.__name__,
columns=str(_selected_from_dict(selected,
mhost.to_dict())))
try:
mhost.save(session=session)
except common_db_exc.DBDuplicateEntry as e:
# raise exception about duplicated columns (e.columns)
raise db_exc.FenixDBDuplicateEntry(
model=mhost.__class__.__name__, columns=e.columns)
return hosts_get(mhost.session_id)
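# Illustrative sketch (column values other than session_id/hostname are
# placeholders; see models.MaintenanceHost): hosts are added in bulk when the
# session starts and updated one by one as maintenance proceeds:
#
#     create_hosts([{'session_id': session_id, 'hostname': 'compute-1',
#                    'type': 'compute', 'maintained': False}])
#     host = host_get(session_id, 'compute-1')
#     host.maintained = True
#     update_host(host)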
# Project
def _project_get(session, session_id, project_id):
query = model_query(models.MaintenanceProject, session)
return query.filter_by(session_id=session_id,
project_id=project_id).first()
def project_get(session_id, project_id):
return _project_get(get_session(), session_id, project_id)
def _projects_get(session, session_id):
query = model_query(models.MaintenanceProject, session)
return query.filter_by(session_id=session_id).all()
def projects_get(session_id):
return _projects_get(get_session(), session_id)
def create_project(values):
values = values.copy()
mproject = models.MaintenanceProject()
mproject.update(values)
session = get_session()
if _project_get(session, mproject.session_id, mproject.project_id):
selected = ['project_id']
raise db_exc.FenixDBDuplicateEntry(
model=mproject.__class__.__name__,
columns=str(_selected_from_dict(selected,
mproject.to_dict())))
with session.begin():
try:
mproject.save(session=session)
except common_db_exc.DBDuplicateEntry as e:
# raise exception about duplicated columns (e.columns)
raise db_exc.FenixDBDuplicateEntry(
model=mproject.__class__.__name__, columns=e.columns)
return project_get(mproject.session_id, mproject.project_id)
def update_project(values):
session = get_session()
session_id = values.session_id
project_id = values.project_id
with session.begin():
mproject = _project_get(session, session_id, project_id)
mproject.update(values)
mproject.save(session=session)
return project_get(session_id, project_id)
def create_projects(values_list):
for values in values_list:
vals = values.copy()
session = get_session()
with session.begin():
mproject = models.MaintenanceProject()
mproject.update(vals)
if _project_get(session, mproject.session_id,
mproject.project_id):
selected = ['project_id']
raise db_exc.FenixDBDuplicateEntry(
model=mproject.__class__.__name__,
columns=str(_selected_from_dict(selected,
mproject.to_dict())))
try:
mproject.save(session=session)
except common_db_exc.DBDuplicateEntry as e:
# raise exception about duplicated columns (e.columns)
raise db_exc.FenixDBDuplicateEntry(
model=mproject.__class__.__name__, columns=e.columns)
return projects_get(mproject.session_id)
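# Illustrative sketch ('state' is a placeholder; see models.MaintenanceProject
# for the actual columns):
#
#     create_projects([{'session_id': session_id,
#                       'project_id': project_id,
#                       'state': None}])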
# Instance
def _instance_get(session, session_id, instance_id):
query = model_query(models.MaintenanceInstance, session)
return query.filter_by(session_id=session_id,
instance_id=instance_id).first()
def instance_get(session_id, instance_id):
return _instance_get(get_session(), session_id, instance_id)
def _instances_get(session, session_id):
query = model_query(models.MaintenanceInstance, session)
return query.filter_by(session_id=session_id).all()
def instances_get(session_id):
return _instances_get(get_session(), session_id)
def update_instance(values):
session = get_session()
session_id = values.session_id
instance_id = values.instance_id
with session.begin():
minstance = _instance_get(session, session_id, instance_id)
minstance.update(values)
minstance.save(session=session)
return instance_get(session_id, instance_id)
def create_instance(values):
values = values.copy()
minstance = models.MaintenanceInstance()
minstance.update(values)
session = get_session()
if _instance_get(session, minstance.session_id,
minstance.instance_id):
selected = ['instance_id']
raise db_exc.FenixDBDuplicateEntry(
model=minstance.__class__.__name__,
columns=str(_selected_from_dict(selected,
minstance.to_dict())))
with session.begin():
try:
minstance.save(session=session)
except common_db_exc.DBDuplicateEntry as e:
# raise exception about duplicated columns (e.columns)
raise db_exc.FenixDBDuplicateEntry(
model=minstance.__class__.__name__, columns=e.columns)
return instance_get(minstance.session_id, minstance.instance_id)
def create_instances(values_list):
for values in values_list:
vals = values.copy()
session = get_session()
with session.begin():
minstance = models.MaintenanceInstance()
minstance.update(vals)
if _instance_get(session, minstance.session_id,
minstance.instance_id):
selected = ['instance_id']
raise db_exc.FenixDBDuplicateEntry(
model=minstance.__class__.__name__,
columns=str(_selected_from_dict(selected,
minstance.to_dict())))
try:
minstance.save(session=session)
except common_db_exc.DBDuplicateEntry as e:
# raise exception about duplicated columns (e.columns)
raise db_exc.FenixDBDuplicateEntry(
model=minstance.__class__.__name__, columns=e.columns)
return instances_get(minstance.session_id)
def remove_instance(session_id, instance_id):
session = get_session()
with session.begin():
minstance = _instance_get(session, session_id, instance_id)
if not minstance:
# raise not found error
raise db_exc.FenixDBNotFound(model='MaintenanceInstance',
id=instance_id)
session.delete(minstance)
# Project instances
def _project_instance_get(session, instance_id):
query = model_query(models.ProjectInstance, session)
return query.filter_by(instance_id=instance_id).first()
def project_instance_get(instance_id):
return _project_instance_get(get_session(), instance_id)
def update_project_instance(values):
values = values.copy()
session = get_session()
with session.begin():
minstance = _project_instance_get(session, values['instance_id'])
if not minstance:
minstance = models.ProjectInstance()
minstance.update(values)
try:
minstance.save(session=session)
except common_db_exc.DBDuplicateEntry as e:
# raise exception about duplicated columns (e.columns)
raise db_exc.FenixDBDuplicateEntry(
model=minstance.__class__.__name__, columns=e.columns)
return project_instance_get(minstance.instance_id)
def remove_project_instance(instance_id):
session = get_session()
with session.begin():
minstance = _project_instance_get(session, instance_id)
if not minstance:
# raise not found error
raise db_exc.FenixDBNotFound(model='ProjectInstance',
id=instance_id)
session.delete(minstance)
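# Illustrative sketch (keys beyond instance_id are examples; see
# models.ProjectInstance for the full schema): update_project_instance() acts
# as an upsert, creating the row when the instance_id is not yet known:
#
#     update_project_instance({'instance_id': instance_id,
#                              'project_id': project_id,
#                              'group_id': group_id,
#                              'instance_name': 'vm-0'})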
# Instances groups
def _instance_group_get(session, group_id):
query = model_query(models.InstanceGroup, session)
return query.filter_by(group_id=group_id).first()
def instance_group_get(group_id):
return _instance_group_get(get_session(), group_id)
def _instance_groups_get(session):
query = model_query(models.InstanceGroup, session)
return query.all()
def instance_groups_get():
return _instance_groups_get(get_session())
def _group_instances_get(session, group_id):
query = model_query(models.ProjectInstance, session)
return query.filter_by(group_id=group_id).all()
def group_instances_get(group_id):
return _group_instances_get(get_session(), group_id)
def update_instance_group(values):
values = values.copy()
session = get_session()
with session.begin():
ig = _instance_group_get(session, values['group_id'])
if not ig:
ig = models.InstanceGroup()
ig.update(values)
try:
ig.save(session=session)
except common_db_exc.DBDuplicateEntry as e:
# raise exception about duplicated columns (e.columns)
raise db_exc.FenixDBDuplicateEntry(
model=ig.__class__.__name__, columns=e.columns)
return instance_group_get(ig.group_id)
def remove_instance_group(group_id):
session = get_session()
with session.begin():
ig = _instance_group_get(session, group_id)
if not ig:
# raise not found error
raise db_exc.FenixDBNotFound(model='InstanceGroup',
id=group_id)
session.delete(ig)
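# Illustrative sketch (keys beyond group_id are examples; see
# models.InstanceGroup for the full schema): update_instance_group() is the
# matching upsert for group definitions:
#
#     update_instance_group({'group_id': group_id,
#                            'project_id': project_id,
#                            'group_name': 'app-tier',
#                            'max_instances_per_host': 1})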