Changes project layout
Changing project layout so that we have the same dir structure for both inventory and workflow APIs. This is so that workflow can use the same db models as inventory. Change-Id: I17896ccd76209f726b7197e1db1a3b513b7c2f5e
This commit is contained in:
5
craton/db/__init__.py
Normal file
5
craton/db/__init__.py
Normal file
@@ -0,0 +1,5 @@
|
||||
"""
|
||||
DB abstraction for Craton Inventory
|
||||
"""
|
||||
|
||||
from craton.db.api import * # noqa
|
||||
187
craton/db/api.py
Normal file
187
craton/db/api.py
Normal file
@@ -0,0 +1,187 @@
|
||||
"""Defines interface for DB access."""
|
||||
|
||||
from collections import namedtuple
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_db import api as db_api
|
||||
|
||||
# Options controlling which DB backend implementation is loaded.
db_opts = [
    cfg.StrOpt('db_backend', default='sqlalchemy',
               help='The backend to use for DB.'),
]

CONF = cfg.CONF
CONF.register_opts(db_opts)

# entrypoint namespace for db backend
BACKEND_MAPPING = {'sqlalchemy': 'craton.db.sqlalchemy.api'}
# Lazy proxy: the module named by CONF.db_backend (via BACKEND_MAPPING)
# is only imported on first attribute access.
IMPL = db_api.DBAPI.from_config(cfg.CONF, backend_mapping=BACKEND_MAPPING,
                                lazy=True)
|
||||
|
||||
|
||||
def get_user_info(context, user):
    """Fetch the stored record for *user* from the configured backend."""
    return IMPL.get_user_info(context, user)
|
||||
|
||||
|
||||
# Devices
|
||||
|
||||
# (source, variable) pair naming where a resolved variable value came from.
Blame = namedtuple('Blame', ['source', 'variable'])


def device_blame_variables(device, keys=None):
    """Determine the sources of how variables have been set for a device.

    :param device: device to get blame information
    :param keys: keys to check sourcing, or all keys if None

    Returns a dict mapping each key to a Blame named tuple of
    (source, variable); note that variable contains certain
    audit/governance information (created_at, modified_at).

    TODO(jimbaker) further extend schema on mixed-in variable tables
    to capture additional governance, such as user who set the key;
    this will then transparently become available in the blame.
    """
    if keys is None:
        keys = device.resolved.keys()
    # Precedence order: the device itself, then its labels, then cell,
    # then region.  A device may have no cell (cell_id is nullable in
    # the schema), so drop None sources instead of letting the
    # `source._variables` access raise AttributeError below.
    candidates = [device] + list(device.labels) + [device.cell, device.region]
    sources = [source for source in candidates if source is not None]
    blamed = {}
    for key in keys:
        for source in sources:
            try:
                blamed[key] = Blame(source, source._variables[key])
                break
            except KeyError:
                # Not set at this level; fall through to the next source.
                pass
    return blamed
|
||||
|
||||
|
||||
# Cells
#
# Thin pass-throughs to the configured backend (IMPL) for cell CRUD.


def cells_get_all(context, region):
    """Get all available cells."""
    return IMPL.cells_get_all(context, region)


def cells_get_by_name(context, region, cell):
    """Get cell detail for the cell in given region."""
    return IMPL.cells_get_by_name(context, region, cell)


def cells_get_by_id(context, cell_id):
    """Get cell detail for the unique cell id."""
    return IMPL.cells_get_by_id(context, cell_id)


def cells_create(context, values):
    """Create a new cell."""
    return IMPL.cells_create(context, values)


def cells_update(context, cell_id, values):
    """Update an existing cell."""
    return IMPL.cells_update(context, cell_id, values)


def cells_delete(context, cell_id):
    """Delete an existing cell."""
    return IMPL.cells_delete(context, cell_id)


def cells_data_update(context, cell_id, data):
    """Update existing cell variables or create them when not present."""
    return IMPL.cells_data_update(context, cell_id, data)


def cells_data_delete(context, cell_id, data_key):
    """Delete the existing key (variable) from cells data."""
    return IMPL.cells_data_delete(context, cell_id, data_key)
|
||||
|
||||
# Regions
#
# Thin pass-throughs to the configured backend (IMPL) for region CRUD.


def regions_get_all(context):
    """Get all available regions."""
    return IMPL.regions_get_all(context)


def regions_get_by_name(context, name):
    """Get region detail for the region with given name."""
    return IMPL.regions_get_by_name(context, name)


def regions_get_by_id(context, region_id):
    """Get region detail for the region with given id."""
    return IMPL.regions_get_by_id(context, region_id)


def regions_create(context, values):
    """Create a new region."""
    return IMPL.regions_create(context, values)


def regions_update(context, region_id, values):
    """Update an existing region."""
    return IMPL.regions_update(context, region_id, values)


def regions_delete(context, region_id):
    """Delete an existing region."""
    return IMPL.regions_delete(context, region_id)


def regions_data_update(context, region_id, data):
    """
    Update existing region variables or create when its not present.
    """
    return IMPL.regions_data_update(context, region_id, data)


def regions_data_delete(context, region_id, data_key):
    """Delete the existing key (variable) from region data."""
    return IMPL.regions_data_delete(context, region_id, data_key)
|
||||
|
||||
# Hosts
#
# Thin pass-throughs to the configured backend (IMPL) for host CRUD.


def hosts_get_by_region_cell(context, region_id, cell_id, filters):
    """Get all hosts for region/cell."""
    return IMPL.hosts_get_by_region_cell(context, region_id, cell_id, filters)


def hosts_get_by_region(context, region_id, filters):
    """Get all hosts for this region."""
    return IMPL.hosts_get_by_region(context, region_id, filters)


def hosts_get_by_id(context, host_id):
    """Get details for the host with given id."""
    return IMPL.hosts_get_by_id(context, host_id)


def hosts_create(context, values):
    """Create a new host."""
    return IMPL.hosts_create(context, values)


def hosts_update(context, host_id, values):
    """Update an existing host."""
    return IMPL.hosts_update(context, host_id, values)


def hosts_delete(context, host_id):
    """Delete an existing host."""
    return IMPL.hosts_delete(context, host_id)


def hosts_data_update(context, host_id, data):
    """
    Update existing host variables or create them when not present.
    """
    return IMPL.hosts_data_update(context, host_id, data)


def hosts_data_delete(context, host_id, data_key):
    """Delete the existing key (variable) from hosts data."""
    return IMPL.hosts_data_delete(context, host_id, data_key)
|
||||
0
craton/db/sqlalchemy/__init__.py
Normal file
0
craton/db/sqlalchemy/__init__.py
Normal file
68
craton/db/sqlalchemy/alembic.ini
Normal file
68
craton/db/sqlalchemy/alembic.ini
Normal file
@@ -0,0 +1,68 @@
|
||||
# A generic, single database configuration.
|
||||
|
||||
[alembic]
|
||||
# path to migration scripts
|
||||
script_location = %(here)s/alembic
|
||||
|
||||
# template used to generate migration files
|
||||
# file_template = %%(rev)s_%%(slug)s
|
||||
|
||||
# max length of characters to apply to the
|
||||
# "slug" field
|
||||
#truncate_slug_length = 40
|
||||
|
||||
# set to 'true' to run the environment during
|
||||
# the 'revision' command, regardless of autogenerate
|
||||
# revision_environment = false
|
||||
|
||||
# set to 'true' to allow .pyc and .pyo files without
|
||||
# a source .py file to be detected as revisions in the
|
||||
# versions/ directory
|
||||
# sourceless = false
|
||||
|
||||
# version location specification; this defaults
|
||||
# to alembic/versions. When using multiple version
|
||||
# directories, initial revisions must be specified with --version-path
|
||||
# version_locations = %(here)s/bar %(here)s/bat alembic/versions
|
||||
|
||||
# the output encoding used when revision files
|
||||
# are written from script.py.mako
|
||||
# output_encoding = utf-8
|
||||
|
||||
#sqlalchemy.url = driver://user:pass@localhost/dbname
|
||||
|
||||
|
||||
# Logging configuration
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
||||
11
craton/db/sqlalchemy/alembic/README
Normal file
11
craton/db/sqlalchemy/alembic/README
Normal file
@@ -0,0 +1,11 @@
|
||||
Please see https://alembic.readthedocs.org/en/latest/index.html for general documentation
|
||||
|
||||
To create alembic migrations use:
|
||||
$ craton-inventory-dbsync --config-file=craton.conf revision --message "revision description" --autogenerate
|
||||
|
||||
Stamp db with most recent migration version, without actually running migrations
|
||||
$ craton-inventory-dbsync --config-file=craton.conf stamp head
|
||||
|
||||
Upgrade can be performed by:
|
||||
$ craton-inventory-dbsync --config-file=craton.conf upgrade
|
||||
$ craton-inventory-dbsync --config-file=craton.conf upgrade head
|
||||
66
craton/db/sqlalchemy/alembic/env.py
Normal file
66
craton/db/sqlalchemy/alembic/env.py
Normal file
@@ -0,0 +1,66 @@
|
||||
from __future__ import with_statement
|
||||
from alembic import context
|
||||
from logging.config import fileConfig
|
||||
|
||||
from craton.db.sqlalchemy import api as sa_api
|
||||
from craton.db.sqlalchemy import models as db_models
|
||||
|
||||
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
# Autogenerated diffs are computed against the craton model metadata.
target_metadata = db_models.Base.metadata
|
||||
|
||||
# other values from the config, defined by the needs of env.py,
|
||||
# can be acquired:
|
||||
# my_important_option = config.get_main_option("my_important_option")
|
||||
# ... etc.
|
||||
|
||||
|
||||
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    # literal_binds renders bound parameters inline so the emitted
    # SQL script is self-contained.
    context.configure(
        url=url, target_metadata=target_metadata, literal_binds=True)

    with context.begin_transaction():
        context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    # Engine comes from the craton sqlalchemy backend (oslo.db facade),
    # not from the alembic.ini sqlalchemy.url setting.
    engine = sa_api.get_engine()
    with engine.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()
|
||||
|
||||
|
||||
run_migrations_online()
|
||||
24
craton/db/sqlalchemy/alembic/script.py.mako
Normal file
24
craton/db/sqlalchemy/alembic/script.py.mako
Normal file
@@ -0,0 +1,24 @@
|
||||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = ${repr(up_revision)}
|
||||
down_revision = ${repr(down_revision)}
|
||||
branch_labels = ${repr(branch_labels)}
|
||||
depends_on = ${repr(depends_on)}
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
def upgrade():
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade():
|
||||
${downgrades if downgrades else "pass"}
|
||||
@@ -0,0 +1,214 @@
|
||||
"""craton_inventory_init
|
||||
|
||||
Revision ID: ffdc1a500db1
|
||||
Revises:
|
||||
Create Date: 2016-06-03 09:52:55.302936
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'ffdc1a500db1'
|
||||
down_revision = None
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
import sqlalchemy_utils
|
||||
|
||||
|
||||
def upgrade():
    # commands auto generated by Alembic - please adjust!
    # Leaf/lookup tables first (no foreign keys out).
    op.create_table(
        'access_secrets',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('cert', sa.Text(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table(
        'labels',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('label', sa.String(length=255), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('label')
    )
    op.create_table(
        'projects',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    # All *_variables tables share the (parent_id, key) -> JSON value
    # layout, keyed to their parent table.
    op.create_table(
        'label_variables',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('parent_id', sa.Integer(), nullable=False),
        sa.Column('key', sa.String(length=255), nullable=False),
        sa.Column('value', sqlalchemy_utils.types.json.JSONType(),
                  nullable=True),
        sa.ForeignKeyConstraint(['parent_id'], ['labels.id'], ),
        sa.PrimaryKeyConstraint('parent_id', 'key')
    )
    op.create_table(
        'regions',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('project_id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=True),
        sa.Column('note', sa.Text(), nullable=True),
        sa.ForeignKeyConstraint(['project_id'], ['projects.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('project_id', 'name',
                            name='uq_region0projectid0name')
    )
    op.create_index(op.f('ix_regions_project_id'),
                    'regions', ['project_id'], unique=False)
    op.create_table(
        'users',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('project_id', sa.Integer(), nullable=False),
        sa.Column('username', sa.String(length=255), nullable=True),
        sa.Column('api_key', sa.String(length=36), nullable=True),
        sa.Column('is_admin', sa.Boolean(), nullable=True),
        sa.Column('roles', sqlalchemy_utils.types.json.JSONType(),
                  nullable=True),
        sa.ForeignKeyConstraint(['project_id'], ['projects.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('username', 'project_id',
                            name='uq_user0username0project')
    )
    op.create_index(op.f('ix_users_project_id'), 'users', ['project_id'],
                    unique=False)
    op.create_table(
        'cells',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('region_id', sa.Integer(), nullable=False),
        sa.Column('project_id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=True),
        sa.Column('note', sa.Text(), nullable=True),
        sa.ForeignKeyConstraint(['project_id'], ['projects.id'], ),
        sa.ForeignKeyConstraint(['region_id'], ['regions.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('region_id', 'name', name='uq_cell0regionid0name')
    )
    op.create_index(op.f('ix_cells_project_id'), 'cells', ['project_id'],
                    unique=False)
    op.create_index(op.f('ix_cells_region_id'), 'cells', ['region_id'],
                    unique=False)
    op.create_table(
        'region_variables',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('parent_id', sa.Integer(), nullable=False),
        sa.Column('key', sa.String(length=255), nullable=False),
        sa.Column('value', sqlalchemy_utils.types.json.JSONType(),
                  nullable=True),
        sa.ForeignKeyConstraint(['parent_id'], ['regions.id'], ),
        sa.PrimaryKeyConstraint('parent_id', 'key')
    )
    op.create_table(
        'cell_variables',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('parent_id', sa.Integer(), nullable=False),
        sa.Column('key', sa.String(length=255), nullable=False),
        sa.Column('value', sqlalchemy_utils.types.json.JSONType(),
                  nullable=True),
        sa.ForeignKeyConstraint(['parent_id'], ['cells.id'], ),
        sa.PrimaryKeyConstraint('parent_id', 'key')
    )
    # Polymorphic base table; 'type' is the discriminator column and
    # cell_id is nullable (a device need not belong to a cell).
    op.create_table(
        'devices',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('type', sa.String(length=50), nullable=True),
        sa.Column('device_type', sa.String(length=255), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('region_id', sa.Integer(), nullable=False),
        sa.Column('cell_id', sa.Integer(), nullable=True),
        sa.Column('project_id', sa.Integer(), nullable=False),
        sa.Column('ip_address',
                  sqlalchemy_utils.types.IPAddressType(length=64),
                  nullable=False),
        sa.Column('active', sa.Boolean(), nullable=True),
        sa.Column('note', sa.Text(), nullable=True),
        sa.ForeignKeyConstraint(['cell_id'], ['cells.id'], ),
        sa.ForeignKeyConstraint(['project_id'], ['projects.id'], ),
        sa.ForeignKeyConstraint(['region_id'], ['regions.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('region_id', 'name',
                            name='uq_device0regionid0name')
    )
    op.create_index(op.f('ix_devices_cell_id'), 'devices', ['cell_id'],
                    unique=False)
    op.create_index(op.f('ix_devices_project_id'), 'devices', ['project_id'],
                    unique=False)
    op.create_index(op.f('ix_devices_region_id'), 'devices', ['region_id'],
                    unique=False)
    op.create_table(
        'device_labels',
        sa.Column('device_id', sa.Integer(), nullable=False),
        sa.Column('label_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['device_id'], ['devices.id'], ),
        sa.ForeignKeyConstraint(['label_id'], ['labels.id'], ),
        sa.PrimaryKeyConstraint('device_id', 'label_id')
    )
    op.create_table(
        'device_variables',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('parent_id', sa.Integer(), nullable=False),
        sa.Column('key', sa.String(length=255), nullable=False),
        sa.Column('value', sqlalchemy_utils.types.json.JSONType(),
                  nullable=True),
        sa.ForeignKeyConstraint(['parent_id'], ['devices.id'], ),
        sa.PrimaryKeyConstraint('parent_id', 'key')
    )
    # Joined-table subclass of devices (hosts.id -> devices.id).
    op.create_table(
        'hosts',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('access_secret_id', sa.Integer(), nullable=True),
        sa.Column('parent_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['access_secret_id'], ['access_secrets.id'], ),
        sa.ForeignKeyConstraint(['id'], ['devices.id'], ),
        sa.ForeignKeyConstraint(['parent_id'], ['hosts.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # end Alembic commands
|
||||
|
||||
|
||||
def downgrade():
    # commands auto generated by Alembic - please adjust!
    # Drop in reverse dependency order of upgrade().
    op.drop_table('hosts')
    op.drop_table('device_variables')
    op.drop_table('device_labels')
    op.drop_index(op.f('ix_devices_region_id'), table_name='devices')
    op.drop_index(op.f('ix_devices_project_id'), table_name='devices')
    op.drop_index(op.f('ix_devices_cell_id'), table_name='devices')
    op.drop_table('devices')
    op.drop_table('cell_variables')
    op.drop_table('region_variables')
    op.drop_index(op.f('ix_cells_region_id'), table_name='cells')
    op.drop_index(op.f('ix_cells_project_id'), table_name='cells')
    op.drop_table('cells')
    op.drop_index(op.f('ix_users_project_id'), table_name='users')
    op.drop_table('users')
    op.drop_index(op.f('ix_regions_project_id'), table_name='regions')
    op.drop_table('regions')
    op.drop_table('label_variables')
    op.drop_table('projects')
    op.drop_table('labels')
    op.drop_table('access_secrets')
    # end Alembic commands
|
||||
435
craton/db/sqlalchemy/api.py
Normal file
435
craton/db/sqlalchemy/api.py
Normal file
@@ -0,0 +1,435 @@
|
||||
"""SQLAlchemy backend implementation."""
|
||||
|
||||
import functools
import sys

from oslo_config import cfg
from oslo_db import options as db_options
from oslo_db.sqlalchemy import session
from oslo_db.sqlalchemy import utils as db_utils
from oslo_log import log

import sqlalchemy.orm.exc as sa_exc
from sqlalchemy.orm import with_polymorphic

from craton import exceptions
from craton.db.sqlalchemy import models
|
||||
|
||||
|
||||
CONF = cfg.CONF

LOG = log.getLogger(__name__)

# Cached EngineFacade; created lazily on first DB access.
_FACADE = None

# Fallback connection used when no [database] connection is configured.
_DEFAULT_SQL_CONNECTION = 'sqlite://'
db_options.set_defaults(cfg.CONF,
                        connection=_DEFAULT_SQL_CONNECTION)
||||
|
||||
|
||||
def _create_facade_lazily():
    """Return the process-wide EngineFacade, creating it on first use."""
    global _FACADE
    if _FACADE is None:
        _FACADE = session.EngineFacade.from_config(cfg.CONF)
    return _FACADE
|
||||
|
||||
|
||||
def get_engine():
    """Return the SQLAlchemy engine from the shared facade."""
    facade = _create_facade_lazily()
    return facade.get_engine()
|
||||
|
||||
|
||||
def get_session(**kwargs):
    """Return a new session from the shared facade.

    kwargs are passed through to EngineFacade.get_session.
    """
    facade = _create_facade_lazily()
    return facade.get_session(**kwargs)
|
||||
|
||||
|
||||
def get_backend():
    """The backend is this module itself.

    Called by oslo.db's DBAPI loader (see craton.db.api BACKEND_MAPPING).
    """
    return sys.modules[__name__]
|
||||
|
||||
|
||||
def is_admin_context(context):
    """Check if this request had admin context."""
    # FIXME(sulo): fix after we have Users table
    return True


def require_admin_context(f):
    """Decorator that ensures admin request context.

    The first positional argument of the decorated function must be the
    request context; exceptions.AdminRequired is raised when it is not
    an admin context.
    """

    # functools.wraps preserves the wrapped function's name/docstring,
    # which plain wrapping would clobber.
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        if not is_admin_context(args[0]):
            raise exceptions.AdminRequired()
        return f(*args, **kwargs)
    return wrapper
|
||||
|
||||
|
||||
def model_query(context, model, *args, **kwargs):
    """Query helper that accounts for context's `read_deleted` field.

    :param context: context to query under
    :param model: model to query. Must be a subclass of ModelBase.
    :param session: if present, the session to use
    :param project_only: if present and context is user-type, then restrict
        query to match the context's project_id.
    """
    session = kwargs.get('session') or get_session()
    project_only = kwargs.get('project_only')
    # Rebuild kwargs: 'session'/'project_only' are consumed above and
    # must not be passed through as filter criteria below.
    kwargs = dict()

    if project_only and not context.is_admin:
        # Non-admin callers only see rows belonging to their project.
        kwargs['project_id'] = context.tenant

    return db_utils.model_query(
        model=model, session=session, args=args, **kwargs)
|
||||
|
||||
|
||||
def get_user_info(context, username):
    """Get user info.

    Raises exceptions.NotFound when no user matches *username* within
    the context's project.
    """
    query = model_query(context, models.User, project_only=True)
    query = query.filter_by(username=username)
    try:
        return query.one()
    except sa_exc.NoResultFound:
        raise exceptions.NotFound()
    except Exception as err:
        raise exceptions.UnknownException(message=err)
|
||||
|
||||
|
||||
def cells_get_all(context, region):
    """Get all cells."""
    query = model_query(context, models.Cell, project_only=True)
    # region is optional: None returns cells across all regions.
    if region is not None:
        query = query.filter_by(region_id=region)

    try:
        return query.all()
    except sa_exc.NoResultFound:
        raise exceptions.NotFound()
    except Exception as err:
        raise exceptions.UnknownException(message=err)
|
||||
|
||||
|
||||
def cells_get_by_name(context, region_id, cell_id):
    """Get cell details given for a given cell in a region."""
    try:
        # NOTE(review): cell_id is matched against the cell *name*
        # column, matching the API-level cells_get_by_name contract.
        query = model_query(context, models.Cell).\
            filter_by(region_id=region_id).\
            filter_by(name=cell_id)
        return query.one()
    except sa_exc.NoResultFound:
        raise exceptions.NotFound()
|
||||
|
||||
|
||||
def cells_get_by_id(context, cell_id):
    """Get cell details given for a given cell id."""
    try:
        query = model_query(context, models.Cell).\
            filter_by(id=cell_id)
        return query.one()
    except sa_exc.NoResultFound:
        raise exceptions.NotFound()
|
||||
|
||||
|
||||
def cells_create(context, values):
    """Create a new cell from the given column values."""
    session = get_session()
    cell = models.Cell()
    with session.begin():
        cell.update(values)
        cell.save(session)
    return cell
|
||||
|
||||
|
||||
def cells_update(context, cell_id, values):
    """Update an existing cell.

    sa_exc.NoResultFound propagates to the caller when the cell does
    not exist.
    """
    session = get_session()
    with session.begin():
        query = model_query(context, models.Cell, session=session,
                            project_only=True)
        query = query.filter_by(id=cell_id)
        # The original wrapped this in `try/except Exception: raise`,
        # which is a no-op; any exception propagates unchanged.
        cell_ref = query.with_lockmode('update').one()

        cell_ref.update(values)
        cell_ref.save(session)
        return cell_ref
|
||||
|
||||
|
||||
def cells_delete(context, cell_id):
    """Delete an existing cell."""
    session = get_session()
    with session.begin():
        query = model_query(context, models.Cell, session=session,
                            project_only=True)
        query = query.filter_by(id=cell_id)
        # Bulk delete; silently a no-op when the cell is already gone.
        query.delete()
|
||||
|
||||
|
||||
def cells_data_update(context, cell_id, data):
    """Update existing cells variables or create when
    its not present.
    """
    session = get_session()
    with session.begin():
        query = model_query(context, models.Cell, session=session,
                            project_only=True)
        query = query.filter_by(id=cell_id)

        try:
            # Lock the row so concurrent variable updates serialize.
            cell_ref = query.with_lockmode('update').one()
        except sa_exc.NoResultFound:
            # cell does not exist so cant do this
            raise

        for key in data:
            cell_ref.variables[key] = data[key]

        return cell_ref
|
||||
|
||||
|
||||
def cells_data_delete(context, cell_id, data):
    """Delete the existing key (variable) from cells data.

    :param data: mapping whose keys name the variables to remove.
    """
    session = get_session()
    with session.begin():
        query = model_query(context, models.Cell, session=session,
                            project_only=True)
        query = query.filter_by(id=cell_id)

        try:
            cell_ref = query.with_lockmode('update').one()
        except sa_exc.NoResultFound:
            # cell does not exist so cant do this
            raise

        for key in data:
            try:
                # Delete by variable *key*; the original deleted
                # cell_ref.variables[data[key]] (the payload value),
                # which never matches the stored variable keys.
                del cell_ref.variables[key]
            except KeyError:
                # This key does not exist so just ignore
                pass

        return cell_ref
|
||||
|
||||
|
||||
def regions_get_all(context):
    """Get all available regions."""
    query = model_query(context, models.Region, project_only=True)
    try:
        return query.all()
    except sa_exc.NoResultFound:
        raise exceptions.NotFound()
|
||||
|
||||
|
||||
def regions_get_by_name(context, name):
    """Get region detail for the region with given name."""
    query = model_query(context, models.Region, project_only=True)
    query = query.filter_by(name=name)
    try:
        return query.one()
    except sa_exc.NoResultFound:
        raise exceptions.NotFound()
|
||||
|
||||
|
||||
def regions_get_by_id(context, region_id):
    """Get region detail for the region with given id."""
    query = model_query(context, models.Region, project_only=True)
    query = query.filter_by(id=region_id)
    try:
        return query.one()
    except sa_exc.NoResultFound:
        raise exceptions.NotFound()
|
||||
|
||||
|
||||
def regions_create(context, values):
    """Create a new region from the given column values."""
    session = get_session()
    region = models.Region()
    with session.begin():
        region.update(values)
        region.save(session)
    return region
|
||||
|
||||
|
||||
def regions_update(context, region_id, values):
    """Update an existing region.

    Currently a no-op and returns None.
    """
    # We dont have anything to update right now
    pass
|
||||
|
||||
|
||||
def regions_delete(context, region_id):
    """Delete an existing region."""
    session = get_session()
    with session.begin():
        query = model_query(context, models.Region, session=session,
                            project_only=True)
        query = query.filter_by(id=region_id)
        # Bulk delete; silently a no-op when the region is already gone.
        query.delete()
    return
|
||||
|
||||
|
||||
def regions_data_update(context, region_id, data):
    """
    Update existing region variables or create when its not present.
    """
    session = get_session()
    with session.begin():
        query = model_query(context, models.Region, session=session,
                            project_only=True)
        query = query.filter_by(id=region_id)

        try:
            # Lock the row so concurrent variable updates serialize.
            region_ref = query.with_lockmode('update').one()
        except sa_exc.NoResultFound:
            # region does not exist so cant do this
            raise

        for key in data:
            region_ref.variables[key] = data[key]

        return region_ref
|
||||
|
||||
|
||||
def regions_data_delete(context, region_id, data):
    """Delete the existing key (variable) from region data.

    :param data: mapping whose keys name the variables to remove.
    """
    session = get_session()
    with session.begin():
        query = model_query(context, models.Region, session=session,
                            project_only=True)
        query = query.filter_by(id=region_id)

        try:
            region_ref = query.with_lockmode('update').one()
        except sa_exc.NoResultFound:
            # region does not exist so cant do this
            raise

        for key in data:
            try:
                # Delete by variable *key*; the original deleted
                # region_ref.variables[data[key]] (the payload value),
                # which never matches the stored variable keys.
                del region_ref.variables[key]
            except KeyError:
                # This key does not exist so just ignore
                pass

        return region_ref
|
||||
|
||||
|
||||
def hosts_get_by_region(context, region_id, filters):
    """Get all hosts for this region.

    :param region_id: ID for the region
    :param filters: filter dict; supported keys are "name",
        "ip_address", "id", "cell" and "device_type"
    :returns: list of matching hosts (may be empty)
    :raises: exceptions.UnknownException on unexpected DB errors
    """
    host_devices = with_polymorphic(models.Device, [models.Host])
    query = model_query(context, host_devices, project_only=True)
    query = query.filter_by(region_id=region_id)

    if "name" in filters:
        query = query.filter_by(name=filters["name"])
    if "ip_address" in filters:
        query = query.filter_by(ip_address=filters["ip_address"])
    if "id" in filters:
        query = query.filter_by(id=filters["id"])
    if "cell" in filters:
        query = query.filter_by(cell_id=filters["cell"])
    if "device_type" in filters:
        query = query.filter_by(device_type=filters["device_type"])

    try:
        # NOTE: Query.all() never raises NoResultFound (only .one()
        # does), so the previous NotFound branch here was unreachable;
        # an empty region simply yields an empty list.
        result = query.all()
    except Exception as err:
        raise exceptions.UnknownException(message=err)
    return result
def hosts_get_by_id(context, host_id):
    """Get details for the host with given id.

    :param host_id: ID of the host to look up
    :returns: the matching Device/Host row
    :raises: exceptions.NotFound when no host matches;
        exceptions.UnknownException on unexpected DB errors
    """
    host_devices = with_polymorphic(models.Device, '*')
    query = model_query(context, host_devices, project_only=True).\
        filter_by(id=host_id)
    try:
        result = query.one()
        # Use lazy %-style logger args so the message is only
        # formatted when the log level is actually enabled.
        LOG.info("Result by host id %s", result)
    except sa_exc.NoResultFound:
        LOG.error("No result found for host with id %s", host_id)
        raise exceptions.NotFound()
    except Exception as err:
        raise exceptions.UnknownException(message=err)
    return result
def hosts_create(context, values):
    """Create a new host from the given field values."""
    session = get_session()
    new_host = models.Host()
    with session.begin():
        new_host.update(values)
        new_host.save(session)
    return new_host
def hosts_update(context, host_id, values):
    """Update an existing host.

    Not implemented yet: always returns None without touching the
    database.
    """
    return None
def hosts_delete(context, host_id):
    """Delete an existing host."""
    session = get_session()
    with session.begin():
        # Query across the full Device polymorphic hierarchy so the
        # joined-table host row is covered as well.
        device_poly = with_polymorphic(models.Device, '*')
        host_query = model_query(
            context, device_poly, session=session,
            project_only=True).filter_by(id=host_id)
        host_query.delete()
def hosts_data_update(context, host_id, data):
    """Create or update variables (key/value data) for a host.

    Existing keys are overwritten; new keys are created.
    Raises exceptions.NotFound when the host does not exist.
    """
    session = get_session()
    with session.begin():
        device_poly = with_polymorphic(models.Device, '*')
        host_query = model_query(
            context, device_poly, session=session,
            project_only=True).filter_by(id=host_id)

        try:
            # Row lock so concurrent variable writes serialize.
            host_ref = host_query.with_lockmode('update').one()
        except sa_exc.NoResultFound:
            raise exceptions.NotFound()

        for key, value in data.items():
            host_ref.variables[key] = value

        return host_ref
def hosts_data_delete(context, host_id, data):
    """Delete existing keys (variables) from host data.

    :param host_id: ID of the host whose variables are pruned
    :param data: mapping whose keys name the variables to remove;
        keys not present on the host are silently ignored
    :returns: the (locked) host ORM object
    :raises: exceptions.NotFound if the host does not exist
    """
    session = get_session()
    with session.begin():
        host_devices = with_polymorphic(models.Device, '*')
        query = model_query(context, host_devices, session=session,
                            project_only=True)
        query = query.filter_by(id=host_id)

        try:
            host_ref = query.with_lockmode('update').one()
        except sa_exc.NoResultFound:
            raise exceptions.NotFound()

        for key in data:
            try:
                # NOTE: delete by the variable *key* (mirrors
                # hosts_data_update, which writes variables[key]).
                # The previous data[key] lookup indexed by value and
                # could never match the stored variable names.
                del host_ref.variables[key]
            except KeyError:
                # Key not present; nothing to remove.
                pass

        return host_ref
52
craton/db/sqlalchemy/migration.py
Normal file
52
craton/db/sqlalchemy/migration.py
Normal file
@@ -0,0 +1,52 @@
|
||||
import os
|
||||
|
||||
import alembic
|
||||
from alembic import config as alembic_config
|
||||
import alembic.migration as alembic_migration
|
||||
from oslo_db.sqlalchemy import enginefacade
|
||||
|
||||
|
||||
def _alembic_config():
    """Load the alembic config stored alongside this module."""
    ini_path = os.path.join(os.path.dirname(__file__), 'alembic.ini')
    return alembic_config.Config(ini_path)
def version(config=None, engine=None):
    """Return the database's current migration revision.

    :param engine: optional engine; defaults to the legacy
        enginefacade engine when not supplied
    """
    if engine is None:
        engine = enginefacade.get_legacy_facade().get_engine()
    with engine.connect() as conn:
        migration_ctx = alembic_migration.MigrationContext.configure(conn)
        return migration_ctx.get_current_revision()
def upgrade(revision, config=None):
    """Upgrade the database to the given alembic revision.

    :param revision: desired database revision; falls back to 'head'
        (the newest migration) when falsy
    :param config: optional alembic config; loaded from alembic.ini
        when not supplied
    """
    revision = revision or 'head'
    config = config or _alembic_config()

    # `revision` was already defaulted above; the original repeated
    # the `or 'head'` fallback redundantly in the call.
    alembic.command.upgrade(config, revision)
def stamp(revision, config=None):
    """Stamp the database with the given revision; run no migrations.

    :param revision: a revision from the repository, or 'head' to
        stamp the database with the most recent revision
    """
    cfg = config or _alembic_config()
    return alembic.command.stamp(cfg, revision=revision)
def revision(message=None, autogenerate=False, config=None):
    """Create a template for a new migration.

    :param message: text that will be used for the migration title
    :param autogenerate: if True, generate a diff based on the
        current database state
    """
    cfg = config or _alembic_config()
    return alembic.command.revision(
        cfg, message=message, autogenerate=autogenerate)
300
craton/db/sqlalchemy/models.py
Normal file
300
craton/db/sqlalchemy/models.py
Normal file
@@ -0,0 +1,300 @@
|
||||
"""Models inventory, as defined using SQLAlchemy ORM
|
||||
There are three independent parts to a specific workflow execution:
|
||||
* configuration, as managed by a GitHub-like versioned set of config
|
||||
files (as used by Ansible and similar systems)
|
||||
* specific workflow, which is written in Python (eg with TaskFlow)
|
||||
* inventory of hosts for a given project, as organized by region, cell,
|
||||
and labels, with overrides on variables; this module models that for
|
||||
SQLAlchemy
|
||||
In particular, this means that the configuration is used to interpret
|
||||
any inventory data.
|
||||
"""
|
||||
|
||||
try:
|
||||
from collections import ChainMap
|
||||
except ImportError:
|
||||
# else get the backport of this Python 3 functionality
|
||||
from chainmap import ChainMap
|
||||
from operator import attrgetter
|
||||
|
||||
from oslo_db.sqlalchemy import models
|
||||
from sortedcontainers import SortedSet
|
||||
from sqlalchemy import (
|
||||
Boolean, Column, ForeignKey, Integer, String, Table, Text,
|
||||
UniqueConstraint)
|
||||
from sqlalchemy.ext.associationproxy import association_proxy
|
||||
from sqlalchemy.ext.declarative import declarative_base, declared_attr
|
||||
from sqlalchemy.orm import object_mapper, relationship
|
||||
from sqlalchemy.orm.collections import attribute_mapped_collection
|
||||
from sqlalchemy_utils.types.ip_address import IPAddressType
|
||||
from sqlalchemy_utils.types.json import JSONType
|
||||
|
||||
|
||||
# TODO(jimbaker) set up table args for a given database/storage
|
||||
# engine, as configured. See
|
||||
# https://github.com/rackerlabs/craton/issues/19
|
||||
|
||||
|
||||
class CratonBase(models.ModelBase, models.TimestampMixin):
    """Shared model base: timestamps plus a readable ``__repr__``."""

    def __repr__(self):
        mapper = object_mapper(self)
        # Default to the primary key unless a model opts in to a
        # specific column list via `_repr_columns`.
        cols = getattr(self, '_repr_columns', mapper.primary_key)
        props = [mapper.get_property_by_column(c) for c in cols]
        rendered = ', '.join(
            '{0}={1!r}'.format(p.key, getattr(self, p.key)) for p in props)
        return '{0}({1})'.format(self.__class__.__name__, rendered)
# All models derive from this declarative base, inheriting the
# timestamp columns and __repr__ behavior from CratonBase.
Base = declarative_base(cls=CratonBase)
class VariableMixin(object):
    """Some metaprogramming so we can avoid repeating this construction.

    Mixing this in gives a model a companion ``<vars_tablename>``
    table and dict-style ``variables`` access to it; the host class
    must define ``vars_tablename``.
    """

    @declared_attr
    def _variables(cls):
        """Relationship to the per-model variables table.

        Dynamically builds a Variable model class named after
        ``cls.vars_tablename`` and maps it as a dict keyed on the
        variable key.
        """
        # Camelcase the tablename to give the Variable inner class
        # here a specific class name; necessary for reporting on
        # classes
        class_name = \
            "".join(x.title() for x in cls.vars_tablename[:-1].split('_'))

        # Because we are constructing Variable inner class with the
        # 3-arg `type` function, we need to pull out all SA columns
        # given that initialization order matters for SA!
        #
        # * Defines the primary key with correct ordering
        # * Captures references, as seen in _repr_columns
        parent_id = Column(ForeignKey(
            '%s.id' % cls.__tablename__), primary_key=True)
        key = Column(String(255), primary_key=True)
        value = Column(JSONType)
        Variable = type(class_name, (Base,), {
            '__tablename__': cls.vars_tablename,
            'parent_id': parent_id,
            'key': key,
            'value': value,
            '_repr_columns': [key, value]})

        # Need a reference for the association proxy to lookup the
        # Variable class so it can reference
        cls.variable_class = Variable

        return relationship(
            Variable,
            collection_class=attribute_mapped_collection('key'),
            cascade='all, delete-orphan', lazy='joined')

    @declared_attr
    def variables(cls):
        """Dict-style access mapping variable key -> value."""
        return association_proxy(
            '_variables', 'value',
            creator=lambda key, value: cls.variable_class(key=key,
                                                          value=value))

    @classmethod
    def with_characteristic(self, key, value):
        # Query filter helper: matches rows having a variable with
        # both this key and this value.
        return self._variables.any(key=key, value=value)
class Project(Base):
    """Supports multitenancy for all other schema elements."""
    __tablename__ = 'projects'
    id = Column(Integer, primary_key=True)
    name = Column(String(255))
    # Columns shown by CratonBase.__repr__.
    _repr_columns = [id, name]

    # TODO(jimbaker) we will surely need to define more columns, but
    # this suffices to define multitenancy for MVP

    # one-to-many relationship with the following objects
    regions = relationship('Region', back_populates='project')
    cells = relationship('Cell', back_populates='project')
    devices = relationship('Device', back_populates='project')
    users = relationship('User', back_populates='project')
class User(Base):
    """API user; usernames are unique within a project."""
    __tablename__ = 'users'
    __table_args__ = (
        UniqueConstraint("username", "project_id",
                         name="uq_user0username0project"),
    )
    id = Column(Integer, primary_key=True)
    project_id = Column(
        Integer, ForeignKey('projects.id'), index=True, nullable=False)
    username = Column(String(255))
    # NOTE(review): String(36) suggests a UUID-shaped token; confirm
    # against the auth code that issues it.
    api_key = Column(String(36))
    is_admin = Column(Boolean, default=False)
    # JSON-encoded collection of role names.
    roles = Column(JSONType)

    project = relationship('Project', back_populates='users')
class Region(Base, VariableMixin):
    """Top-level grouping of cells and devices within a project."""
    __tablename__ = 'regions'
    __table_args__ = (
        UniqueConstraint("project_id", "name",
                         name="uq_region0projectid0name"),
    )
    # Backing table used by VariableMixin for this model's variables.
    vars_tablename = 'region_variables'
    id = Column(Integer, primary_key=True)
    project_id = Column(
        Integer, ForeignKey('projects.id'), index=True, nullable=False)
    name = Column(String(255))
    note = Column(Text)
    _repr_columns = [id, name]

    project = relationship('Project', back_populates='regions')
    cells = relationship('Cell', back_populates='region')
    devices = relationship('Device', back_populates='region')
class Cell(Base, VariableMixin):
    """Subdivision of a region; cell names are unique per region."""
    __tablename__ = 'cells'
    __table_args__ = (
        UniqueConstraint("region_id", "name",
                         name="uq_cell0regionid0name"),
    )
    # Backing table used by VariableMixin for this model's variables.
    vars_tablename = 'cell_variables'
    id = Column(Integer, primary_key=True)
    region_id = Column(
        Integer, ForeignKey('regions.id'), index=True, nullable=False)
    project_id = Column(
        Integer, ForeignKey('projects.id'), index=True, nullable=False)
    name = Column(String(255))
    note = Column(Text)
    _repr_columns = [id, name]

    region = relationship('Region', back_populates='cells')
    devices = relationship('Device', back_populates='cell')
    project = relationship('Project', back_populates='cells')
class Device(Base, VariableMixin):
    """Models descriptive data about a host.

    Base of a joined-table inheritance hierarchy (see Host);
    subclasses are discriminated by the `type` column.
    """
    __tablename__ = 'devices'
    __table_args__ = (
        UniqueConstraint("region_id", "name",
                         name="uq_device0regionid0name"),
    )
    # Backing table used by VariableMixin for this model's variables.
    vars_tablename = 'device_variables'
    id = Column(Integer, primary_key=True)
    type = Column(String(50))  # discriminant for joined table inheritance
    name = Column(String(255), nullable=False)
    region_id = Column(
        Integer, ForeignKey('regions.id'), index=True, nullable=False)
    # Cell membership is optional (nullable); region is not.
    cell_id = Column(
        Integer, ForeignKey('cells.id'), index=True, nullable=True)
    project_id = Column(
        Integer, ForeignKey('projects.id'), index=True, nullable=False)
    ip_address = Column(IPAddressType, nullable=False)
    device_type = Column(String(255), nullable=False)
    # this means the host is "active" for administration
    # the device may or may not be reachable by Ansible/other tooling
    #
    # TODO(jimbaker) generalize `note` for supporting governance
    active = Column(Boolean, default=True)
    note = Column(Text)
    _repr_columns = [id, name]

    # many-to-many relationship with labels; labels are sorted to
    # ensure that variable resolution is stable if labels have
    # conflicting settings for a given key
    #
    # NOTE(review): Label.devices declares back_populates="labels",
    # but this side declares no back_populates — SQLAlchemy may
    # complain about the one-sided pairing; confirm.
    labels = relationship(
        'Label',
        secondary=lambda: device_labels,
        collection_class=lambda: SortedSet(key=attrgetter('label')))
    associated_labels = association_proxy('labels', 'label')

    # many-to-one relationship to regions and cells
    region = relationship('Region', back_populates='devices')
    cell = relationship('Cell', back_populates='devices')
    project = relationship('Project', back_populates='devices')

    __mapper_args__ = {
        'polymorphic_on': type,
        'polymorphic_identity': 'devices',
        'with_polymorphic': '*'
    }
class Host(Device):
    """Joined-table subclass of Device representing a host."""
    __tablename__ = 'hosts'
    id = Column(Integer, ForeignKey('devices.id'), primary_key=True)
    # Alias: `hostname` refers to the inherited Device.name column.
    hostname = Device.name
    access_secret_id = Column(Integer, ForeignKey('access_secrets.id'))
    # Self-referential FK; presumably models a parent host
    # (e.g. hypervisor/container host) — confirm against callers.
    parent_id = Column(Integer, ForeignKey('hosts.id'))
    # optional many-to-one relationship to a host-specific secret
    access_secret = relationship('AccessSecret', back_populates='hosts')

    @property
    def resolved(self):
        """Provides a mapping that uses scope resolution for variables.

        Precedence (first match wins): host variables, then label
        variables, then cell variables (only when the host is in a
        cell), then region variables.
        """
        if self.cell:
            return ChainMap(
                self.variables,
                ChainMap(*[label.variables for label in self.labels]),
                self.cell.variables,
                self.region.variables)
        else:
            return ChainMap(
                self.variables,
                ChainMap(*[label.variables for label in self.labels]),
                self.region.variables)

    __mapper_args__ = {
        'polymorphic_identity': 'hosts',
    }
# Association table backing the Device <-> Label many-to-many.
device_labels = Table(
    'device_labels', Base.metadata,
    Column('device_id', ForeignKey('devices.id'), primary_key=True),
    Column('label_id', ForeignKey('labels.id'), primary_key=True))
class Label(Base, VariableMixin):
    """Models a label on hosts, with a many-to-many relationship.

    Such labels include groupings like Ansible groups; as well as
    arbitrary other labels.

    Rather than subclassing labels, we can use prefixes such as
    "group-".

    It is assumed that hierarchies for groups, if any, is represented
    in an external format, such as a group-of-group inventory in
    Ansible.
    """
    __tablename__ = 'labels'
    # Backing table used by VariableMixin for this model's variables.
    vars_tablename = 'label_variables'
    id = Column(Integer, primary_key=True)
    label = Column(String(255), unique=True)

    _repr_columns = [label]

    def __init__(self, label):
        self.label = label

    devices = relationship(
        "Device",
        secondary=device_labels,
        back_populates="labels")
class AccessSecret(Base):
    """Represents a secret for accessing a host. It may be shared.

    For now we assume a PEM-encoded certificate that wraps the private
    key. Such certs may or may not be encrypted; if encrypted, the
    configuration specifies how to interact with other systems, such
    as Barbican or Hashicorp Vault, to retrieve secret data to unlock
    this cert.

    Note that this does not include secrets such as Ansible vault
    files; those are stored outside the inventory database as part of
    the configuration.
    """
    __tablename__ = 'access_secrets'
    id = Column(Integer, primary_key=True)
    # PEM-encoded certificate text (see class docstring).
    cert = Column(Text)

    hosts = relationship('Host', back_populates='access_secret')
Reference in New Issue
Block a user