Adding files to support database migration via alembic.

Change-Id: I57b6b62ec6da37608729fb353dce73971a419722
This commit is contained in:
John Wood 2013-07-05 09:40:53 -05:00
parent 005f53745e
commit 2424670f2b
28 changed files with 570 additions and 19 deletions

3
.gitignore vendored
View File

@ -47,3 +47,6 @@ coverage.xml
# Misc. generated files # Misc. generated files
versiononly.txt versiononly.txt
*.orig
myapp.profile
*.out.myapp

View File

@ -71,6 +71,9 @@ def load_body(req, resp=None, validator=None):
except exception.InvalidObject as e: except exception.InvalidObject as e:
LOG.exception("Failed to validate JSON information") LOG.exception("Failed to validate JSON information")
abort(falcon.HTTP_400, str(e), req, resp) abort(falcon.HTTP_400, str(e), req, resp)
except exception.UnsupportedField as e:
LOG.exception("Provided field value is not supported")
abort(falcon.HTTP_400, str(e), req, resp)
except exception.LimitExceeded as e: except exception.LimitExceeded as e:
LOG.exception("Data limit exceeded") LOG.exception("Data limit exceeded")
abort(falcon.HTTP_413, str(e), req, resp) abort(falcon.HTTP_413, str(e), req, resp)

View File

@ -232,6 +232,7 @@ def handle_exceptions(operation_name=_('System')):
try: try:
fn(inst, req, resp, *args, **kwargs) fn(inst, req, resp, *args, **kwargs)
except falcon.HTTPError as f: except falcon.HTTPError as f:
LOG.exception('Falcon error seen')
raise f # Already converted to Falcon exception, just reraise raise f # Already converted to Falcon exception, just reraise
except Exception: except Exception:
message = _('{0} failure seen - please contact site ' message = _('{0} failure seen - please contact site '

View File

@ -111,6 +111,10 @@ class Forbidden(BarbicanException):
message = _("You are not authorized to complete this action.") message = _("You are not authorized to complete this action.")
class NotSupported(BarbicanException):
message = _("Operation is not supported.")
class ForbiddenPublicImage(Forbidden): class ForbiddenPublicImage(Forbidden):
message = _("You are not authorized to complete this action.") message = _("You are not authorized to complete this action.")
@ -264,6 +268,11 @@ class InvalidObject(BarbicanException):
"'%(schema)s': %(reason)s") "'%(schema)s': %(reason)s")
class UnsupportedField(BarbicanException):
message = _("No support for value set on field '%(field)s' on "
"schema '%(schema)s': %(reason)s")
class UnsupportedHeaderFeature(BarbicanException): class UnsupportedHeaderFeature(BarbicanException):
message = _("Provided header feature is unsupported: %(feature)s") message = _("Provided header feature is unsupported: %(feature)s")

View File

@ -46,7 +46,9 @@ def create_secret(data, tenant, crypto_manager,
""" """
Common business logic to create a secret. Common business logic to create a secret.
""" """
time_keeper = utils.TimeKeeper('Create Secret Resource')
new_secret = models.Secret(data) new_secret = models.Secret(data)
time_keeper.mark('after Secret model create')
new_datum = None new_datum = None
if 'plain_text' in data: if 'plain_text' in data:
@ -60,28 +62,37 @@ def create_secret(data, tenant, crypto_manager,
new_datum = crypto_manager.encrypt(data['plain_text'], new_datum = crypto_manager.encrypt(data['plain_text'],
new_secret, new_secret,
tenant) tenant)
time_keeper.mark('after encrypt')
elif ok_to_generate: elif ok_to_generate:
LOG.debug('Generating new secret...') LOG.debug('Generating new secret...')
# TODO: Generate a good key
new_datum = crypto_manager.generate_data_encryption_key(new_secret, new_datum = crypto_manager.generate_data_encryption_key(new_secret,
tenant) tenant)
time_keeper.mark('after secret generate')
else: else:
LOG.debug('Creating metadata only for the new secret. ' LOG.debug('Creating metadata only for the new secret. '
'A subsequent PUT is required') 'A subsequent PUT is required')
crypto_manager.supports(new_secret, tenant) crypto_manager.supports(new_secret, tenant)
time_keeper.mark('after supports check')
# Create Secret entities in datastore. # Create Secret entities in datastore.
secret_repo.create_from(new_secret) secret_repo.create_from(new_secret)
time_keeper.mark('after Secret datastore create')
new_assoc = models.TenantSecret() new_assoc = models.TenantSecret()
time_keeper.mark('after TenantSecret model create')
new_assoc.tenant_id = tenant.id new_assoc.tenant_id = tenant.id
new_assoc.secret_id = new_secret.id new_assoc.secret_id = new_secret.id
new_assoc.role = "admin" new_assoc.role = "admin"
new_assoc.status = models.States.ACTIVE new_assoc.status = models.States.ACTIVE
tenant_secret_repo.create_from(new_assoc) tenant_secret_repo.create_from(new_assoc)
time_keeper.mark('after TenantSecret datastore create')
if new_datum: if new_datum:
new_datum.secret_id = new_secret.id new_datum.secret_id = new_secret.id
datum_repo.create_from(new_datum) datum_repo.create_from(new_datum)
time_keeper.mark('after Datum datastore create')
time_keeper.dump()
return new_secret return new_secret

View File

@ -17,6 +17,7 @@
Common utilities for Barbican. Common utilities for Barbican.
""" """
import time
from oslo.config import cfg from oslo.config import cfg
import barbican.openstack.common.log as logging import barbican.openstack.common.log as logging
@ -51,3 +52,43 @@ def hostname_for_refs(keystone_id=None, resource=None):
# error. # error.
def getLogger(name): def getLogger(name):
return logging.getLogger(name) return logging.getLogger(name)
class TimeKeeper(object):
    """
    Keeps track of elapsed times and then allows for dumping a summary to
    logs. This class can be used to profile a method at a fine-grained level.
    """

    def __init__(self, name, logger=None):
        # Fall back to this module's logger when none is supplied.
        self.logger = logger or getLogger(__name__)
        self.name = name
        self.time_start = time.time()
        # Timestamp of the most recent mark() call (initially construction).
        self.time_last = self.time_start
        # List of (absolute time, delta since previous mark, note) tuples.
        self.elapsed = []

    def mark(self, note=None):
        """
        Mark a moment in time, with an optional note as to what is
        occurring at the time.

        :param note: Optional note
        """
        time_curr = time.time()
        self.elapsed.append((time_curr, time_curr - self.time_last, note))
        self.time_last = time_curr

    def dump(self):
        """
        Dump the elapsed time(s) to log.
        """
        self.logger.debug("Timing output for '{0}'".format(self.name))
        for timec, timed, note in self.elapsed:
            # Deltas are reported in milliseconds; timestamps in seconds.
            self.logger.debug("  time current/elapsed/notes:"
                              "{0:.3f}/{1:.0f}/{2}".format(timec,
                                                           timed * 1000.,
                                                           note))
        time_current = time.time()
        total_elapsed = time_current - self.time_start
        # Total elapsed since construction, also in milliseconds.
        self.logger.debug("  Final time/elapsed:"
                          "{0:.3f}/{1:.0f}".format(time_current,
                                                   total_elapsed * 1000.))

View File

@ -178,4 +178,18 @@ class NewOrderValidator(ValidatorBase):
reason=_("'secret' attributes " reason=_("'secret' attributes "
"are required")) "are required"))
# Validation secret generation related fields.
# TODO: Invoke the crypto plugin for this purpose
if secret.get('algorithm') != 'aes':
raise exception.UnsupportedField(field="algorithm",
schema=schema_name,
reason=_("Only 'aes' "
"supported"))
bit_length = int(secret.get('bit_length', 0))
if not bit_length in (128, 192, 256):
raise exception.UnsupportedField(field="bit_length",
schema=schema_name,
reason=_("Must be one of 128, "
"192, or 256"))
return json_data return json_data

View File

View File

@ -0,0 +1,57 @@
# A generic, single database configuration
[alembic]
# path to migration scripts
script_location = %(here)s/alembic_migrations
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# default to an empty string because the Barbican migration process will
# extract the correct value and set it programmatically before Alembic is
# fully invoked.
sqlalchemy.url =
#sqlalchemy.url = driver://user:pass@localhost/dbname
#sqlalchemy.url = sqlite:///barbican.sqlite
#sqlalchemy.url = sqlite:////var/lib/barbican/barbican.sqlite
#sqlalchemy.url = postgresql+psycopg2://postgres:postgres@localhost:5432/barbican_api
# Logging configuration
[loggers]
keys = alembic
#keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = DEBUG
handlers = console
qualname =
[logger_sqlalchemy]
level = DEBUG
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

View File

@ -0,0 +1 @@
Generic single-database configuration.

View File

@ -0,0 +1,84 @@
from __future__ import with_statement
from alembic import context
from sqlalchemy import create_engine, pool
from barbican.model.models import BASE
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
# Note that the 'config' instance is not available for unit testing.
try:
config = context.config
except Exception:
config = None
# WARNING! The following was autogenerated by Alembic as part of setting up
# the initial environment. Unfortunately it also **clobbers** the logging
# for the rest of this application, so please do not use it!
# Interpret the config file for Python logging.
# This line sets up loggers basically.
#fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = BASE.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def get_sqlalchemy_url():
    """Return the database URL to run migrations against.

    Prefers the URL that the Barbican migration commands set
    programmatically on the Alembic config (``barbican_sqlalchemy_url``);
    falls back to the ``sqlalchemy.url`` option from alembic.ini.
    """
    # Use getattr() with a default: when Alembic is invoked directly from
    # its own CLI (rather than via barbican.model.migration.commands), the
    # Barbican-specific attribute is never set on the config object, and a
    # plain attribute access would raise AttributeError.
    return (getattr(config, 'barbican_sqlalchemy_url', None) or
            config.get_main_option("sqlalchemy.url"))
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well.  By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    # Only the URL is needed; Alembic emits SQL text instead of executing.
    context.configure(url=get_sqlalchemy_url())

    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    # NullPool: a one-shot migration run has no need to keep pooled
    # connections alive afterwards.
    engine = create_engine(
        get_sqlalchemy_url(),
        poolclass=pool.NullPool)

    connection = engine.connect()
    context.configure(
        connection=connection,
        target_metadata=target_metadata
    )

    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        # Always release the connection, even if a migration fails.
        connection.close()
# When executed by Alembic a config is present; pick offline vs. online
# mode from the Alembic context. Under unit testing this module is imported
# without an Alembic environment (config is None), so no migration runs.
if config:
    if context.is_offline_mode():
        run_migrations_offline()
    else:
        run_migrations_online()

View File

@ -0,0 +1,22 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision}
Create Date: ${create_date}
"""
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}

View File

@ -0,0 +1,26 @@
"""change test column back to not null
Revision ID: 153ed0150d78
Revises: 40a9c7408b51
Create Date: 2013-06-18 17:33:20.281076
"""
# revision identifiers, used by Alembic.
revision = '153ed0150d78'
down_revision = '40a9c7408b51'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.alter_column('test', u'name',
existing_type=sa.String(50),
nullable=False)
def downgrade():
op.alter_column('test', u'name',
existing_type=sa.String(50),
nullable=True)

View File

@ -0,0 +1,27 @@
"""create test table
Revision ID: 1a0c2cdafb38
Revises: 40a9c7408b51
Create Date: 2013-06-17 16:42:13.634746
"""
# revision identifiers, used by Alembic.
revision = '1a0c2cdafb38'
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table(
'test',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('name', sa.String(50), nullable=False),
sa.Column('description', sa.Unicode(200)),
)
def downgrade():
op.drop_table('test')

View File

@ -0,0 +1,22 @@
"""test column add
Revision ID: 1cfb48928f42
Revises: 153ed0150d78
Create Date: 2013-06-19 00:15:03.656628
"""
# revision identifiers, used by Alembic.
revision = '1cfb48928f42'
down_revision = '153ed0150d78'
from alembic import op
from sqlalchemy import Column, String
def upgrade():
op.add_column('secrets', Column('dummy_column', String()))
def downgrade():
op.drop_column('secrets', 'dummy_column')

View File

@ -0,0 +1,26 @@
"""Test auto migration
Revision ID: 40a9c7408b51
Revises: None
Create Date: 2013-06-17 10:42:20.078204
"""
# revision identifiers, used by Alembic.
revision = '40a9c7408b51'
down_revision = '1a0c2cdafb38'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.alter_column('test', u'name',
existing_type=sa.String(50),
nullable=True)
def downgrade():
op.alter_column('test', u'name',
existing_type=sa.String(50),
nullable=False)

View File

@ -0,0 +1,33 @@
"""test change
Revision ID: 53c2ae2df15d
Revises: 1cfb48928f42
Create Date: 2013-06-19 23:35:52.802639
"""
# revision identifiers, used by Alembic.
revision = '53c2ae2df15d'
down_revision = '1cfb48928f42'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table(u'test')
op.drop_column('secrets', u'dummy_column')
### end Alembic commands ###
def downgrade():
op.add_column('secrets', sa.Column(u'dummy_column', sa.VARCHAR(),
nullable=True))
op.create_table(
'test',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('name', sa.String(50), nullable=False),
sa.Column('description', sa.Unicode(200)),
)

View File

@ -0,0 +1,55 @@
"""
Interface to the Alembic migration process and environment.
Concepts in this file are based on Quantum's Alembic approach.
Available Alembic commands are detailed here:
https://alembic.readthedocs.org/en/latest/api.html#module-alembic.command
"""
import os
from alembic import command as alembic_command
from alembic import config as alembic_config
from oslo.config import cfg
from barbican.common import utils
LOG = utils.getLogger(__name__)
db_opts = [
cfg.StrOpt('sql_connection', default=None),
]
CONF = cfg.CONF
CONF.register_opts(db_opts)
def init_config(sql_url=None):
    """Initialize and return the Alembic configuration.

    :param sql_url: Optional database URL; when omitted, the
        'sql_connection' option from the Barbican configuration is used.
    """
    config = alembic_config.Config(
        os.path.join(os.path.dirname(__file__), 'alembic.ini')
    )
    config.set_main_option('script_location',
                           'barbican.model.migration:alembic_migrations')
    # Stash the URL on the config object so alembic_migrations/env.py can
    # pick it up when Alembic runs the migration environment.
    config.barbican_sqlalchemy_url = sql_url or CONF.sql_connection
    return config
def upgrade(to_version='head', sql_url=None):
    """Upgrade to the specified version.

    :param to_version: Target revision; defaults to the latest ('head').
    :param sql_url: Optional database URL override.
    """
    alembic_cfg = init_config(sql_url)
    alembic_command.upgrade(alembic_cfg, to_version)
def downgrade(to_version, sql_url=None):
    """Downgrade to the specified version.

    :param to_version: Target revision to downgrade back to (required).
    :param sql_url: Optional database URL override.
    """
    alembic_cfg = init_config(sql_url)
    alembic_command.downgrade(alembic_cfg, to_version)
def generate(autogenerate=True, message='generate changes', sql_url=None):
    """Generate a version file.

    :param autogenerate: When True, ask Alembic to diff the models'
        metadata against the database to populate the revision.
    :param message: Commit-style message recorded in the revision file.
    :param sql_url: Optional database URL override.
    """
    alembic_cfg = init_config(sql_url)
    alembic_command.revision(alembic_cfg, message=message,
                             autogenerate=autogenerate)

View File

@ -31,8 +31,8 @@ import sqlalchemy.orm as sa_orm
from sqlalchemy import or_ from sqlalchemy import or_
from barbican.common import exception from barbican.common import exception
#TODO: from barbican.db.sqlalchemy import migration
from barbican.model import models from barbican.model import models
from barbican.model.migration import commands
from barbican.openstack.common import timeutils from barbican.openstack.common import timeutils
from barbican.openstack.common.gettextutils import _ from barbican.openstack.common.gettextutils import _
from barbican.common import utils from barbican.common import utils
@ -142,11 +142,9 @@ def get_engine():
if CONF.db_auto_create: if CONF.db_auto_create:
LOG.info(_('auto-creating barbican registry DB')) LOG.info(_('auto-creating barbican registry DB'))
models.register_models(_ENGINE) models.register_models(_ENGINE)
#TODO: try:
#TODO: migration.version_control() # Upgrade the database to the latest version.
#TODO: except exception.DatabaseMigrationError: commands.upgrade()
#TODO: # only arises when the DB exists and is under version control
#TODO: pass
else: else:
LOG.info(_('not auto-creating barbican registry DB')) LOG.info(_('not auto-creating barbican registry DB'))
@ -262,7 +260,7 @@ class BaseRepo(object):
def create_from(self, entity): def create_from(self, entity):
"""Sub-class hook: create from entity.""" """Sub-class hook: create from entity."""
start = time.time() # DEBUG
if not entity: if not entity:
msg = "Must supply non-None {0}.".format(self._do_entity_name) msg = "Must supply non-None {0}.".format(self._do_entity_name)
raise exception.Invalid(msg) raise exception.Invalid(msg)
@ -290,6 +288,8 @@ class BaseRepo(object):
LOG.exception('Problem saving entity for create') LOG.exception('Problem saving entity for create')
raise exception.Duplicate("Entity ID %s already exists!" raise exception.Duplicate("Entity ID %s already exists!"
% values['id']) % values['id'])
LOG.debug('Elapsed repo '
'create secret:{0}'.format(time.time() - start)) # DEBUG
return entity return entity

View File

@ -658,9 +658,9 @@ class WhenCreatingOrdersUsingOrdersResource(unittest.TestCase):
def setUp(self): def setUp(self):
self.secret_name = 'name' self.secret_name = 'name'
self.secret_mime_type = 'text/plain' self.secret_mime_type = 'text/plain'
self.secret_algorithm = "algo" self.secret_algorithm = "aes"
self.secret_bit_length = 512 self.secret_bit_length = 128
self.secret_cypher_type = "cytype" self.secret_cypher_type = "cbc"
self.tenant_internal_id = 'tenantid1234' self.tenant_internal_id = 'tenantid1234'
self.tenant_keystone_id = 'keystoneid1234' self.tenant_keystone_id = 'keystoneid1234'

View File

@ -221,9 +221,9 @@ class WhenTestingOrderValidator(unittest.TestCase):
def setUp(self): def setUp(self):
self.name = 'name' self.name = 'name'
self.mime_type = 'text/plain' self.mime_type = 'text/plain'
self.secret_algorithm = 'algo' self.secret_algorithm = 'aes'
self.secret_bit_length = 512 self.secret_bit_length = 128
self.secret_cypher_type = 'cytype' self.secret_cypher_type = 'cbc'
self.secret_req = {'name': self.name, self.secret_req = {'name': self.name,
'mime_type': self.mime_type, 'mime_type': self.mime_type,

View File

@ -18,6 +18,8 @@ OPTS='--daemonize /var/log/barbican/uwsgi.log'
# Configure for a local deployment environment: # Configure for a local deployment environment:
if [ ! -f $CONF_FILE ]; if [ ! -f $CONF_FILE ];
then then
echo 'Running locally...'
PKG_DIR=$PWD/etc/$PKG PKG_DIR=$PWD/etc/$PKG
CONF_FILE=./etc/$PKG/barbican-api.conf CONF_FILE=./etc/$PKG/barbican-api.conf
PYTHONPATH=$VENV_PYTHON:$PYTHONPATH PYTHONPATH=$VENV_PYTHON:$PYTHONPATH
@ -46,4 +48,5 @@ fi
#fi #fi
echo 'Running Barbican uWSGI Emperor '$PKG_DIR/vassals echo 'Running Barbican uWSGI Emperor '$PKG_DIR/vassals
echo 'Executing uwsgi with these options: '$PKG_DIR/vassals' '$OPTS
uwsgi --master --emperor $PKG_DIR/vassals $OPTS uwsgi --master --emperor $PKG_DIR/vassals $OPTS

98
bin/barbican-db-manage.py Executable file
View File

@ -0,0 +1,98 @@
#!/usr/bin/env python
# vim: tabstop=4 shiftwidth=4 softtabstop=4
import os
import sys
import argparse
sys.path.insert(0, os.getcwd())
from barbican.model.migration import commands
class DatabaseManager(object):
    """
    Builds and executes a CLI parser to manage the Barbican database,
    using Alembic commands.
    """

    def __init__(self):
        self.parser = self.get_main_parser()
        self.subparsers = self.parser.add_subparsers(
            title='subcommands',
            description='Action to perform')
        self.add_revision_args()
        self.add_downgrade_args()
        self.add_upgrade_args()

    def get_main_parser(self):
        """Create top-level parser and arguments."""
        parser = argparse.ArgumentParser(description='Barbican DB manager.')
        # Fix: help text previously had a stray trailing ')'.
        parser.add_argument('--dburl', '-d', default=None,
                            help='URL to the database')
        return parser

    def add_revision_args(self):
        """Create 'revision' command parser and arguments."""
        create_parser = self.subparsers.add_parser(
            'revision', help='Create a new DB version file.')
        create_parser.add_argument('--message', '-m', default='DB change',
                                   help='the message for the DB change')
        create_parser.add_argument('--autogenerate',
                                   help='autogenerate from models',
                                   action='store_true')
        create_parser.set_defaults(func=self.revision)

    def add_upgrade_args(self):
        """Create 'upgrade' command parser and arguments."""
        create_parser = self.subparsers.add_parser(
            'upgrade',
            help='Upgrade to a future version DB version file')
        create_parser.add_argument('--version', '-v', default='head',
                                   help='the version to upgrade to, or else '
                                        'the latest/head if not specified.')
        create_parser.set_defaults(func=self.upgrade)

    def add_downgrade_args(self):
        """Create 'downgrade' command parser and arguments."""
        create_parser = self.subparsers.add_parser(
            'downgrade',
            help='Downgrade to a previous DB version file.')
        create_parser.add_argument('--version', '-v', default='need version',
                                   help='the version to downgrade back to.')
        create_parser.set_defaults(func=self.downgrade)

    def revision(self, args):
        """Process the 'revision' Alembic command."""
        commands.generate(autogenerate=args.autogenerate,
                          message=args.message,
                          sql_url=args.dburl)

    def upgrade(self, args):
        """Process the 'upgrade' Alembic command."""
        commands.upgrade(to_version=args.version,
                         sql_url=args.dburl)

    def downgrade(self, args):
        """Process the 'downgrade' Alembic command."""
        commands.downgrade(to_version=args.version,
                           sql_url=args.dburl)

    def execute(self):
        """Parse the command line arguments and dispatch to the subcommand."""
        args = self.parser.parse_args()

        # Perform other setup here...

        args.func(args)


def main():
    """Entry point: build the CLI manager and run the selected subcommand."""
    dm = DatabaseManager()
    dm.execute()


if __name__ == '__main__':
    main()

View File

@ -33,7 +33,6 @@ possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
if os.path.exists(os.path.join(possible_topdir, 'barbican', '__init__.py')): if os.path.exists(os.path.join(possible_topdir, 'barbican', '__init__.py')):
sys.path.insert(0, possible_topdir) sys.path.insert(0, possible_topdir)
print "path: ",sys.path," pardir:",os.pardir
gettext.install('barbican', unicode=1) gettext.install('barbican', unicode=1)

View File

@ -3,6 +3,11 @@
pipeline = unauthenticated-context apiapp pipeline = unauthenticated-context apiapp
####pipeline = simple apiapp ####pipeline = simple apiapp
#Use this pipeline to activate a repoze.profile middleware and HTTP port,
# to provide profiling information for the REST API processing.
[pipeline:barbican-profile]
pipeline = unauthenticated-context egg:Paste#cgitb egg:Paste#httpexceptions profile apiapp
#Use this pipeline for keystone auth #Use this pipeline for keystone auth
[pipeline:barbican-api-keystone] [pipeline:barbican-api-keystone]
pipeline = keystone_authtoken context apiapp pipeline = keystone_authtoken context apiapp
@ -32,3 +37,12 @@ admin_password = orange
auth_version = v2.0 auth_version = v2.0
#delay failing perhaps to log the unauthorized request in barbican .. #delay failing perhaps to log the unauthorized request in barbican ..
#delay_auth_decision = true #delay_auth_decision = true
[filter:profile]
use = egg:repoze.profile
log_filename = myapp.profile
cachegrind_filename = cachegrind.out.myapp
discard_first_request = true
path = /__profile__
flush_at_shutdown = true
unwind = false

View File

@ -31,10 +31,10 @@ backlog = 4096
# registry server. Any valid SQLAlchemy connection string is fine. # registry server. Any valid SQLAlchemy connection string is fine.
# See: http://www.sqlalchemy.org/docs/05/reference/sqlalchemy/connections.html#sqlalchemy.create_engine # See: http://www.sqlalchemy.org/docs/05/reference/sqlalchemy/connections.html#sqlalchemy.create_engine
# Uncomment this for local dev, putting db in project directory: # Uncomment this for local dev, putting db in project directory:
sql_connection = sqlite:///barbican.sqlite #sql_connection = sqlite:///barbican.sqlite
# Note: For absolute addresses, use '////' slashes after 'sqlite:' # Note: For absolute addresses, use '////' slashes after 'sqlite:'
# Uncomment for a more global development environment # Uncomment for a more global development environment
#sql_connection = sqlite:////var/lib/barbican/barbican.sqlite sql_connection = sqlite:////var/lib/barbican/barbican.sqlite
# Period in seconds after which SQLAlchemy should reestablish its connection # Period in seconds after which SQLAlchemy should reestablish its connection
# to the database. # to the database.

View File

@ -15,3 +15,5 @@ pycrypto>=2.6
python-dateutil>=2.1 python-dateutil>=2.1
jsonschema>=2.0.0 jsonschema>=2.0.0
SQLAlchemy>=0.8.1 SQLAlchemy>=0.8.1
alembic>=0.5.0
psycopg2>=2.5.1