Merge "Add SQL token backend" into redux

This commit is contained in:
Jenkins 2012-02-08 21:58:52 +00:00 committed by Gerrit Code Review
commit cad74aec6e
4 changed files with 82 additions and 43 deletions

View File

@@ -0,0 +1,52 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
from keystone import token
from keystone.common import sql
class TokenModel(sql.ModelBase, sql.DictBase):
    """Token table: an indexed id column plus a JSON blob holding
    every non-indexed property of the token."""
    __tablename__ = 'token'
    id = sql.Column(sql.String(64), primary_key=True)
    extra = sql.Column(sql.JsonBlob())

    @classmethod
    def from_dict(cls, token_dict):
        """Build a row from a token dict.

        The 'id' key maps to the indexed column; everything else is
        stashed in the 'extra' JSON blob.
        """
        remainder = token_dict.copy()
        row_id = remainder.pop('id')
        return cls(id=row_id, extra=remainder)

    def to_dict(self):
        """Reconstruct the original token dict from the row."""
        token_dict = self.extra.copy()
        token_dict['id'] = self.id
        return token_dict
class Token(sql.Base, token.Driver):
    """SQL-backed implementation of the token driver interface."""

    # Public interface
    def get_token(self, token_id):
        """Return the token's dict, or None if no such token exists."""
        session = self.get_session()
        token_ref = session.query(TokenModel).filter_by(id=token_id).first()
        if not token_ref:
            return None
        return token_ref.to_dict()

    def create_token(self, token_id, data):
        """Persist *data* under *token_id* and return the stored dict.

        Works on a copy of *data* so the caller's dict is not mutated
        by the id injection (the original wrote data['id'] in place).
        """
        data_copy = data.copy()
        data_copy['id'] = token_id
        session = self.get_session()
        with session.begin():
            token_ref = TokenModel.from_dict(data_copy)
            session.add(token_ref)
            session.flush()
        return token_ref.to_dict()

    def delete_token(self, token_id):
        """Delete the token; deleting a missing token is a no-op.

        Guard against session.delete(None): the original passed the
        query result straight to delete(), which raises when the token
        does not exist.
        """
        session = self.get_session()
        token_ref = session.query(TokenModel)\
                .filter_by(id=token_id)\
                .first()
        if token_ref is None:
            return
        with session.begin():
            session.delete(token_ref)
            session.flush()

View File

@@ -1,3 +1,7 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
import uuid
class IdentityTests(object):
def test_authenticate_bad_user(self):
self.assertRaises(AssertionError,
@@ -197,3 +201,18 @@ class IdentityTests(object):
self.assertEqual(tenant_ref['id'], 'fake1')
tenant_ref = self.identity_api.get_tenant('fake2')
self.assert_(tenant_ref is None)
class TokenTests(object):
    """Backend-agnostic token CRUD checks, mixed into backend TestCases.

    Expects the mixing class to provide self.token_api (a token driver)
    and the standard unittest assertion helpers.
    """

    def test_token_crud(self):
        # Round-trip a token through create/get, then verify that
        # delete really removes it.
        token_id = uuid.uuid4().hex
        payload = {'id': token_id, 'a': 'b'}
        created = self.token_api.create_token(token_id, payload)
        self.assertDictEquals(created, payload)
        fetched = self.token_api.get_token(token_id)
        self.assertEquals(fetched, payload)
        self.token_api.delete_token(token_id)
        gone = self.token_api.get_token(token_id)
        self.assertTrue(gone is None)

View File

@@ -1,5 +1,3 @@
import uuid
from keystone import test
from keystone.identity.backends import kvs as identity_kvs
from keystone.token.backends import kvs as token_kvs
@@ -16,25 +14,11 @@ class KvsIdentity(test.TestCase, test_backend.IdentityTests):
self.load_fixtures(default_fixtures)
class KvsToken(test.TestCase):
class KvsToken(test.TestCase, test_backend.TokenTests):
    """Runs the shared TokenTests CRUD suite against the KVS backend."""
    def setUp(self):
        super(KvsToken, self).setUp()
        # Fresh in-memory store per test so cases stay independent.
        self.token_api = token_kvs.Token(db={})
def test_token_crud(self):
token_id = uuid.uuid4().hex
data = {'id': token_id,
'a': 'b'}
data_ref = self.token_api.create_token(token_id, data)
self.assertDictEquals(data_ref, data)
new_data_ref = self.token_api.get_token(token_id)
self.assertEquals(new_data_ref, data)
self.token_api.delete_token(token_id)
deleted_data_ref = self.token_api.get_token(token_id)
self.assert_(deleted_data_ref is None)
class KvsCatalog(test.TestCase):
def setUp(self):

View File

@@ -1,10 +1,10 @@
import os
import uuid
# vim: tabstop=4 shiftwidth=4 softtabstop=4
from keystone import config
from keystone import test
from keystone.common.sql import util as sql_util
from keystone.identity.backends import sql as identity_sql
from keystone.token.backends import sql as token_sql
import test_backend
import default_fixtures
@@ -13,14 +13,9 @@ import default_fixtures
CONF = config.CONF
class SqlIdentity(test.TestCase, test_backend.IdentityTests):
def setUp(self):
super(SqlIdentity, self).setUp()
try:
os.unlink('bla.db')
except Exception:
pass
CONF(config_files=[test.etcdir('keystone.conf'),
test.testsdir('test_overrides.conf'),
test.testsdir('backend_sql.conf')])
@@ -29,25 +24,14 @@ class SqlIdentity(test.TestCase, test_backend.IdentityTests):
self.load_fixtures(default_fixtures)
#class SqlToken(test_backend_kvs.KvsToken):
# def setUp(self):
# super(SqlToken, self).setUp()
# self.token_api = sql.SqlToken()
# self.load_fixtures(default_fixtures)
# def test_token_crud(self):
# token_id = uuid.uuid4().hex
# data = {'id': token_id,
# 'a': 'b'}
# data_ref = self.token_api.create_token(token_id, data)
# self.assertDictEquals(data_ref, data)
# new_data_ref = self.token_api.get_token(token_id)
# self.assertEquals(new_data_ref, data)
# self.token_api.delete_token(token_id)
# deleted_data_ref = self.token_api.get_token(token_id)
# self.assert_(deleted_data_ref is None)
class SqlToken(test.TestCase, test_backend.TokenTests):
    """Runs the shared TokenTests CRUD suite against the SQL backend."""
    def setUp(self):
        super(SqlToken, self).setUp()
        # Load the SQL backend overrides before touching the database.
        config_files = [test.etcdir('keystone.conf'),
                        test.testsdir('test_overrides.conf'),
                        test.testsdir('backend_sql.conf')]
        CONF(config_files=config_files)
        sql_util.setup_test_database()
        self.token_api = token_sql.Token()
#class SqlCatalog(test_backend_kvs.KvsCatalog):