author     Morgan Fainberg <m@metacloud.com>    2013-12-08 21:11:59 -0800
committer  Morgan Fainberg <m@metacloud.com>    2014-01-13 09:53:00 -0800
commit     9837137580688d0c8509c2a5337004d55f2b88e0 (patch)
tree       cd23282133c265a7d0bab984fc83412a8b0b3336
parent     83db9722c201eca9fa47d93fce7d09dd6f28e053 (diff)
download   keystone-9837137580688d0c8509c2a5337004d55f2b88e0.tar.gz
Convert Token KVS backend to new KeyValueStore Impl
This patchset converts the current token KVS backend to the new dogpile.cache-based KeyValueStore implementation. The change provides the same public interface as the previous KVS driver but drastically reworks the internal mechanisms to be more similar to the memcache-based driver, so that direct access to the in-memory db is no longer required to use the KeyValueStore effectively.

The KVS token backend is no longer deprecated. However, it is still inadvisable to use the basic in-memory KVS backend for anything outside of testing.

DocImpact
bp: dogpile-kvs-backends

Change-Id: Ib278636d4ffa3f7152287a48d02be598c50f698a
-rw-r--r--keystone/tests/test_backend.py107
-rw-r--r--keystone/tests/test_backend_kvs.py69
-rw-r--r--keystone/tests/test_backend_memcache.py4
-rw-r--r--keystone/token/backends/kvs.py370
-rw-r--r--keystone/token/core.py5
5 files changed, 439 insertions, 116 deletions
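
At its core, the reworked driver keeps three kinds of keys in the KeyValueStore: the token refs themselves, a per-user index of (token_id, expires) tuples, and a single revocation list. The sketch below is illustrative only; the key prefixes come from the keystone/token/backends/kvs.py diff further down, while the concrete IDs and timestamps are made up:

    # Illustrative layout of the store used by the new driver (values invented).
    store = {
        # Full token ref, written by create_token() under 'token-<token_id>'.
        # The ref's 'expires' may also be a datetime; the index below always
        # stores the isotime string.
        'token-9f3c0de1': {'id': '9f3c0de1',
                           'user': {'id': 'testuserid'},
                           'expires': '2014-01-13T18:53:00.000000Z'},
        # Per-user index of (token_id, expires_isotime) tuples; expired and
        # revoked entries are pruned on every create_token().
        'usertokens-testuserid': [
            ('9f3c0de1', '2014-01-13T18:53:00.000000Z'),
        ],
        # Revocation list appended to by delete_token(); expired entries are
        # dropped whenever a new revocation is recorded.
        'revocation-list': [
            {'id': '77aa12bc', 'expires': '2014-01-13T17:10:00.000000Z'},
        ],
    }
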
diff --git a/keystone/tests/test_backend.py b/keystone/tests/test_backend.py
index 20bfaaec5..11c808025 100644
--- a/keystone/tests/test_backend.py
+++ b/keystone/tests/test_backend.py
@@ -20,14 +20,16 @@ import hashlib
import mock
import uuid
+from six import moves
+
from keystone.catalog import core
from keystone import config
from keystone import exception
from keystone.openstack.common import timeutils
from keystone import tests
from keystone.tests import default_fixtures
-from six import moves
-
+from keystone.tests import test_utils
+from keystone.token import provider
CONF = config.CONF
DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
@@ -2902,27 +2904,49 @@ class TokenTests(object):
self.assertRaises(exception.TokenNotFound,
self.token_api.delete_token, token_id)
- def create_token_sample_data(self, tenant_id=None, trust_id=None,
- user_id="testuserid"):
- token_id = self._create_token_id()
+ def create_token_sample_data(self, token_id=None, tenant_id=None,
+ trust_id=None, user_id=None, expires=None):
+ if token_id is None:
+ token_id = self._create_token_id()
+ if user_id is None:
+ user_id = 'testuserid'
+ # FIXME(morganfainberg): These tokens look nothing like "Real" tokens.
+ # This should be updated when token_api is updated to merge in the
+ # issue_token logic from the providers (token issuance should be a
+ # pipeline). The fix should be in implementation of blueprint:
+ # token-issuance-pipeline
data = {'id': token_id, 'a': 'b',
'user': {'id': user_id}}
if tenant_id is not None:
data['tenant'] = {'id': tenant_id, 'name': tenant_id}
if tenant_id is NULL_OBJECT:
data['tenant'] = None
+ if expires is not None:
+ data['expires'] = expires
if trust_id is not None:
data['trust_id'] = trust_id
+ data.setdefault('access', {}).setdefault('trust', {})
+ # Testuserid2 is used here since a trustee will be different in
+ # the cases of impersonation and therefore should not match the
+ # token's user_id.
+ data['access']['trust']['trustee_user_id'] = 'testuserid2'
+ data['token_version'] = provider.V2
+ # Issue token stores a copy of all token data at token['token_data'].
+ # This emulates that assumption as part of the test.
+ data['token_data'] = copy.deepcopy(data)
new_token = self.token_api.create_token(token_id, data)
- return new_token['id']
+ return new_token['id'], data
def test_delete_tokens(self):
tokens = self.token_api._list_tokens('testuserid')
self.assertEqual(len(tokens), 0)
- token_id1 = self.create_token_sample_data('testtenantid')
- token_id2 = self.create_token_sample_data('testtenantid')
- token_id3 = self.create_token_sample_data(tenant_id='testtenantid',
- user_id="testuserid1")
+ token_id1, data = self.create_token_sample_data(
+ tenant_id='testtenantid')
+ token_id2, data = self.create_token_sample_data(
+ tenant_id='testtenantid')
+ token_id3, data = self.create_token_sample_data(
+ tenant_id='testtenantid',
+ user_id='testuserid1')
tokens = self.token_api._list_tokens('testuserid')
self.assertEqual(len(tokens), 2)
self.assertIn(token_id2, tokens)
@@ -2941,11 +2965,13 @@ class TokenTests(object):
def test_delete_tokens_trust(self):
tokens = self.token_api._list_tokens(user_id='testuserid')
self.assertEqual(len(tokens), 0)
- token_id1 = self.create_token_sample_data(tenant_id='testtenantid',
- trust_id='testtrustid')
- token_id2 = self.create_token_sample_data(tenant_id='testtenantid',
- user_id="testuserid1",
- trust_id="testtrustid1")
+ token_id1, data = self.create_token_sample_data(
+ tenant_id='testtenantid',
+ trust_id='testtrustid')
+ token_id2, data = self.create_token_sample_data(
+ tenant_id='testtenantid',
+ user_id='testuserid1',
+ trust_id='testtrustid1')
tokens = self.token_api._list_tokens('testuserid')
self.assertEqual(len(tokens), 1)
self.assertIn(token_id1, tokens)
@@ -2959,11 +2985,11 @@ class TokenTests(object):
def _test_token_list(self, token_list_fn):
tokens = token_list_fn('testuserid')
self.assertEqual(len(tokens), 0)
- token_id1 = self.create_token_sample_data()
+ token_id1, data = self.create_token_sample_data()
tokens = token_list_fn('testuserid')
self.assertEqual(len(tokens), 1)
self.assertIn(token_id1, tokens)
- token_id2 = self.create_token_sample_data()
+ token_id2, data = self.create_token_sample_data()
tokens = token_list_fn('testuserid')
self.assertEqual(len(tokens), 2)
self.assertIn(token_id2, tokens)
@@ -2980,10 +3006,10 @@ class TokenTests(object):
# tenant-specific tokens
tenant1 = uuid.uuid4().hex
tenant2 = uuid.uuid4().hex
- token_id3 = self.create_token_sample_data(tenant_id=tenant1)
- token_id4 = self.create_token_sample_data(tenant_id=tenant2)
+ token_id3, data = self.create_token_sample_data(tenant_id=tenant1)
+ token_id4, data = self.create_token_sample_data(tenant_id=tenant2)
# test for existing but empty tenant (LP:1078497)
- token_id5 = self.create_token_sample_data(tenant_id=NULL_OBJECT)
+ token_id5, data = self.create_token_sample_data(tenant_id=NULL_OBJECT)
tokens = token_list_fn('testuserid')
self.assertEqual(len(tokens), 3)
self.assertNotIn(token_id1, tokens)
@@ -3008,7 +3034,7 @@ class TokenTests(object):
def test_token_list_trust(self):
trust_id = uuid.uuid4().hex
- token_id5 = self.create_token_sample_data(trust_id=trust_id)
+ token_id5, data = self.create_token_sample_data(trust_id=trust_id)
tokens = self.token_api._list_tokens('testuserid', trust_id=trust_id)
self.assertEqual(len(tokens), 1)
self.assertIn(token_id5, tokens)
@@ -3174,6 +3200,45 @@ class TokenTests(object):
for t in self.token_api.list_revoked_tokens():
self.assertIn('expires', t)
+ def test_create_unicode_token_id(self):
+ token_id = unicode(self._create_token_id())
+ self.create_token_sample_data(token_id=token_id)
+ self.token_api.get_token(token_id)
+
+ def test_create_unicode_user_id(self):
+ user_id = unicode(uuid.uuid4().hex)
+ token_id, data = self.create_token_sample_data(user_id=user_id)
+ self.token_api.get_token(token_id)
+
+ def test_list_tokens_unicode_user_id(self):
+ user_id = unicode(uuid.uuid4().hex)
+ self.token_api.list_tokens(user_id)
+
+ def test_token_expire_timezone(self):
+
+ @test_utils.timezone
+ def _create_token(expire_time):
+ token_id = uuid.uuid4().hex
+ user_id = unicode(uuid.uuid4().hex)
+ return self.create_token_sample_data(token_id=token_id,
+ user_id=user_id,
+ expires=expire_time)
+
+ for d in ['+0', '-11', '-8', '-5', '+5', '+8', '+14']:
+ test_utils.TZ = 'UTC' + d
+ expire_time = timeutils.utcnow() + datetime.timedelta(minutes=1)
+ token_id, data_in = _create_token(expire_time)
+ data_get = self.token_api.get_token(token_id)
+
+ self.assertEqual(data_in['id'], data_get['id'],
+ 'TZ=%s' % test_utils.TZ)
+
+ expire_time_expired = (
+ timeutils.utcnow() + datetime.timedelta(minutes=-1))
+ token_id, data_in = _create_token(expire_time_expired)
+ self.assertRaises(exception.TokenNotFound,
+ self.token_api.get_token, data_in['id'])
+
class TokenCacheInvalidation(object):
def _create_test_data(self):
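
The new test_token_expire_timezone test above leans on a test_utils.timezone decorator and a module-level test_utils.TZ value that are not part of this changeset. A hypothetical sketch of such a helper (not the keystone implementation; just the kind of Unix-only TZ-swapping wrapper the test assumes):

    import functools
    import os
    import time

    TZ = ''  # set by the test before each call, e.g. 'UTC+8'


    def timezone(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Hypothetical: apply the module-level TZ for the duration of the
            # call, then restore whatever was set before.
            tz_original = os.environ.get('TZ')
            try:
                if TZ:
                    os.environ['TZ'] = TZ
                    time.tzset()
                return func(*args, **kwargs)
            finally:
                if TZ:
                    if tz_original is not None:
                        os.environ['TZ'] = tz_original
                    else:
                        del os.environ['TZ']
                    time.tzset()
        return wrapper
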
diff --git a/keystone/tests/test_backend_kvs.py b/keystone/tests/test_backend_kvs.py
index 7b9cc2381..09173fd37 100644
--- a/keystone/tests/test_backend_kvs.py
+++ b/keystone/tests/test_backend_kvs.py
@@ -13,11 +13,13 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
+import datetime
import uuid
from keystone import config
from keystone import exception
from keystone import identity
+from keystone.openstack.common import timeutils
from keystone import tests
from keystone.tests import default_fixtures
from keystone.tests import test_backend
@@ -83,6 +85,73 @@ class KvsToken(tests.TestCase, test_backend.TokenTests):
'keystone.identity.backends.kvs.Identity')
self.load_backends()
+ def test_flush_expired_token(self):
+ self.assertRaises(exception.NotImplemented,
+ self.token_api.flush_expired_tokens)
+
+ def _update_user_token_index_direct(self, user_key, token_id, new_data):
+ token_list = self.token_api.driver._get_user_token_list_with_expiry(
+ user_key)
+ # Update the user-index so that the expires time is _actually_ expired
+ # since we do not do an explicit get on the token, we only reference
+ # the data in the user index (to save extra round-trips to the kvs
+ # backend).
+ for i, data in enumerate(token_list):
+ if data[0] == token_id:
+ token_list[i] = new_data
+ break
+ self.token_api.driver._store.set(user_key, token_list)
+
+ def test_cleanup_user_index_on_create(self):
+ user_id = unicode(uuid.uuid4().hex)
+ valid_token_id, data = self.create_token_sample_data(user_id=user_id)
+ expired_token_id, expired_data = self.create_token_sample_data(
+ user_id=user_id)
+
+ expire_delta = datetime.timedelta(seconds=86400)
+
+ # NOTE(morganfainberg): Directly access the data cache since we need to
+ # get expired tokens as well as valid tokens. token_api.list_tokens()
+ # will not return any expired tokens in the list.
+ user_key = self.token_api.driver._prefix_user_id(user_id)
+ user_token_list = self.token_api.driver._store.get(user_key)
+ valid_token_ref = self.token_api.get_token(valid_token_id)
+ expired_token_ref = self.token_api.get_token(expired_token_id)
+ expected_user_token_list = [
+ (valid_token_id, timeutils.isotime(valid_token_ref['expires'],
+ subsecond=True)),
+ (expired_token_id, timeutils.isotime(expired_token_ref['expires'],
+ subsecond=True))]
+ self.assertEqual(user_token_list, expected_user_token_list)
+ new_expired_data = (expired_token_id,
+ timeutils.isotime(
+ (timeutils.utcnow() - expire_delta),
+ subsecond=True))
+ self._update_user_token_index_direct(user_key, expired_token_id,
+ new_expired_data)
+ valid_token_id_2, valid_data_2 = self.create_token_sample_data(
+ user_id=user_id)
+ valid_token_ref_2 = self.token_api.get_token(valid_token_id_2)
+ expected_user_token_list = [
+ (valid_token_id, timeutils.isotime(valid_token_ref['expires'],
+ subsecond=True)),
+ (valid_token_id_2, timeutils.isotime(valid_token_ref_2['expires'],
+ subsecond=True))]
+ user_token_list = self.token_api.driver._store.get(user_key)
+ self.assertEqual(user_token_list, expected_user_token_list)
+
+ # Test that revoked tokens are removed from the list on create.
+ self.token_api.delete_token(valid_token_id_2)
+ new_token_id, data = self.create_token_sample_data(user_id=user_id)
+ new_token_ref = self.token_api.get_token(new_token_id)
+ expected_user_token_list = [
+ (valid_token_id, timeutils.isotime(valid_token_ref['expires'],
+ subsecond=True)),
+ (new_token_id, timeutils.isotime(new_token_ref['expires'],
+ subsecond=True))]
+ user_token_list = self.token_api.driver._store.get(user_key)
+ self.assertEqual(user_token_list, expected_user_token_list)
+
class KvsTrust(tests.TestCase, test_backend.TrustTests):
def setUp(self):
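
test_cleanup_user_index_on_create compares the raw user index against timeutils.isotime(..., subsecond=True) strings, the same representation create_token() writes into the index. A quick round-trip illustration for reference (assuming the oslo-incubator timeutils vendored as keystone.openstack.common.timeutils):

    import datetime

    from keystone.openstack.common import timeutils

    expires = timeutils.utcnow() + datetime.timedelta(minutes=1)
    expires_str = timeutils.isotime(expires, subsecond=True)
    # e.g. '2014-01-13T17:54:00.123456Z' -- this exact string is what ends up
    # in the 'usertokens-<user_id>' index entry for the token.
    parsed = timeutils.normalize_time(timeutils.parse_isotime(expires_str))
    # subsecond=True keeps microsecond precision through the round trip.
    assert abs((parsed - expires).total_seconds()) < 1
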
diff --git a/keystone/tests/test_backend_memcache.py b/keystone/tests/test_backend_memcache.py
index 0641bb7e3..5f92b83f4 100644
--- a/keystone/tests/test_backend_memcache.py
+++ b/keystone/tests/test_backend_memcache.py
@@ -114,7 +114,11 @@ class MemcacheClient(object):
class MemcacheToken(tests.TestCase, test_backend.TokenTests):
def setUp(self):
super(MemcacheToken, self).setUp()
+ # Use the memcache backend for the token driver.
+ self.opt_in_group('token',
+ driver='keystone.token.backends.memcache.Token')
self.load_backends()
+ # Override the memcache client with the "dummy" client.
fake_client = MemcacheClient()
self.token_man = token.Manager()
self.token_man.driver = token_memcache.Token(client=fake_client)
diff --git a/keystone/token/backends/kvs.py b/keystone/token/backends/kvs.py
index c3824b0bf..dbbf4f332 100644
--- a/keystone/token/backends/kvs.py
+++ b/keystone/token/backends/kvs.py
@@ -1,5 +1,6 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
+# -*- coding: utf-8 -*-
+# Copyright 2013 Metacloud, Inc.
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -14,56 +15,240 @@
# License for the specific language governing permissions and limitations
# under the License.
+from __future__ import absolute_import
import copy
from keystone.common import kvs
+from keystone import config
from keystone import exception
from keystone.openstack.common import log
from keystone.openstack.common import timeutils
from keystone import token
+
+CONF = config.CONF
LOG = log.getLogger(__name__)
-class Token(kvs.Base, token.Driver):
- """kvs backend for tokens is deprecated.
+class Token(token.Driver):
+ """KeyValueStore backend for tokens.
- Deprecated in Havana and will be removed in Icehouse, as this backend
- is not production grade.
+ This is the base implementation for any/all key-value-stores (e.g.
+ memcached) for the Token backend. It is recommended to only use the base
+ in-memory implementation for testing purposes.
"""
- def __init__(self, *args, **kw):
- super(Token, self).__init__(*args, **kw)
- LOG.warn(_("kvs token backend is DEPRECATED. Use "
- "keystone.token.backends.sql or "
- "keystone.token.backend.memcache instead."))
+ revocation_key = 'revocation-list'
+ kvs_backend = 'openstack.kvs.Memory'
+
+ def __init__(self, backing_store=None, **kwargs):
+ super(Token, self).__init__()
+ self._store = kvs.get_key_value_store('token-driver')
+ if backing_store is not None:
+ self.kvs_backend = backing_store
+ self._store.configure(backing_store=self.kvs_backend, **kwargs)
+ if self.__class__ == Token:
+ # NOTE(morganfainberg): Only warn if the base KVS implementation
+ # is instantiated.
+ LOG.warn(_('It is recommended to only use the base '
+ 'key-value-store implementation for the token driver '
+ 'for testing purposes. '
+ 'Please use keystone.token.backends.memcache.Token '
+ 'or keystone.token.backends.sql.Token instead.'))
+
+ def _prefix_token_id(self, token_id):
+ return 'token-%s' % token_id.encode('utf-8')
+
+ def _prefix_user_id(self, user_id):
+ return 'usertokens-%s' % user_id.encode('utf-8')
+
+ def _get_key_or_default(self, key, default=None):
+ try:
+ return self._store.get(key)
+ except exception.NotFound:
+ return default
+
+ def _get_key(self, key):
+ return self._store.get(key)
+
+ def _set_key(self, key, value, lock=None):
+ self._store.set(key, value, lock)
+
+ def _delete_key(self, key):
+ return self._store.delete(key)
- # Public interface
def get_token(self, token_id):
+ ptk = self._prefix_token_id(token_id)
try:
- ref = self.db.get('token-%s' % token_id)
- return copy.deepcopy(ref)
- except Exception:
- # On any issues here, Token is not found.
+ token_ref = self._get_key(ptk)
+ except exception.NotFound:
raise exception.TokenNotFound(token_id=token_id)
+ return token_ref
+
def create_token(self, token_id, data):
+ """Create a token by id and data.
+
+ It is assumed the caller has performed data validation on the "data"
+ parameter.
+ """
data_copy = copy.deepcopy(data)
- data_copy['id'] = token_id
+ ptk = self._prefix_token_id(token_id)
if not data_copy.get('expires'):
data_copy['expires'] = token.default_expire_time()
if not data_copy.get('user_id'):
data_copy['user_id'] = data_copy['user']['id']
- self.db.set('token-%s' % token_id, data_copy)
- return copy.deepcopy(data_copy)
+
+ # NOTE(morganfainberg): for ease of manipulating the data without
+ # concern about the backend, always store the value(s) in the
+ # index as the isotime (string) version so this is where the string is
+ # built.
+ expires_str = timeutils.isotime(data_copy['expires'], subsecond=True)
+
+ self._set_key(ptk, data_copy)
+ user_id = data['user']['id']
+ user_key = self._prefix_user_id(user_id)
+ self._update_user_token_list(user_key, token_id, expires_str)
+ if CONF.trust.enabled and data.get('trust_id'):
+ # NOTE(morganfainberg): If trusts are enabled and this is a trust
+ # scoped token, we add the token to the trustee list as well. This
+ # allows password changes of the trustee to also expire the token.
+ # There is no harm in placing the token in multiple lists, as
+ # _list_tokens is smart enough to handle almost any case of
+ # valid/invalid/expired for a given token.
+ token_data = data_copy['token_data']
+ if data_copy['token_version'] == token.provider.V2:
+ trustee_user_id = token_data['access']['trust'][
+ 'trustee_user_id']
+ elif data_copy['token_version'] == token.provider.V3:
+ trustee_user_id = token_data['OS-TRUST:trust'][
+ 'trustee_user_id']
+ else:
+ raise token.provider.UnsupportedTokenVersionException(
+ _('Unknown token version %s') %
+ data_copy.get('token_version'))
+
+ trustee_key = self._prefix_user_id(trustee_user_id)
+ self._update_user_token_list(trustee_key, token_id, expires_str)
+
+ return data_copy
+
+ def _get_user_token_list_with_expiry(self, user_key):
+ """Return a list of tuples in the format (token_id, token_expiry) for
+ the user_key.
+ """
+ return self._get_key_or_default(user_key, default=[])
+
+ def _get_user_token_list(self, user_key):
+ """Return a list of token_ids for the user_key."""
+ token_list = self._get_user_token_list_with_expiry(user_key)
+ # Each element is a tuple of (token_id, token_expiry). Most code does
+ # not care about the expiry, it is stripped out and only a
+ # list of token_ids are returned.
+ return [t[0] for t in token_list]
+
+ def _update_user_token_list(self, user_key, token_id, expires_isotime_str):
+ current_time = self._get_current_time()
+ revoked_token_list = set([t['id'] for t in
+ self.list_revoked_tokens()])
+
+ with self._store.get_lock(user_key) as lock:
+ filtered_list = []
+ token_list = self._get_user_token_list_with_expiry(user_key)
+ for item in token_list:
+ try:
+ item_id, expires = self._format_token_index_item(item)
+ except (ValueError, TypeError):
+ # NOTE(morganfainberg): Skip on expected errors
+ # possibilities from the `_format_token_index_item` method.
+ continue
+
+ if expires < current_time:
+ LOG.debug(_('Token `%(token_id)s` is expired, removing '
+ 'from `%(user_key)s`.'),
+ {'token_id': item_id, 'user_key': user_key})
+ continue
+
+ if item_id in revoked_token_list:
+ # NOTE(morganfainberg): If the token has been revoked, it
+ # can safely be removed from this list. This helps to keep
+ # the user_token_list as reasonably small as possible.
+ LOG.debug(_('Token `%(token_id)s` is revoked, removing '
+ 'from `%(user_key)s`.'),
+ {'token_id': item_id, 'user_key': user_key})
+ continue
+ filtered_list.append(item)
+ filtered_list.append((token_id, expires_isotime_str))
+ self._set_key(user_key, filtered_list, lock)
+ return filtered_list
+
+ def _get_current_time(self):
+ return timeutils.normalize_time(timeutils.utcnow())
+
+ def _add_to_revocation_list(self, data, lock):
+ filtered_list = []
+ revoked_token_data = {}
+
+ current_time = self._get_current_time()
+ expires = data['expires']
+
+ if isinstance(expires, basestring):
+ expires = timeutils.parse_isotime(expires)
+
+ expires = timeutils.normalize_time(expires)
+
+ if expires < current_time:
+ LOG.warning(_('Token `%s` is expired, not adding to the '
+ 'revocation list.'), data['id'])
+ return
+
+ revoked_token_data['expires'] = timeutils.isotime(expires,
+ subsecond=True)
+ revoked_token_data['id'] = data['id']
+
+ token_list = self._get_key_or_default(self.revocation_key, default=[])
+ if not isinstance(token_list, list):
+ # NOTE(morganfainberg): In the case that the revocation list is not
+ # in a format we understand, reinitialize it. This is an attempt to
+ # not allow the revocation list to be completely broken if
+ # somehow the key is changed outside of keystone (e.g. memcache
+ # that is shared by multiple applications). Logging occurs at error
+ # level so that the cloud administrators have some awareness that
+ # the revocation_list needed to be cleared out. In all, this should
+ # be recoverable. Keystone cannot control external applications
+ # from changing a key in some backends, however, it is possible to
+ # gracefully handle and notify of this event.
+ LOG.error(_('Reinitializing revocation list due to error '
+ 'in loading revocation list from backend. '
+ 'Expected `list` type got `%(type)s`. Old '
+ 'revocation list data: %(list)r'),
+ {'type': type(token_list), 'list': token_list})
+ token_list = []
+
+ # NOTE(morganfainberg): on revocation, cleanup the expired entries, try
+ # to keep the list of tokens revoked at the minimum.
+ for token_data in token_list:
+ try:
+ expires_at = timeutils.normalize_time(
+ timeutils.parse_isotime(token_data['expires']))
+ except ValueError:
+ LOG.warning(_('Removing `%s` from revocation list due to '
+ 'invalid expires data in revocation list.'),
+ token_data.get('id', 'INVALID_TOKEN_DATA'))
+ continue
+ if expires_at > current_time:
+ filtered_list.append(token_data)
+ filtered_list.append(revoked_token_data)
+ self._set_key(self.revocation_key, filtered_list, lock)
def delete_token(self, token_id):
- try:
- token_ref = self.get_token(token_id)
- self.db.delete('token-%s' % token_id)
- self.db.set('revoked-token-%s' % token_id, token_ref)
- except exception.NotFound:
- raise exception.TokenNotFound(token_id=token_id)
+ # Test for existence
+ with self._store.get_lock(self.revocation_key) as lock:
+ data = self.get_token(token_id)
+ ptk = self._prefix_token_id(token_id)
+ result = self._delete_key(ptk)
+ self._add_to_revocation_list(data, lock)
+ return result
def delete_tokens(self, user_id, tenant_id=None, trust_id=None,
consumer_id=None):
@@ -74,90 +259,85 @@ class Token(kvs.Base, token.Driver):
consumer_id=consumer_id,
)
- def is_not_expired(self, now, ref):
- return not ref.get('expires') and ref.get('expires') < now
+ def _format_token_index_item(self, item):
+ try:
+ token_id, expires = item
+ except (TypeError, ValueError):
+ LOG.debug(_('Invalid token entry expected tuple of '
+ '`(<token_id>, <expires>)` got: `%(item)r`'),
+ dict(item=item))
+ raise
- def is_expired(self, now, ref):
- return ref.get('expires') and ref.get('expires') < now
+ try:
+ expires = timeutils.normalize_time(
+ timeutils.parse_isotime(expires))
+ except ValueError:
+ LOG.debug(_('Invalid expires time on token `%(token_id)s`:'
+ ' %(expires)r'),
+ dict(token_id=token_id, expires=expires))
+ raise
+ return token_id, expires
- def trust_matches(self, trust_id, ref):
- return ref.get('trust_id') and ref['trust_id'] == trust_id
+ def _token_match_tenant(self, token_ref, tenant_id):
+ if token_ref.get('tenant'):
+ return token_ref['tenant'].get('id') == tenant_id
+ return False
- def _list_tokens_for_trust(self, trust_id):
- tokens = []
- now = timeutils.utcnow()
- for token, ref in self.db.items():
- if not token.startswith('token-') or self.is_expired(now, ref):
- continue
- if self.trust_matches(trust_id, ref):
- tokens.append(token.split('-', 1)[1])
- return tokens
+ def _token_match_trust(self, token_ref, trust_id):
+ if not token_ref.get('trust_id'):
+ return False
+ return token_ref['trust_id'] == trust_id
- def _consumer_matches(self, consumer_id, token_ref_dict):
- if consumer_id is None:
- return True
- else:
- if 'token_data' in token_ref_dict:
- token_data = token_ref_dict.get('token_data')
- if 'token' in token_data:
- token = token_data.get('token')
- oauth = token.get('OS-OAUTH1')
- if oauth and oauth.get('consumer_id') == consumer_id:
- return True
+ def _token_match_consumer(self, token_ref, consumer_id):
+ try:
+ oauth = token_ref['token_data']['token']['OS-OAUTH1']
+ return oauth.get('consumer_id') == consumer_id
+ except KeyError:
return False
- def _list_tokens_for_consumer(self, consumer_id):
+ def _list_tokens(self, user_id, tenant_id=None, trust_id=None,
+ consumer_id=None):
tokens = []
- now = timeutils.utcnow()
- for token, ref in self.db.items():
- if not token.startswith('token-') or self.is_expired(now, ref):
+ user_key = self._prefix_user_id(user_id)
+ token_list = self._get_user_token_list_with_expiry(user_key)
+ current_time = self._get_current_time()
+ for item in token_list:
+ try:
+ token_id, expires = self._format_token_index_item(item)
+ except (TypeError, ValueError):
+ # NOTE(morganfainberg): Skip on expected error possibilities
+ # from the `_format_token_index_item` method.
continue
- if self._consumer_matches(consumer_id, ref):
- tokens.append(token.split('-', 1)[1])
- return tokens
-
- def _list_tokens_for_user(self, user_id, tenant_id=None):
- def user_matches(user_id, ref):
- return ref.get('user') and ref['user'].get('id') == user_id
- def tenant_matches(tenant_id, ref):
- return ((tenant_id is None) or
- (ref.get('tenant') and
- ref['tenant'].get('id') == tenant_id))
+ if expires < current_time:
+ continue
- tokens = []
- now = timeutils.utcnow()
- for token, ref in self.db.items():
- if not token.startswith('token-') or self.is_expired(now, ref):
+ try:
+ token_ref = self.get_token(token_id)
+ except exception.TokenNotFound:
+ # NOTE(morganfainberg): Token doesn't exist, skip it.
continue
- else:
- if (user_matches(user_id, ref) and
- tenant_matches(tenant_id, ref)):
- tokens.append(token.split('-', 1)[1])
- return tokens
+ if token_ref:
+ if tenant_id is not None:
+ if not self._token_match_tenant(token_ref, tenant_id):
+ continue
+ if trust_id is not None:
+ if not self._token_match_trust(token_ref, trust_id):
+ continue
+ if consumer_id is not None:
+ if not self._token_match_consumer(token_ref, consumer_id):
+ continue
- def _list_tokens(self, user_id, tenant_id=None, trust_id=None,
- consumer_id=None):
- if trust_id:
- return self._list_tokens_for_trust(trust_id)
- if consumer_id:
- return self._list_tokens_for_consumer(consumer_id)
- else:
- return self._list_tokens_for_user(user_id, tenant_id)
+ tokens.append(token_id)
+ return tokens
def list_revoked_tokens(self):
- tokens = []
- for token, token_ref in self.db.items():
- if not token.startswith('revoked-token-'):
- continue
- record = {}
- record['id'] = token_ref['id']
- record['expires'] = token_ref['expires']
- tokens.append(record)
- return tokens
+ revoked_token_list = self._get_key_or_default(self.revocation_key,
+ default=[])
+ if isinstance(revoked_token_list, list):
+ return revoked_token_list
+ return []
def flush_expired_tokens(self):
- now = timeutils.utcnow()
- for token, token_ref in self.db.items():
- if self.is_expired(now, token_ref):
- self.db.delete(token)
+ """Archive or delete tokens that have expired."""
+ raise exception.NotImplemented()
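
Taken together, the public interface behaves as before. A minimal usage sketch against the new driver (assuming a configured keystone test environment, since the constructor reads CONF and the base in-memory backend logs a testing-only warning):

    import uuid

    from keystone import exception
    from keystone.token.backends import kvs as token_kvs

    driver = token_kvs.Token()  # base in-memory KVS backend; testing only
    token_id = uuid.uuid4().hex
    data = {'id': token_id, 'user': {'id': 'testuserid'}}

    ref = driver.create_token(token_id, data)
    # The ref is stored under 'token-<token_id>' and the per-user index
    # 'usertokens-testuserid' gains a (token_id, expires) entry.
    assert driver.get_token(token_id)['id'] == token_id
    assert token_id in driver._list_tokens('testuserid')

    driver.delete_token(token_id)
    # The token key is removed and the ref is appended to 'revocation-list'.
    assert any(t['id'] == token_id for t in driver.list_revoked_tokens())
    try:
        driver.get_token(token_id)
    except exception.TokenNotFound:
        pass  # expected: deleted tokens are no longer retrievable
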
diff --git a/keystone/token/core.py b/keystone/token/core.py
index a37b35420..2c0aec1fb 100644
--- a/keystone/token/core.py
+++ b/keystone/token/core.py
@@ -129,6 +129,11 @@ class Manager(manager.Manager):
raise exception.TokenNotFound(token_id=token_id)
def get_token(self, token_id):
+ if not token_id:
+ # NOTE(morganfainberg): There are cases when the
+ # context['token_id'] will in-fact be None. This also saves
+ # a round-trip to the backend if we don't have a token_id.
+ raise exception.TokenNotFound(token_id='')
unique_id = self.unique_id(token_id)
token_ref = self._get_token(unique_id)
# NOTE(morganfainberg): Lift expired checking to the manager, there is