Remove padding from Fernet tokens
Fernet tokens were previously percent encoded. This could cause issues for clients regardless of whether they performed their own percent encoding. By removing the base64 padding when issuing a token and re-establishing it when we validate the token, we no longer present that problem to clients. This also shortens the length of a Fernet token. Change-Id: I674bad86ccc9027ac3b365c10b3b142fc9d73c17 Related-Bug: 1433372 Closes-Bug: 1491926
This commit is contained in:
parent
21c83219af
commit
f3e3a653f9
|
@ -2401,7 +2401,7 @@ class FernetFederatedTokenTests(FederationTests, FederatedSetupMixin):
|
||||||
def test_federated_unscoped_token_with_multiple_groups(self):
|
def test_federated_unscoped_token_with_multiple_groups(self):
|
||||||
assertion = 'ANOTHER_CUSTOMER_ASSERTION'
|
assertion = 'ANOTHER_CUSTOMER_ASSERTION'
|
||||||
resp = self._issue_unscoped_token(assertion=assertion)
|
resp = self._issue_unscoped_token(assertion=assertion)
|
||||||
self.assertEqual(232, len(resp.headers['X-Subject-Token']))
|
self.assertEqual(226, len(resp.headers['X-Subject-Token']))
|
||||||
self.assertValidMappedUser(resp.json_body['token'])
|
self.assertValidMappedUser(resp.json_body['token'])
|
||||||
|
|
||||||
def test_validate_federated_unscoped_token(self):
|
def test_validate_federated_unscoped_token(self):
|
||||||
|
|
|
@ -10,6 +10,7 @@
|
||||||
# License for the specific language governing permissions and limitations
|
# License for the specific language governing permissions and limitations
|
||||||
# under the License.
|
# under the License.
|
||||||
|
|
||||||
|
import base64
|
||||||
import datetime
|
import datetime
|
||||||
import hashlib
|
import hashlib
|
||||||
import os
|
import os
|
||||||
|
@ -56,6 +57,23 @@ class TestFernetTokenProvider(unit.TestCase):
|
||||||
uuid.uuid4().hex)
|
uuid.uuid4().hex)
|
||||||
|
|
||||||
|
|
||||||
|
class TestTokenFormatter(unit.TestCase):
|
||||||
|
def test_restore_padding(self):
|
||||||
|
# 'a' will result in '==' padding, 'aa' will result in '=' padding, and
|
||||||
|
# 'aaa' will result in no padding.
|
||||||
|
strings_to_test = ['a', 'aa', 'aaa']
|
||||||
|
|
||||||
|
for string in strings_to_test:
|
||||||
|
encoded_string = base64.urlsafe_b64encode(string)
|
||||||
|
encoded_str_without_padding = encoded_string.rstrip('=')
|
||||||
|
self.assertFalse(encoded_str_without_padding.endswith('='))
|
||||||
|
encoded_str_with_padding_restored = (
|
||||||
|
token_formatters.TokenFormatter.restore_padding(
|
||||||
|
encoded_str_without_padding)
|
||||||
|
)
|
||||||
|
self.assertEqual(encoded_string, encoded_str_with_padding_restored)
|
||||||
|
|
||||||
|
|
||||||
class TestPayloads(unit.TestCase):
|
class TestPayloads(unit.TestCase):
|
||||||
def test_uuid_hex_to_byte_conversions(self):
|
def test_uuid_hex_to_byte_conversions(self):
|
||||||
payload_cls = token_formatters.BasePayload
|
payload_cls = token_formatters.BasePayload
|
||||||
|
|
|
@ -21,7 +21,7 @@ from oslo_config import cfg
|
||||||
from oslo_log import log
|
from oslo_log import log
|
||||||
from oslo_utils import timeutils
|
from oslo_utils import timeutils
|
||||||
import six
|
import six
|
||||||
from six.moves import map, urllib
|
from six.moves import map
|
||||||
|
|
||||||
from keystone.auth import plugins as auth_plugins
|
from keystone.auth import plugins as auth_plugins
|
||||||
from keystone.common import utils as ks_utils
|
from keystone.common import utils as ks_utils
|
||||||
|
@ -67,11 +67,14 @@ class TokenFormatter(object):
|
||||||
def pack(self, payload):
|
def pack(self, payload):
|
||||||
"""Pack a payload for transport as a token."""
|
"""Pack a payload for transport as a token."""
|
||||||
# base64 padding (if any) is not URL-safe
|
# base64 padding (if any) is not URL-safe
|
||||||
return urllib.parse.quote(self.crypto.encrypt(payload))
|
return self.crypto.encrypt(payload).rstrip('=')
|
||||||
|
|
||||||
def unpack(self, token):
|
def unpack(self, token):
|
||||||
"""Unpack a token, and validate the payload."""
|
"""Unpack a token, and validate the payload."""
|
||||||
token = urllib.parse.unquote(six.binary_type(token))
|
token = six.binary_type(token)
|
||||||
|
|
||||||
|
# Restore padding on token before decoding it
|
||||||
|
token = TokenFormatter.restore_padding(token)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
return self.crypto.decrypt(token)
|
return self.crypto.decrypt(token)
|
||||||
|
@ -79,6 +82,21 @@ class TokenFormatter(object):
|
||||||
raise exception.ValidationError(
|
raise exception.ValidationError(
|
||||||
_('This is not a recognized Fernet token'))
|
_('This is not a recognized Fernet token'))
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def restore_padding(cls, token):
|
||||||
|
"""Restore padding based on token size.
|
||||||
|
|
||||||
|
:param token: token to restore padding on
|
||||||
|
:returns: token with correct padding
|
||||||
|
|
||||||
|
"""
|
||||||
|
# Re-inflate the padding
|
||||||
|
mod_returned = len(token) % 4
|
||||||
|
if mod_returned:
|
||||||
|
missing_padding = 4 - mod_returned
|
||||||
|
token += b'=' * missing_padding
|
||||||
|
return token
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def creation_time(cls, fernet_token):
|
def creation_time(cls, fernet_token):
|
||||||
"""Returns the creation time of a valid Fernet token."""
|
"""Returns the creation time of a valid Fernet token."""
|
||||||
|
@ -86,10 +104,10 @@ class TokenFormatter(object):
|
||||||
# (pypi/cryptography will refuse to operate on Unicode input)
|
# (pypi/cryptography will refuse to operate on Unicode input)
|
||||||
fernet_token = six.binary_type(fernet_token)
|
fernet_token = six.binary_type(fernet_token)
|
||||||
|
|
||||||
# the base64 padding on fernet tokens is made URL-safe
|
# Restore padding on token before decoding it
|
||||||
fernet_token = urllib.parse.unquote(fernet_token)
|
fernet_token = TokenFormatter.restore_padding(fernet_token)
|
||||||
|
|
||||||
# fernet tokens are base64 encoded and the padding made URL-safe
|
# fernet tokens are base64 encoded, so we need to unpack them first
|
||||||
token_bytes = base64.urlsafe_b64decode(fernet_token)
|
token_bytes = base64.urlsafe_b64decode(fernet_token)
|
||||||
|
|
||||||
# slice into the byte array to get just the timestamp
|
# slice into the byte array to get just the timestamp
|
||||||
|
|
Loading…
Reference in New Issue