Remove Api V1 Support-3

Remove unit tests for the V1 APIs.

Change-Id: I3acfc43519286a0550dee0c94a194eb4f1c2b849
hwang 2025-01-07 12:30:00 -08:00
parent c982ee6586
commit a68f38b6e9
11 changed files with 0 additions and 1936 deletions

@@ -1,43 +0,0 @@
# Copyright (c) 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test Auth."""
import falcon
from falcon import testing
from keystonemiddleware import auth_token
from oslo_utils import uuidutils
from zaqar.tests.unit.transport.wsgi import base
class TestAuth(base.V1Base):
config_file = 'keystone_auth.conf'
def setUp(self):
super(TestAuth, self).setUp()
self.headers = {'Client-ID': uuidutils.generate_uuid()}
def test_auth_install(self):
self.assertIsInstance(self.app._auth_app, auth_token.AuthProtocol)
def test_non_authenticated(self):
env = testing.create_environ(self.url_prefix + '/480924/queues/',
method='GET',
headers=self.headers)
self.app(env, self.srmock)
self.assertEqual(falcon.HTTP_401, self.srmock.status)

@@ -1,257 +0,0 @@
# Copyright (c) 2013 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
from unittest import mock
import ddt
import falcon
from oslo_serialization import jsonutils
from oslo_utils import timeutils
from oslo_utils import uuidutils
from testtools import matchers
from zaqar import tests as testing
from zaqar.tests.unit.transport.wsgi import base
@ddt.ddt
class TestClaimsMongoDB(base.V1Base):
config_file = 'wsgi_mongodb.conf'
@testing.requires_mongodb
def setUp(self):
super(TestClaimsMongoDB, self).setUp()
self.project_id = '480924'
self.queue_path = self.url_prefix + '/queues/fizbit'
self.claims_path = self.queue_path + '/claims'
self.messages_path = self.queue_path + '/messages'
doc = '{"_ttl": 60}'
self.simulate_put(self.queue_path, self.project_id, body=doc)
self.assertEqual(falcon.HTTP_201, self.srmock.status)
doc = jsonutils.dumps([{'body': 239, 'ttl': 300}] * 10)
self.simulate_post(self.queue_path + '/messages', self.project_id,
body=doc, headers={'Client-ID':
uuidutils.generate_uuid()})
self.assertEqual(falcon.HTTP_201, self.srmock.status)
def tearDown(self):
storage = self.boot.storage._storage
control = self.boot.control
connection = storage.connection
connection.drop_database(control.queues_database)
for db in storage.message_databases:
connection.drop_database(db)
self.simulate_delete(self.queue_path, self.project_id)
super(TestClaimsMongoDB, self).tearDown()
@ddt.data(None, '[', '[]', '{}', '.', '"fail"')
def test_bad_claim(self, doc):
self.simulate_post(self.claims_path, self.project_id, body=doc)
self.assertEqual(falcon.HTTP_400, self.srmock.status)
href = self._get_a_claim()
self.simulate_patch(href, self.project_id, body=doc)
self.assertEqual(falcon.HTTP_400, self.srmock.status)
def test_exceeded_claim(self):
self.simulate_post(self.claims_path, self.project_id,
body='{"ttl": 100, "grace": 60}',
query_string='limit=21')
self.assertEqual(falcon.HTTP_400, self.srmock.status)
@ddt.data((-1, -1), (59, 60), (60, 59), (60, 43201), (43201, 60))
def test_unacceptable_ttl_or_grace(self, ttl_grace):
ttl, grace = ttl_grace
self.simulate_post(self.claims_path, self.project_id,
body=jsonutils.dumps({'ttl': ttl, 'grace': grace}))
self.assertEqual(falcon.HTTP_400, self.srmock.status)
@ddt.data(-1, 59, 43201)
def test_unacceptable_new_ttl(self, ttl):
href = self._get_a_claim()
self.simulate_patch(href, self.project_id,
body=jsonutils.dumps({'ttl': ttl}))
self.assertEqual(falcon.HTTP_400, self.srmock.status)
def _get_a_claim(self):
doc = '{"ttl": 100, "grace": 60}'
self.simulate_post(self.claims_path, self.project_id, body=doc)
return self.srmock.headers_dict['Location']
def test_lifecycle(self):
doc = '{"ttl": 100, "grace": 60}'
# First, claim some messages
body = self.simulate_post(self.claims_path, self.project_id, body=doc)
self.assertEqual(falcon.HTTP_201, self.srmock.status)
claimed = jsonutils.loads(body[0])
claim_href = self.srmock.headers_dict['Location']
message_href, params = claimed[0]['href'].split('?')
# No more messages to claim
self.simulate_post(self.claims_path, self.project_id, body=doc,
query_string='limit=3')
self.assertEqual(falcon.HTTP_204, self.srmock.status)
headers = {
'Client-ID': uuidutils.generate_uuid(),
}
# Listing messages, by default, won't include claimed
body = self.simulate_get(self.messages_path, self.project_id,
headers=headers)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
# Include claimed messages this time
body = self.simulate_get(self.messages_path, self.project_id,
query_string='include_claimed=true',
headers=headers)
listed = jsonutils.loads(body[0])
self.assertEqual(falcon.HTTP_200, self.srmock.status)
self.assertEqual(len(claimed), len(listed['messages']))
now = timeutils.utcnow() + datetime.timedelta(seconds=10)
timeutils_utcnow = 'oslo_utils.timeutils.utcnow'
with mock.patch(timeutils_utcnow) as mock_utcnow:
mock_utcnow.return_value = now
body = self.simulate_get(claim_href, self.project_id)
claim = jsonutils.loads(body[0])
self.assertEqual(falcon.HTTP_200, self.srmock.status)
self.assertEqual(claim_href,
self.srmock.headers_dict['Content-Location'])
self.assertEqual(100, claim['ttl'])
# NOTE(cpp-cabrera): verify that claim age is non-negative
self.assertThat(claim['age'], matchers.GreaterThan(-1))
# Try to delete the message without submitting a claim_id
self.simulate_delete(message_href, self.project_id)
self.assertEqual(falcon.HTTP_403, self.srmock.status)
# Delete the message and its associated claim
self.simulate_delete(message_href, self.project_id,
query_string=params)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
# Try to get it from the wrong project
self.simulate_get(message_href, 'bogus_project', query_string=params)
self.assertEqual(falcon.HTTP_404, self.srmock.status)
# Get the message
self.simulate_get(message_href, self.project_id, query_string=params)
self.assertEqual(falcon.HTTP_404, self.srmock.status)
# Update the claim
new_claim_ttl = '{"ttl": 60}'
creation = timeutils.utcnow()
self.simulate_patch(claim_href, self.project_id, body=new_claim_ttl)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
# Get the claimed messages (again)
body = self.simulate_get(claim_href, self.project_id)
query = timeutils.utcnow()
claim = jsonutils.loads(body[0])
message_href, params = claim['messages'][0]['href'].split('?')
self.assertEqual(60, claim['ttl'])
estimated_age = timeutils.delta_seconds(creation, query)
self.assertGreater(estimated_age, claim['age'])
# Delete the claim
self.simulate_delete(claim['href'], 'bad_id')
self.assertEqual(falcon.HTTP_204, self.srmock.status)
self.simulate_delete(claim['href'], self.project_id)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
# Try to delete a message with an invalid claim ID
self.simulate_delete(message_href, self.project_id,
query_string=params)
self.assertEqual(falcon.HTTP_400, self.srmock.status)
# Make sure it wasn't deleted!
self.simulate_get(message_href, self.project_id, query_string=params)
self.assertEqual(falcon.HTTP_200, self.srmock.status)
# Try to get a claim that doesn't exist
self.simulate_get(claim['href'])
self.assertEqual(falcon.HTTP_404, self.srmock.status)
# Try to update a claim that doesn't exist
self.simulate_patch(claim['href'], body=doc)
self.assertEqual(falcon.HTTP_404, self.srmock.status)
def test_post_claim_nonexistent_queue(self):
path = self.url_prefix + '/queues/nonexistent/claims'
self.simulate_post(path, self.project_id,
body='{"ttl": 100, "grace": 60}')
self.assertEqual(falcon.HTTP_204, self.srmock.status)
def test_get_claim_nonexistent_queue(self):
path = self.url_prefix + '/queues/nonexistent/claims/aaabbbba'
self.simulate_get(path)
self.assertEqual(falcon.HTTP_404, self.srmock.status)
# NOTE(cpp-cabrera): regression test against bug #1203842
def test_get_nonexistent_claim_404s(self):
self.simulate_get(self.claims_path + '/a')
self.assertEqual(falcon.HTTP_404, self.srmock.status)
def test_delete_nonexistent_claim_204s(self):
self.simulate_delete(self.claims_path + '/a')
self.assertEqual(falcon.HTTP_204, self.srmock.status)
def test_patch_nonexistent_claim_404s(self):
patch_data = jsonutils.dumps({'ttl': 100})
self.simulate_patch(self.claims_path + '/a', body=patch_data)
self.assertEqual(falcon.HTTP_404, self.srmock.status)
class TestClaimsFaultyDriver(base.V1BaseFaulty):
config_file = 'wsgi_faulty.conf'
def test_simple(self):
project_id = '480924'
claims_path = self.url_prefix + '/queues/fizbit/claims'
doc = '{"ttl": 100, "grace": 60}'
self.simulate_post(claims_path, project_id, body=doc)
self.assertEqual(falcon.HTTP_503, self.srmock.status)
self.simulate_get(claims_path + '/nichts', project_id)
self.assertEqual(falcon.HTTP_503, self.srmock.status)
self.simulate_patch(claims_path + '/nichts', project_id, body=doc)
self.assertEqual(falcon.HTTP_503, self.srmock.status)
self.simulate_delete(claims_path + '/foo', project_id)
self.assertEqual(falcon.HTTP_503, self.srmock.status)

@@ -1,99 +0,0 @@
# Copyright (c) 2013 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib
import falcon
from oslo_serialization import jsonutils
from oslo_utils import uuidutils
from zaqar import storage
from zaqar.tests.unit.transport.wsgi import base
class TestDefaultLimits(base.V1Base):
config_file = 'wsgi_mongodb_default_limits.conf'
def setUp(self):
super(TestDefaultLimits, self).setUp()
self.queue_path = self.url_prefix + '/queues'
self.q1_queue_path = self.queue_path + '/' + uuidutils.generate_uuid()
self.messages_path = self.q1_queue_path + '/messages'
self.claims_path = self.q1_queue_path + '/claims'
self.simulate_put(self.q1_queue_path)
def tearDown(self):
self.simulate_delete(self.queue_path)
super(TestDefaultLimits, self).tearDown()
def test_queue_listing(self):
# 2 queues to list
self.addCleanup(self.simulate_delete, self.queue_path + '/q2')
self.simulate_put(self.queue_path + '/q2')
self.assertEqual(falcon.HTTP_201, self.srmock.status)
with self._prepare_queues(storage.DEFAULT_QUEUES_PER_PAGE + 1):
result = self.simulate_get(self.queue_path)
self.assertEqual(falcon.HTTP_200, self.srmock.status)
queues = jsonutils.loads(result[0])['queues']
self.assertEqual(storage.DEFAULT_QUEUES_PER_PAGE, len(queues))
def test_message_listing(self):
self._prepare_messages(storage.DEFAULT_MESSAGES_PER_PAGE + 1)
result = self.simulate_get(self.messages_path,
headers={'Client-ID':
uuidutils.generate_uuid()})
self.assertEqual(falcon.HTTP_200, self.srmock.status)
messages = jsonutils.loads(result[0])['messages']
self.assertEqual(storage.DEFAULT_MESSAGES_PER_PAGE, len(messages))
def test_claim_creation(self):
self._prepare_messages(storage.DEFAULT_MESSAGES_PER_CLAIM + 1)
result = self.simulate_post(self.claims_path,
body='{"ttl": 60, "grace": 60}')
self.assertEqual(falcon.HTTP_201, self.srmock.status)
messages = jsonutils.loads(result[0])
self.assertEqual(storage.DEFAULT_MESSAGES_PER_CLAIM, len(messages))
@contextlib.contextmanager
def _prepare_queues(self, count):
queue_paths = [self.queue_path + '/multi-{0}'.format(i)
for i in range(count)]
for path in queue_paths:
self.simulate_put(path)
self.assertEqual(falcon.HTTP_201, self.srmock.status)
yield
for path in queue_paths:
self.simulate_delete(path)
def _prepare_messages(self, count):
doc = jsonutils.dumps([{'body': 239, 'ttl': 300}] * count)
self.simulate_post(self.messages_path, body=doc,
headers={'Client-ID': uuidutils.generate_uuid()})
self.assertEqual(falcon.HTTP_201, self.srmock.status)

@@ -1,33 +0,0 @@
# Copyright (c) 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import falcon
from zaqar.tests.unit.transport.wsgi import base
class TestHealth(base.V1Base):
config_file = 'wsgi_mongodb.conf'
def test_get(self):
response = self.simulate_get('/v1/health')
self.assertEqual(falcon.HTTP_204, self.srmock.status)
self.assertEqual([], response)
def test_head(self):
response = self.simulate_head('/v1/health')
self.assertEqual(falcon.HTTP_204, self.srmock.status)
self.assertEqual([], response)

@@ -1,57 +0,0 @@
# Copyright (c) 2013 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import falcon
from oslo_serialization import jsonutils
from urllib import parse as urlparse
from zaqar.tests.unit.transport.wsgi import base
class TestHomeDocument(base.V1Base):
config_file = 'wsgi_mongodb.conf'
def test_json_response(self):
body = self.simulate_get(self.url_prefix + '/')
self.assertEqual(falcon.HTTP_200, self.srmock.status)
content_type = self.srmock.headers_dict['Content-Type']
self.assertEqual('application/json-home', content_type)
try:
jsonutils.loads(body[0])
except ValueError:
self.fail('Home document is not valid JSON')
def test_href_template(self):
body = self.simulate_get(self.url_prefix + '/')
self.assertEqual(falcon.HTTP_200, self.srmock.status)
resp = jsonutils.loads(body[0])
queue_href_template = resp['resources']['rel/queue']['href-template']
path_1 = 'https://zaqar.example.com' + self.url_prefix
path_2 = 'https://zaqar.example.com' + self.url_prefix + '/'
# Verify all the href template start with the correct version prefix
for resource in list(resp['resources']):
self.assertTrue(resp['resources'][resource]['href-template'].
startswith(self.url_prefix))
url = urlparse.urljoin(path_1, queue_href_template)
expected = ('https://zaqar.example.com' + self.url_prefix +
'/queues/foo')
self.assertEqual(expected, url.format(queue_name='foo'))
url = urlparse.urljoin(path_2, queue_href_template)
self.assertEqual(expected, url.format(queue_name='foo'))

@@ -1,82 +0,0 @@
# Copyright (c) 2013 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import falcon
from falcon import testing
from oslo_serialization import jsonutils
from oslo_utils import uuidutils
from zaqar.tests.unit.transport.wsgi import base
class TestMediaType(base.V1Base):
config_file = 'wsgi_mongodb.conf'
def test_json_only_endpoints_with_wrong_accept_header(self):
endpoints = (
('GET', self.url_prefix + '/queues'),
('GET', self.url_prefix + '/queues/nonexistent/metadata'),
('GET', self.url_prefix + '/queues/nonexistent/stats'),
('POST', self.url_prefix + '/queues/nonexistent/messages'),
('GET', self.url_prefix + '/queues/nonexistent/messages/deadbeaf'),
('POST', self.url_prefix + '/queues/nonexistent/claims'),
('GET', self.url_prefix + '/queues/nonexistent/claims/0ad'),
('GET', self.url_prefix + '/health'),
)
for method, endpoint in endpoints:
headers = {
'Client-ID': uuidutils.generate_uuid(),
'Accept': 'application/xml',
}
env = testing.create_environ(endpoint,
method=method,
headers=headers)
self.app(env, self.srmock)
self.assertEqual(falcon.HTTP_406, self.srmock.status)
def test_request_with_body_and_urlencoded_contenttype_header_fails(self):
# NOTE(Eva-i): this test case makes sure wsgi 'before' hook
# "require_content_type_be_non_urlencoded" works to prevent
# bug/1547100.
eww_queue_path = self.url_prefix + '/queues/eww'
eww_queue_messages_path = eww_queue_path + '/messages'
sample_message = jsonutils.dumps([{'body': {'eww!'}, 'ttl': 200}])
bad_headers = {
'Client-ID': uuidutils.generate_uuid(),
'Content-Type': 'application/x-www-form-urlencoded',
}
# Create queue request with bad headers. Should still work, because it
# has no body.
self.simulate_put(eww_queue_path, headers=bad_headers)
self.addCleanup(self.simulate_delete, eww_queue_path,
headers=self.headers)
self.assertEqual(falcon.HTTP_201, self.srmock.status)
# Post message request with good headers. Should work.
self.simulate_post(eww_queue_messages_path, body=sample_message,
headers=self.headers)
self.assertEqual(falcon.HTTP_201, self.srmock.status)
# Post message request with bad headers. Should not work.
self.simulate_post(eww_queue_messages_path, body=sample_message,
headers=bad_headers)
self.assertEqual(falcon.HTTP_400, self.srmock.status)

@@ -1,505 +0,0 @@
# Copyright (c) 2013 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
from unittest import mock
import ddt
import falcon
from oslo_serialization import jsonutils
from oslo_utils import timeutils
from oslo_utils import uuidutils
from testtools import matchers
from zaqar import tests as testing
from zaqar.tests.unit.transport.wsgi import base
from zaqar.transport import validation
@ddt.ddt
class TestMessagesMongoDB(base.V1Base):
config_file = 'wsgi_mongodb.conf'
@testing.requires_mongodb
def setUp(self):
super(TestMessagesMongoDB, self).setUp()
if self.conf.pooling:
for i in range(4):
uri = "%s/%s" % (self.mongodb_url, str(i))
doc = {'weight': 100, 'uri': uri}
self.simulate_put(self.url_prefix + '/pools/' + str(i),
body=jsonutils.dumps(doc))
self.assertEqual(falcon.HTTP_201, self.srmock.status)
self.project_id = '7e55e1a7e'
# TODO(kgriffs): Add support in self.simulate_* for a "base path"
# so that we don't have to concatenate against self.url_prefix
# all over the place.
self.queue_path = self.url_prefix + '/queues/fizbit'
self.messages_path = self.queue_path + '/messages'
doc = '{"_ttl": 60}'
self.simulate_put(self.queue_path, self.project_id, body=doc)
# NOTE(kgriffs): Also register without a project for tests
# that do not specify a project.
#
# TODO(kgriffs): Should a project id always be required or
# automatically supplied in the simulate_* methods?
self.simulate_put(self.queue_path, body=doc)
self.headers = {
'Client-ID': uuidutils.generate_uuid(),
}
def tearDown(self):
self.simulate_delete(self.queue_path, self.project_id)
if self.conf.pooling:
for i in range(4):
self.simulate_delete(self.url_prefix + '/pools/' + str(i))
super(TestMessagesMongoDB, self).tearDown()
def _test_post(self, sample_messages):
sample_doc = jsonutils.dumps(sample_messages)
result = self.simulate_post(self.messages_path, self.project_id,
body=sample_doc, headers=self.headers)
self.assertEqual(falcon.HTTP_201, self.srmock.status)
result_doc = jsonutils.loads(result[0])
msg_ids = self._get_msg_ids(self.srmock.headers_dict)
self.assertEqual(len(sample_messages), len(msg_ids))
expected_resources = [str(self.messages_path + '/' + id)
for id in msg_ids]
self.assertEqual(expected_resources, result_doc['resources'])
# NOTE(kgriffs): As of the Icehouse release, drivers are
# required to either completely succeed, or completely fail
# to enqueue the entire batch of messages.
self.assertFalse(result_doc['partial'])
self.assertEqual(len(sample_messages), len(msg_ids))
lookup = dict([(m['ttl'], m['body']) for m in sample_messages])
# Test GET on the message resource directly
# NOTE(cpp-cabrera): force the passing of time to age a message
timeutils_utcnow = 'oslo_utils.timeutils.utcnow'
now = timeutils.utcnow() + datetime.timedelta(seconds=10)
with mock.patch(timeutils_utcnow) as mock_utcnow:
mock_utcnow.return_value = now
for msg_id in msg_ids:
message_uri = self.messages_path + '/' + msg_id
# Wrong project ID
self.simulate_get(message_uri, '777777')
self.assertEqual(falcon.HTTP_404, self.srmock.status)
# Correct project ID
result = self.simulate_get(message_uri, self.project_id)
self.assertEqual(falcon.HTTP_200, self.srmock.status)
self.assertEqual(message_uri,
self.srmock.headers_dict['Content-Location'])
# Check message properties
message = jsonutils.loads(result[0])
self.assertEqual(message_uri, message['href'])
self.assertEqual(lookup[message['ttl']], message['body'])
# no negative age
# NOTE(cpp-cabrera): testtools lacks GreaterThanEqual on py26
self.assertThat(message['age'],
matchers.GreaterThan(-1))
# Test bulk GET
query_string = 'ids=' + ','.join(msg_ids)
result = self.simulate_get(self.messages_path, self.project_id,
query_string=query_string)
self.assertEqual(falcon.HTTP_200, self.srmock.status)
result_doc = jsonutils.loads(result[0])
expected_ttls = set(m['ttl'] for m in sample_messages)
actual_ttls = set(m['ttl'] for m in result_doc)
self.assertFalse(expected_ttls - actual_ttls)
def test_exceeded_payloads(self):
# Get a valid message id
self._post_messages(self.messages_path)
msg_id = self._get_msg_id(self.srmock.headers_dict)
# Bulk GET restriction
query_string = 'ids=' + ','.join([msg_id] * 21)
self.simulate_get(self.messages_path, self.project_id,
query_string=query_string)
self.assertEqual(falcon.HTTP_400, self.srmock.status)
# Listing restriction
self.simulate_get(self.messages_path, self.project_id,
query_string='limit=21',
headers=self.headers)
self.assertEqual(falcon.HTTP_400, self.srmock.status)
# Bulk deletion restriction
query_string = 'ids=' + ','.join([msg_id] * 22)
self.simulate_delete(self.messages_path, self.project_id,
query_string=query_string)
self.assertEqual(falcon.HTTP_400, self.srmock.status)
def test_post_single(self):
sample_messages = [
{'body': {'key': 'value'}, 'ttl': 200},
]
self._test_post(sample_messages)
def test_post_multiple(self):
sample_messages = [
{'body': 239, 'ttl': 100},
{'body': {'key': 'value'}, 'ttl': 200},
{'body': [1, 3], 'ttl': 300},
]
self._test_post(sample_messages)
def test_post_to_non_ascii_queue(self):
# NOTE(kgriffs): This test verifies that routes with
# embedded queue name params go through the validation
# hook, regardless of the target resource.
path = self.url_prefix + '/queues/non-ascii-n\u0153me/messages'
self._post_messages(path)
self.assertEqual(falcon.HTTP_400, self.srmock.status)
def test_post_with_long_queue_name(self):
# NOTE(kgriffs): This test verifies that routes with
# embedded queue name params go through the validation
# hook, regardless of the target resource.
queues_path = self.url_prefix + '/queues/'
game_title = 'v' * validation.QUEUE_NAME_MAX_LEN
self._post_messages(queues_path + game_title + '/messages')
self.assertEqual(falcon.HTTP_404, self.srmock.status)
game_title += 'v'
self._post_messages(queues_path + game_title + '/messages')
self.assertEqual(falcon.HTTP_400, self.srmock.status)
def test_post_to_missing_queue(self):
self._post_messages(self.url_prefix + '/queues/nonexistent/messages')
self.assertEqual(falcon.HTTP_404, self.srmock.status)
def test_get_from_missing_queue(self):
self.simulate_get(self.url_prefix + '/queues/nonexistent/messages',
self.project_id,
headers={'Client-ID':
'dfcd3238-425c-11e3-8a80-28cfe91478b9'})
self.assertEqual(falcon.HTTP_204, self.srmock.status)
@ddt.data('', '0xdeadbeef', '550893e0-2b6e-11e3-835a-5cf9dd72369')
def test_bad_client_id(self, text_id):
self.simulate_post(self.queue_path + '/messages',
body='{"ttl": 60, "body": ""}',
headers={'Client-ID': text_id})
self.assertEqual(falcon.HTTP_400, self.srmock.status)
self.simulate_get(self.queue_path + '/messages',
query_string='limit=3&echo=true',
headers={'Client-ID': text_id})
self.assertEqual(falcon.HTTP_400, self.srmock.status)
@ddt.data(None, '[', '[]', '{}', '.')
def test_post_bad_message(self, document):
self.simulate_post(self.queue_path + '/messages',
body=document,
headers=self.headers)
self.assertEqual(falcon.HTTP_400, self.srmock.status)
@ddt.data(-1, 59, 1209601)
def test_unacceptable_ttl(self, ttl):
self.simulate_post(self.queue_path + '/messages',
body=jsonutils.dumps([{'ttl': ttl, 'body': None}]),
headers=self.headers)
self.assertEqual(falcon.HTTP_400, self.srmock.status)
def test_exceeded_message_posting(self):
# Total (raw request) size
doc = jsonutils.dumps([{'body': "some body", 'ttl': 100}] * 20,
indent=4)
max_len = self.transport_cfg.max_messages_post_size
long_doc = doc + (' ' * (max_len - len(doc) + 1))
self.simulate_post(self.queue_path + '/messages',
body=long_doc,
headers=self.headers)
self.assertEqual(falcon.HTTP_400, self.srmock.status)
@ddt.data('{"overflow": 9223372036854775808}',
'{"underflow": -9223372036854775809}')
def test_unsupported_json(self, document):
self.simulate_post(self.queue_path + '/messages',
body=document,
headers=self.headers)
self.assertEqual(falcon.HTTP_400, self.srmock.status)
def test_delete(self):
self._post_messages(self.messages_path)
msg_id = self._get_msg_id(self.srmock.headers_dict)
target = self.messages_path + '/' + msg_id
self.simulate_get(target, self.project_id)
self.assertEqual(falcon.HTTP_200, self.srmock.status)
self.simulate_delete(target, self.project_id)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
self.simulate_get(target, self.project_id)
self.assertEqual(falcon.HTTP_404, self.srmock.status)
# Safe to delete non-existing ones
self.simulate_delete(target, self.project_id)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
def test_bulk_delete(self):
path = self.queue_path + '/messages'
self._post_messages(path, repeat=5)
[target, params] = self.srmock.headers_dict['location'].split('?')
# Deleting the whole collection is denied
self.simulate_delete(path, self.project_id)
self.assertEqual(falcon.HTTP_400, self.srmock.status)
self.simulate_delete(target, self.project_id, query_string=params)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
self.simulate_get(target, self.project_id, query_string=params)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
# Safe to delete non-existing ones
self.simulate_delete(target, self.project_id, query_string=params)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
# Even after the queue is gone
self.simulate_delete(self.queue_path, self.project_id)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
self.simulate_delete(target, self.project_id, query_string=params)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
def test_list(self):
path = self.queue_path + '/messages'
self._post_messages(path, repeat=10)
query_string = 'limit=3&echo=true'
body = self.simulate_get(path, self.project_id,
query_string=query_string,
headers=self.headers)
self.assertEqual(falcon.HTTP_200, self.srmock.status)
self.assertEqual(path + '?' + query_string,
self.srmock.headers_dict['Content-Location'])
cnt = 0
while self.srmock.status == falcon.HTTP_200:
contents = jsonutils.loads(body[0])
[target, params] = contents['links'][0]['href'].split('?')
for msg in contents['messages']:
self.simulate_get(msg['href'], self.project_id)
self.assertEqual(falcon.HTTP_200, self.srmock.status)
body = self.simulate_get(target, self.project_id,
query_string=params,
headers=self.headers)
cnt += 1
self.assertEqual(4, cnt)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
# Stats
body = self.simulate_get(self.queue_path + '/stats', self.project_id)
self.assertEqual(falcon.HTTP_200, self.srmock.status)
message_stats = jsonutils.loads(body[0])['messages']
self.assertEqual(self.queue_path + '/stats',
self.srmock.headers_dict['Content-Location'])
# NOTE(kgriffs): The other parts of the stats are tested
# in tests.storage.base and so are not repeated here.
expected_pattern = self.queue_path + '/messages/[^/]+$'
for message_stat_name in ('oldest', 'newest'):
self.assertThat(message_stats[message_stat_name]['href'],
matchers.MatchesRegex(expected_pattern))
# NOTE(kgriffs): Try to get messages for a missing queue
self.simulate_get(self.url_prefix + '/queues/nonexistent/messages',
self.project_id,
headers=self.headers)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
def test_list_with_bad_marker(self):
path = self.queue_path + '/messages'
self._post_messages(path, repeat=5)
query_string = 'limit=3&echo=true&marker=sfhlsfdjh2048'
self.simulate_get(path, self.project_id,
query_string=query_string,
headers=self.headers)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
def test_no_uuid(self):
path = self.queue_path + '/messages'
self.simulate_post(path, '7e7e7e',
headers={},
body='[{"body": 0, "ttl": 100}]')
self.assertEqual(falcon.HTTP_400, self.srmock.status)
self.simulate_get(path, '7e7e7e', headers={})
self.assertEqual(falcon.HTTP_400, self.srmock.status)
# NOTE(cpp-cabrera): regression test against bug #1210633
def test_when_claim_deleted_then_messages_unclaimed(self):
path = self.queue_path
self._post_messages(path + '/messages', repeat=5)
self.assertEqual(falcon.HTTP_201, self.srmock.status)
# post claim
self.simulate_post(path + '/claims', self.project_id,
body='{"ttl": 100, "grace": 100}')
self.assertEqual(falcon.HTTP_201, self.srmock.status)
location = self.srmock.headers_dict['location']
# release claim
self.simulate_delete(location, self.project_id)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
# get unclaimed messages
self.simulate_get(path + '/messages', self.project_id,
query_string='echo=true',
headers=self.headers)
self.assertEqual(falcon.HTTP_200, self.srmock.status)
# NOTE(cpp-cabrera): regression test against bug #1203842
def test_get_nonexistent_message_404s(self):
path = self.url_prefix + '/queues/notthere/messages/a'
self.simulate_get(path)
self.assertEqual(falcon.HTTP_404, self.srmock.status)
def test_get_multiple_invalid_messages_204s(self):
path = self.url_prefix + '/queues/notthere/messages'
self.simulate_get(path, query_string='ids=a,b,c')
self.assertEqual(falcon.HTTP_204, self.srmock.status)
def test_delete_multiple_invalid_messages_204s(self):
path = self.url_prefix + '/queues/notthere/messages'
self.simulate_delete(path, query_string='ids=a,b,c')
self.assertEqual(falcon.HTTP_204, self.srmock.status)
def test_delete_message_with_invalid_claim_doesnt_delete_message(self):
path = self.queue_path
resp = self._post_messages(path + '/messages', 1)
location = jsonutils.loads(resp[0])['resources'][0]
self.simulate_delete(location, self.project_id,
query_string='claim_id=invalid')
self.assertEqual(falcon.HTTP_400, self.srmock.status)
self.simulate_get(location, self.project_id)
self.assertEqual(falcon.HTTP_200, self.srmock.status)
def test_no_duplicated_messages_path_in_href(self):
"""Test for bug 1240897."""
path = self.queue_path + '/messages'
self._post_messages(path, repeat=1)
msg_id = self._get_msg_id(self.srmock.headers_dict)
query_string = 'ids=%s' % msg_id
body = self.simulate_get(path, self.project_id,
query_string=query_string,
headers=self.headers)
messages = jsonutils.loads(body[0])
self.assertNotIn(self.queue_path + '/messages/messages',
messages[0]['href'])
def _post_messages(self, target, repeat=1):
doc = jsonutils.dumps([{'body': 239, 'ttl': 300}] * repeat)
return self.simulate_post(target, self.project_id, body=doc,
headers=self.headers)
def _get_msg_id(self, headers):
return self._get_msg_ids(headers)[0]
def _get_msg_ids(self, headers):
return headers['location'].rsplit('=', 1)[-1].split(',')
class TestMessagesMongoDBPooled(TestMessagesMongoDB):
config_file = 'wsgi_mongodb_pooled.conf'
# TODO(cpp-cabrera): remove this skipTest once pooled queue
# listing is implemented
def test_list(self):
self.skipTest("Need to implement pooled queue listing.")
class TestMessagesFaultyDriver(base.V1BaseFaulty):
config_file = 'wsgi_faulty.conf'
def test_simple(self):
project_id = 'xyz'
path = self.url_prefix + '/queues/fizbit/messages'
doc = '[{"body": 239, "ttl": 100}]'
headers = {
'Client-ID': uuidutils.generate_uuid(),
}
self.simulate_post(path, project_id,
body=doc,
headers=headers)
self.assertEqual(falcon.HTTP_503, self.srmock.status)
self.simulate_get(path, project_id,
headers=headers)
self.assertEqual(falcon.HTTP_503, self.srmock.status)
self.simulate_get(path + '/nonexistent', project_id)
self.assertEqual(falcon.HTTP_503, self.srmock.status)
self.simulate_delete(path + '/nada', project_id)
self.assertEqual(falcon.HTTP_503, self.srmock.status)

@@ -1,335 +0,0 @@
# Copyright (c) 2013 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import contextlib
import ddt
import falcon
from oslo_serialization import jsonutils
from oslo_utils import uuidutils
from zaqar import tests as testing
from zaqar.tests.unit.transport.wsgi import base
@contextlib.contextmanager
def pool(test, name, weight, uri, options={}):
"""A context manager for constructing a pool for use in testing.
Deletes the pool after exiting the context.
:param test: Must expose simulate_* methods
:param name: Name for this pool
:type name: str
:type weight: int
:type uri: str
:type options: dict
:returns: (name, weight, uri, options)
:rtype: see above
"""
uri = "%s/%s" % (uri, uuidutils.generate_uuid())
doc = {'weight': weight, 'uri': uri, 'options': options}
path = test.url_prefix + '/pools/' + name
test.simulate_put(path, body=jsonutils.dumps(doc))
try:
yield name, weight, uri, options
finally:
test.simulate_delete(path)
@contextlib.contextmanager
def pools(test, count, uri):
"""A context manager for constructing pools for use in testing.
Deletes the pools after exiting the context.
:param test: Must expose simulate_* methods
:param count: Number of pools to create
:type count: int
:returns: (paths, weights, uris, options)
:rtype: ([str], [int], [str], [dict])
"""
mongo_url = uri
base = test.url_prefix + '/pools/'
args = [(base + str(i), i,
{str(i): i})
for i in range(count)]
for path, weight, option in args:
uri = "%s/%s" % (mongo_url, uuidutils.generate_uuid())
doc = {'weight': weight, 'uri': uri, 'options': option}
test.simulate_put(path, body=jsonutils.dumps(doc))
try:
yield args
finally:
for path, _, _ in args:
test.simulate_delete(path)
@ddt.ddt
class TestPoolsMongoDB(base.V1Base):
config_file = 'wsgi_mongodb_pooled.conf'
@testing.requires_mongodb
def setUp(self):
super(TestPoolsMongoDB, self).setUp()
self.doc = {'weight': 100, 'uri': self.mongodb_url}
self.pool = self.url_prefix + '/pools/' + uuidutils.generate_uuid()
self.simulate_put(self.pool, body=jsonutils.dumps(self.doc))
self.assertEqual(falcon.HTTP_201, self.srmock.status)
def tearDown(self):
super(TestPoolsMongoDB, self).tearDown()
self.simulate_delete(self.pool)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
def test_put_pool_works(self):
name = uuidutils.generate_uuid()
weight, uri = self.doc['weight'], self.doc['uri']
with pool(self, name, weight, uri):
self.assertEqual(falcon.HTTP_201, self.srmock.status)
def test_put_raises_if_missing_fields(self):
path = self.url_prefix + '/pools/' + uuidutils.generate_uuid()
self.simulate_put(path, body=jsonutils.dumps({'weight': 100}))
self.assertEqual(falcon.HTTP_400, self.srmock.status)
self.simulate_put(path,
body=jsonutils.dumps(
{'uri': self.mongodb_url}))
self.assertEqual(falcon.HTTP_400, self.srmock.status)
@ddt.data(-1, 2**32+1, 'big')
def test_put_raises_if_invalid_weight(self, weight):
path = self.url_prefix + '/pools/' + uuidutils.generate_uuid()
doc = {'weight': weight, 'uri': 'a'}
self.simulate_put(path,
body=jsonutils.dumps(doc))
self.assertEqual(falcon.HTTP_400, self.srmock.status)
@ddt.data(-1, 2**32+1, [], 'localhost:27017')
def test_put_raises_if_invalid_uri(self, uri):
path = self.url_prefix + '/pools/' + uuidutils.generate_uuid()
self.simulate_put(path,
body=jsonutils.dumps({'weight': 1, 'uri': uri}))
self.assertEqual(falcon.HTTP_400, self.srmock.status)
@ddt.data(-1, 'wee', [])
def test_put_raises_if_invalid_options(self, options):
path = self.url_prefix + '/pools/' + uuidutils.generate_uuid()
doc = {'weight': 1, 'uri': 'a', 'options': options}
self.simulate_put(path, body=jsonutils.dumps(doc))
self.assertEqual(falcon.HTTP_400, self.srmock.status)
def test_put_existing_overwrites(self):
# NOTE(cabrera): setUp creates default pool
expect = self.doc
self.simulate_put(self.pool,
body=jsonutils.dumps(expect))
self.assertEqual(falcon.HTTP_201, self.srmock.status)
result = self.simulate_get(self.pool)
self.assertEqual(falcon.HTTP_200, self.srmock.status)
doc = jsonutils.loads(result[0])
self.assertEqual(expect['weight'], doc['weight'])
self.assertEqual(expect['uri'], doc['uri'])
def test_delete_works(self):
self.simulate_delete(self.pool)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
self.simulate_get(self.pool)
self.assertEqual(falcon.HTTP_404, self.srmock.status)
def test_get_nonexisting_raises_404(self):
self.simulate_get(self.url_prefix + '/pools/nonexisting')
self.assertEqual(falcon.HTTP_404, self.srmock.status)
def _pool_expect(self, pool, xhref, xweight, xuri):
self.assertIn('href', pool)
self.assertIn('name', pool)
self.assertEqual(xhref, pool['href'])
self.assertIn('weight', pool)
self.assertEqual(xweight, pool['weight'])
self.assertIn('uri', pool)
# NOTE(dynarro): we are using startswith because we append UUIDs
# to pool URIs, to avoid duplicates
self.assertTrue(pool['uri'].startswith(xuri))
def test_get_works(self):
result = self.simulate_get(self.pool)
self.assertEqual(falcon.HTTP_200, self.srmock.status)
pool = jsonutils.loads(result[0])
self._pool_expect(pool, self.pool, self.doc['weight'],
self.doc['uri'])
def test_detailed_get_works(self):
result = self.simulate_get(self.pool,
query_string='detailed=True')
self.assertEqual(falcon.HTTP_200, self.srmock.status)
pool = jsonutils.loads(result[0])
self._pool_expect(pool, self.pool, self.doc['weight'],
self.doc['uri'])
self.assertIn('options', pool)
self.assertEqual({}, pool['options'])
def test_patch_raises_if_missing_fields(self):
self.simulate_patch(self.pool,
body=jsonutils.dumps({'location': 1}))
self.assertEqual(falcon.HTTP_400, self.srmock.status)
def _patch_test(self, doc):
self.simulate_patch(self.pool,
body=jsonutils.dumps(doc))
self.assertEqual(falcon.HTTP_200, self.srmock.status)
result = self.simulate_get(self.pool,
query_string='detailed=True')
self.assertEqual(falcon.HTTP_200, self.srmock.status)
pool = jsonutils.loads(result[0])
self._pool_expect(pool, self.pool, doc['weight'],
doc['uri'])
self.assertEqual(doc['options'], pool['options'])
def test_patch_works(self):
doc = {'weight': 101,
'uri': self.mongodb_url,
'options': {'a': 1}}
self._patch_test(doc)
def test_patch_works_with_extra_fields(self):
doc = {'weight': 101,
'uri': self.mongodb_url,
'options': {'a': 1},
'location': 100, 'partition': 'taco'}
self._patch_test(doc)
@ddt.data(-1, 2**32+1, 'big')
def test_patch_raises_400_on_invalid_weight(self, weight):
self.simulate_patch(self.pool,
body=jsonutils.dumps({'weight': weight}))
self.assertEqual(falcon.HTTP_400, self.srmock.status)
@ddt.data(-1, 2**32+1, [], 'localhost:27017')
def test_patch_raises_400_on_invalid_uri(self, uri):
self.simulate_patch(self.pool,
body=jsonutils.dumps({'uri': uri}))
self.assertEqual(falcon.HTTP_400, self.srmock.status)
@ddt.data(-1, 'wee', [])
def test_patch_raises_400_on_invalid_options(self, options):
self.simulate_patch(self.pool,
body=jsonutils.dumps({'options': options}))
self.assertEqual(falcon.HTTP_400, self.srmock.status)
def test_patch_raises_404_if_pool_not_found(self):
self.simulate_patch(self.url_prefix + '/pools/notexists',
body=jsonutils.dumps({'weight': 1}))
self.assertEqual(falcon.HTTP_404, self.srmock.status)
def test_empty_listing(self):
self.simulate_delete(self.pool)
result = self.simulate_get(self.url_prefix + '/pools')
results = jsonutils.loads(result[0])
self.assertEqual(falcon.HTTP_200, self.srmock.status)
self.assertEqual(0, len(results['pools']))
self.assertIn('links', results)
def _listing_test(self, count=10, limit=10,
marker=None, detailed=False):
# NOTE(cpp-cabrera): delete initial pool - it will interfere
# with listing tests
self.simulate_delete(self.pool)
query = 'limit={0}&detailed={1}'.format(limit, detailed)
if marker:
query += '&marker={0}'.format(marker)
with pools(self, count, self.doc['uri']) as expected:
result = self.simulate_get(self.url_prefix + '/pools',
query_string=query)
self.assertEqual(falcon.HTTP_200, self.srmock.status)
results = jsonutils.loads(result[0])
self.assertIsInstance(results, dict)
self.assertIn('pools', results)
self.assertIn('links', results)
pool_list = results['pools']
link = results['links'][0]
self.assertEqual('next', link['rel'])
href = falcon.uri.parse_query_string(link['href'].split('?')[1])
self.assertIn('marker', href)
self.assertEqual(str(limit), href['limit'])
self.assertEqual(str(detailed).lower(), href['detailed'])
next_query_string = ('marker={marker}&limit={limit}'
'&detailed={detailed}').format(**href)
next_result = self.simulate_get(link['href'].split('?')[0],
query_string=next_query_string)
self.assertEqual(falcon.HTTP_200, self.srmock.status)
next_pool = jsonutils.loads(next_result[0])
next_pool_list = next_pool['pools']
self.assertIn('links', next_pool)
if limit < count:
self.assertEqual(min(limit, count-limit),
len(next_pool_list))
else:
# NOTE(jeffrey4l): when limit >= count, there will be no
# pools in the 2nd page.