Switch to testr from pytest
Align with other OpenStack projects and use testr instead of pytest. Includes: - pylint and pep8 fixes. - py34 compliance. - requirements updated. - updated path to dsvm gate job. Change-Id: I10a5ea8d581029eba8f3ab5e016a1dd1919fa117 Depends-On: I4f9050cd551e87d2e398b6f94dc904154d0b1b13
This commit is contained in:
parent
e89b5fc80e
commit
8d220b56a7
13
.coveragerc
13
.coveragerc
|
@ -1,3 +1,14 @@
|
|||
# .coveragerc to control coverage.py
|
||||
[run]
|
||||
omit=*__init__.py
|
||||
branch = True
|
||||
omit = freezer_api/tests/*
|
||||
|
||||
[report]
|
||||
ignore_errors = True
|
||||
|
||||
[paths]
|
||||
source =
|
||||
freezer_api/
|
||||
|
||||
[html]
|
||||
directory = term
|
||||
|
|
|
@ -0,0 +1,9 @@
|
|||
[DEFAULT]
|
||||
test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \
|
||||
OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \
|
||||
OS_LOG_CAPTURE=${OS_LOG_CAPTURE:-1} \
|
||||
${PYTHON:-python} -m subunit.run discover -s ${OS_TEST_PATH:-./freezer_api/tests/unit} -t . $LISTOPT $IDOPTION
|
||||
|
||||
test_id_option=--load-list $IDFILE
|
||||
test_list_option=--list
|
||||
group_regex=([^\.]+\.)+
|
|
@ -27,7 +27,7 @@ def json_translator(req, app):
|
|||
if isinstance(resp.body, dict):
|
||||
try:
|
||||
resp.body = json.dumps(resp.body)
|
||||
except:
|
||||
except Exception:
|
||||
raise freezer_api_exc.FreezerAPIException(
|
||||
'Internal server error: malformed json reply')
|
||||
return resp
|
||||
|
|
|
@ -16,8 +16,8 @@ limitations under the License.
|
|||
"""
|
||||
|
||||
import falcon
|
||||
from freezer_api.common import exceptions as freezer_api_exc
|
||||
from freezer_api.api.common import resource
|
||||
from freezer_api.common import exceptions as freezer_api_exc
|
||||
|
||||
|
||||
class ActionsCollectionResource(resource.BaseResource):
|
||||
|
|
|
@ -16,10 +16,11 @@ limitations under the License.
|
|||
"""
|
||||
|
||||
import falcon
|
||||
from six import iteritems
|
||||
import uuid
|
||||
|
||||
from freezer_api.common import exceptions as freezer_api_exc
|
||||
from freezer_api.api.common import resource
|
||||
from freezer_api.common import exceptions as freezer_api_exc
|
||||
|
||||
|
||||
class JobsBaseResource(resource.BaseResource):
|
||||
|
@ -155,8 +156,8 @@ class JobsEvent(resource.BaseResource):
|
|||
doc = self.json_body(req)
|
||||
|
||||
try:
|
||||
event, params = next(doc.iteritems())
|
||||
except:
|
||||
event, params = next(iteritems(doc))
|
||||
except Exception:
|
||||
raise freezer_api_exc.BadDataFormat("Bad event request format")
|
||||
|
||||
job_doc = self.db.get_job(user_id=user_id,
|
||||
|
@ -288,7 +289,7 @@ class Job(object):
|
|||
def expand_default_properties(self):
|
||||
action_defaults = self.doc.pop("action_defaults")
|
||||
if isinstance(action_defaults, dict):
|
||||
for key, val in action_defaults.items():
|
||||
for key, val in iteritems(action_defaults):
|
||||
for action in self.doc.get("job_actions"):
|
||||
if action["freezer_action"].get(key) is None:
|
||||
action["freezer_action"][key] = val
|
||||
|
|
|
@ -16,8 +16,10 @@ limitations under the License.
|
|||
"""
|
||||
|
||||
import falcon
|
||||
from freezer_api.common import exceptions as freezer_api_exc
|
||||
from six import iteritems
|
||||
|
||||
from freezer_api.api.common import resource
|
||||
from freezer_api.common import exceptions as freezer_api_exc
|
||||
import time
|
||||
|
||||
|
||||
|
@ -114,8 +116,8 @@ class SessionsAction(resource.BaseResource):
|
|||
doc = self.json_body(req)
|
||||
|
||||
try:
|
||||
action, params = next(doc.iteritems())
|
||||
except:
|
||||
action, params = next(iteritems(doc))
|
||||
except Exception:
|
||||
raise freezer_api_exc.BadDataFormat("Bad action request format")
|
||||
|
||||
session_doc = self.db.get_session(user_id=user_id,
|
||||
|
@ -231,7 +233,7 @@ class Session(resource.BaseResource):
|
|||
"""
|
||||
check the status of all the jobs and return the overall session result
|
||||
"""
|
||||
for job in self.doc['jobs'].itervalues():
|
||||
for job in self.doc['jobs'].values():
|
||||
if job['status'] != 'completed':
|
||||
return 'running'
|
||||
if job['result'] != 'success':
|
||||
|
@ -241,7 +243,7 @@ class Session(resource.BaseResource):
|
|||
def set_job_end(self, job_id, result, timestamp):
|
||||
try:
|
||||
job = self.doc['jobs'][job_id]
|
||||
except:
|
||||
except Exception:
|
||||
raise freezer_api_exc.BadDataFormat('job_id not found in session')
|
||||
job['status'] = 'completed'
|
||||
job['result'] = result
|
||||
|
@ -250,7 +252,7 @@ class Session(resource.BaseResource):
|
|||
def set_job_start(self, job_id, timestamp):
|
||||
try:
|
||||
job = self.doc['jobs'][job_id]
|
||||
except:
|
||||
except Exception:
|
||||
raise freezer_api_exc.BadDataFormat('job_id not found in session')
|
||||
job['status'] = 'running'
|
||||
job['result'] = ''
|
||||
|
|
|
@ -15,11 +15,12 @@ limitations under the License.
|
|||
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
import falcon
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
import falcon
|
||||
from keystonemiddleware import auth_token
|
||||
from oslo_config import cfg
|
||||
from wsgiref import simple_server
|
||||
|
@ -28,7 +29,7 @@ from freezer_api.api.common import middleware
|
|||
from freezer_api.api import v1
|
||||
from freezer_api.api import versions
|
||||
|
||||
from freezer_api.common._i18n import _, _LI, _LW
|
||||
from freezer_api.common import _i18n
|
||||
from freezer_api.common import config
|
||||
from freezer_api.common import exceptions as freezer_api_exc
|
||||
from freezer_api.common import log
|
||||
|
@ -55,7 +56,7 @@ def get_application(db):
|
|||
if 'keystone_authtoken' in config.CONF:
|
||||
app = auth_token.AuthProtocol(app, {})
|
||||
else:
|
||||
logging.warning(_LW("keystone authentication disabled"))
|
||||
logging.warning(_i18n._LW("keystone authentication disabled"))
|
||||
|
||||
app = middleware.HealthApp(app=app, path='/v1/health')
|
||||
|
||||
|
@ -65,15 +66,15 @@ config_file = '/etc/freezer-api.conf'
|
|||
config_files_list = [config_file] if os.path.isfile(config_file) else []
|
||||
config.parse_args(args=[], default_config_files=config_files_list)
|
||||
log.setup()
|
||||
logging.info(_LI("Freezer API starting"))
|
||||
logging.info(_LI("Freezer config file(s) used: %s")
|
||||
logging.info(_i18n._LI("Freezer API starting"))
|
||||
logging.info(_i18n._LI("Freezer config file(s) used: %s")
|
||||
% ', '.join(cfg.CONF.config_file))
|
||||
try:
|
||||
db = driver.get_db()
|
||||
application = get_application(db)
|
||||
except Exception as err:
|
||||
message = _('Unable to start server: %s ') % err
|
||||
print message
|
||||
message = _i18n._('Unable to start server: %s ') % err
|
||||
print(message)
|
||||
logging.fatal(message)
|
||||
sys.exit(1)
|
||||
|
||||
|
@ -86,14 +87,14 @@ def main():
|
|||
if ':' in ip:
|
||||
ip, port = ip.split(':')
|
||||
httpd = simple_server.make_server(ip, int(port), application)
|
||||
message = _('Server listening on %(ip)s:%(port)s'
|
||||
message = _i18n._('Server listening on %(ip)s:%(port)s'
|
||||
% {'ip': ip, 'port': port})
|
||||
print message
|
||||
print(message)
|
||||
logging.info(message)
|
||||
try:
|
||||
httpd.serve_forever()
|
||||
except KeyboardInterrupt:
|
||||
print _("\nThanks, Bye")
|
||||
print(_i18n._("\nThanks, Bye"))
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
|
|
|
@ -16,11 +16,13 @@ limitations under the License.
|
|||
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
import argparse
|
||||
import ConfigParser
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
from six.moves import builtins
|
||||
from six.moves import configparser
|
||||
import sys
|
||||
|
||||
import requests
|
||||
|
@ -76,7 +78,7 @@ class ElastichsearchEngine(object):
|
|||
|
||||
def askput_number_of_replicas(self, n):
|
||||
if self.args.test_only:
|
||||
print "Number of replicas don't match"
|
||||
print("Number of replicas don't match")
|
||||
self.exit_code = os.EX_DATAERR
|
||||
return
|
||||
prompt_message = ('Number of replicas needs to be '
|
||||
|
@ -92,7 +94,7 @@ class ElastichsearchEngine(object):
|
|||
r = requests.put(url, data=json.dumps(body_dict))
|
||||
self.verbose_print("response: {0}".format(r))
|
||||
if r.status_code == requests.codes.OK:
|
||||
print "Replica number set to {0}".format(self.args.replicas)
|
||||
print("Replica number set to {0}".format(self.args.replicas))
|
||||
else:
|
||||
raise NumberOfReplicasException('Error setting the replica '
|
||||
'number, {0}: {1}'
|
||||
|
@ -100,9 +102,9 @@ class ElastichsearchEngine(object):
|
|||
|
||||
def put_mappings(self, mappings):
|
||||
self.check_index_exists()
|
||||
for es_type, mapping in mappings.iteritems():
|
||||
for es_type, mapping in mappings.items():
|
||||
if self.mapping_match(es_type, mapping):
|
||||
print '{0}/{1} MATCHES'.format(self.es_index, es_type)
|
||||
print('{0}/{1} MATCHES'.format(self.es_index, es_type))
|
||||
else:
|
||||
self.askput_mapping(es_type, mapping)
|
||||
return self.exit_code
|
||||
|
@ -131,7 +133,7 @@ class ElastichsearchEngine(object):
|
|||
|
||||
def askput_mapping(self, es_type, mapping):
|
||||
if self.args.test_only:
|
||||
print '{0}/{1} DOES NOT MATCH'.format(self.es_index, es_type)
|
||||
print('{0}/{1} DOES NOT MATCH'.format(self.es_index, es_type))
|
||||
self.exit_code = os.EX_DATAERR
|
||||
return
|
||||
prompt_message = ('{0}/{1}/{2} needs to be updated. '
|
||||
|
@ -149,7 +151,7 @@ class ElastichsearchEngine(object):
|
|||
self.verbose_print('Unable to merge mappings.')
|
||||
self.verbose_print(e, 2)
|
||||
else:
|
||||
print "Mappings updated"
|
||||
print("Mappings updated")
|
||||
return
|
||||
|
||||
if self.args.yes and not self.args.erase:
|
||||
|
@ -189,7 +191,7 @@ class ElastichsearchEngine(object):
|
|||
r = requests.put(url, data=json.dumps(mapping))
|
||||
self.verbose_print("response: {0}".format(r))
|
||||
if r.status_code == requests.codes.OK:
|
||||
print "Type {0} mapping created".format(url)
|
||||
print("Type {0} mapping created".format(url))
|
||||
else:
|
||||
raise MergeMappingException('Type mapping creation error {0}: '
|
||||
'{1}'.format(r.status_code, r.text))
|
||||
|
@ -198,7 +200,7 @@ class ElastichsearchEngine(object):
|
|||
if assume_yes:
|
||||
return True
|
||||
while True:
|
||||
selection = raw_input(message)
|
||||
selection = builtins.input(message)
|
||||
if selection.upper() == 'Y':
|
||||
return True
|
||||
elif selection.upper() == 'N':
|
||||
|
@ -275,7 +277,7 @@ def parse_config_file(fname):
|
|||
|
||||
host, port, index, number_of_replicas = None, 0, None, 0
|
||||
|
||||
config = ConfigParser.ConfigParser()
|
||||
config = configparser.ConfigParser()
|
||||
config.read(fname)
|
||||
try:
|
||||
if config.has_option('storage', 'endpoint'):
|
||||
|
@ -288,15 +290,15 @@ def parse_config_file(fname):
|
|||
if match:
|
||||
host = match.group(1)
|
||||
port = int(match.group(2))
|
||||
except:
|
||||
except Exception:
|
||||
pass
|
||||
try:
|
||||
index = config.get('storage', 'index')
|
||||
except:
|
||||
except Exception:
|
||||
pass
|
||||
try:
|
||||
number_of_replicas = int(config.get('storage', 'number_of_replicas'))
|
||||
except:
|
||||
except Exception:
|
||||
pass
|
||||
return host, port, index, number_of_replicas
|
||||
|
||||
|
@ -313,7 +315,7 @@ def get_db_params(args):
|
|||
conf_fname = args.config_file or find_config_file()
|
||||
|
||||
if args.verbose:
|
||||
print "using config file: {0}".format(conf_fname)
|
||||
print("using config file: {0}".format(conf_fname))
|
||||
|
||||
conf_host, conf_port, conf_db_index, number_of_replicas = \
|
||||
parse_config_file(conf_fname)
|
||||
|
@ -364,8 +366,8 @@ def main():
|
|||
es_index=elasticsearch_index,
|
||||
args=args)
|
||||
if args.verbose:
|
||||
print " db url: {0}".format(elasticsearch_url)
|
||||
print "db index: {0}".format(elasticsearch_index)
|
||||
print(" db url: {0}".format(elasticsearch_url))
|
||||
print("db index: {0}".format(elasticsearch_index))
|
||||
|
||||
if args.select_mapping:
|
||||
mappings = {args.select_mapping: mappings[args.select_mapping]}
|
||||
|
@ -374,7 +376,7 @@ def main():
|
|||
es_manager.put_mappings(mappings)
|
||||
es_manager.set_number_of_replicas(number_of_replicas)
|
||||
except Exception as e:
|
||||
print "ERROR {0}".format(e)
|
||||
print("ERROR {0}".format(e))
|
||||
return os.EX_DATAERR
|
||||
|
||||
return es_manager.exit_code
|
||||
|
|
|
@ -14,6 +14,8 @@ See the License for the specific language governing permissions and
|
|||
limitations under the License.
|
||||
|
||||
"""
|
||||
import copy
|
||||
|
||||
|
||||
freezer_action_properties = {
|
||||
"action": {
|
||||
|
@ -330,11 +332,17 @@ additional_action_properties = {
|
|||
}
|
||||
|
||||
|
||||
tmp_prop = freezer_action_properties.items()
|
||||
tmp_add_prop = additional_action_properties.items()
|
||||
joined_properties = {}
|
||||
joined_properties.update(tmp_prop)
|
||||
joined_properties.update(tmp_add_prop)
|
||||
|
||||
|
||||
action_schema = {
|
||||
"id": "/",
|
||||
"type": "object",
|
||||
"properties": dict(freezer_action_properties.items() +
|
||||
additional_action_properties.items()),
|
||||
"properties": joined_properties,
|
||||
"additionalProperties": True,
|
||||
"required": [
|
||||
"action_id",
|
||||
|
@ -342,12 +350,10 @@ action_schema = {
|
|||
]
|
||||
}
|
||||
|
||||
|
||||
action_patch_schema = {
|
||||
"id": "/",
|
||||
"type": "object",
|
||||
"properties": dict(freezer_action_properties.items() +
|
||||
additional_action_properties.items()),
|
||||
"properties": joined_properties,
|
||||
"additionalProperties": True
|
||||
}
|
||||
|
||||
|
|
|
@ -16,8 +16,8 @@ limitations under the License.
|
|||
"""
|
||||
|
||||
|
||||
from oslo_config import cfg
|
||||
import logging
|
||||
from oslo_config import cfg
|
||||
|
||||
|
||||
logging_cli_opts = [
|
||||
|
@ -54,7 +54,7 @@ CONF.register_cli_opts(logging_cli_opts)
|
|||
def setup():
|
||||
try:
|
||||
log_file = CONF['log_file'] # cli provided
|
||||
except:
|
||||
except Exception:
|
||||
log_file = CONF['logging_file'] # .conf file
|
||||
logging.basicConfig(
|
||||
filename=log_file,
|
||||
|
|
|
@ -18,8 +18,8 @@ import jsonschema
|
|||
import time
|
||||
import uuid
|
||||
|
||||
from freezer_api.common import json_schemas
|
||||
from freezer_api.common import exceptions as freezer_api_exc
|
||||
from freezer_api.common import json_schemas
|
||||
|
||||
|
||||
class BackupMetadataDoc:
|
||||
|
@ -36,7 +36,7 @@ class BackupMetadataDoc:
|
|||
try:
|
||||
assert (self.backup_id is not '')
|
||||
assert (self.user_id is not '')
|
||||
except:
|
||||
except Exception:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
|
|
@ -20,7 +20,7 @@ import os
|
|||
|
||||
from oslo_config import cfg
|
||||
|
||||
from freezer_api.common._i18n import _, _LI
|
||||
from freezer_api.common import _i18n
|
||||
from freezer_api.storage import elastic
|
||||
|
||||
|
||||
|
@ -84,8 +84,8 @@ def get_db():
|
|||
opts = get_options()
|
||||
db_engine = opts.pop('db')
|
||||
if db_engine == 'elasticsearch':
|
||||
logging.debug(_LI('Elastichsearch config options: %s') % str(opts))
|
||||
logging.debug(_i18n._LI('Elastichsearch config options: %s') % str(opts))
|
||||
db = elastic.ElasticSearchEngine(**opts)
|
||||
else:
|
||||
raise Exception(_('Database Engine %s not supported') % db_engine)
|
||||
raise Exception(_i18n._('Database Engine %s not supported') % db_engine)
|
||||
return db
|
||||
|
|
|
@ -21,12 +21,12 @@ from elasticsearch import helpers as es_helpers
|
|||
import logging
|
||||
import uuid
|
||||
|
||||
from freezer_api.common._i18n import _, _LI
|
||||
from freezer_api.common import _i18n
|
||||
from freezer_api.common import exceptions as freezer_api_exc
|
||||
from freezer_api.common.utils import ActionDoc
|
||||
from freezer_api.common.utils import BackupMetadataDoc
|
||||
from freezer_api.common.utils import JobDoc
|
||||
from freezer_api.common.utils import ActionDoc
|
||||
from freezer_api.common.utils import SessionDoc
|
||||
from freezer_api.common import exceptions as freezer_api_exc
|
||||
|
||||
|
||||
class TypeManager:
|
||||
|
@ -53,9 +53,9 @@ class TypeManager:
|
|||
base_filter = TypeManager.get_base_search_filter(user_id, search)
|
||||
query_filter = {"filter": {"bool": {"must": base_filter}}}
|
||||
return {'query': {'filtered': query_filter}}
|
||||
except:
|
||||
except Exception:
|
||||
raise freezer_api_exc.StorageEngineError(
|
||||
message=_('search operation failed: query not valid'))
|
||||
message=_i18n._('search operation failed: query not valid'))
|
||||
|
||||
def get(self, user_id, doc_id):
|
||||
try:
|
||||
|
@ -65,13 +65,13 @@ class TypeManager:
|
|||
doc = res['_source']
|
||||
except elasticsearch.TransportError:
|
||||
raise freezer_api_exc.DocumentNotFound(
|
||||
message=_('No document found with ID %s') % doc_id)
|
||||
message=_i18n._('No document found with ID %s') % doc_id)
|
||||
except Exception as e:
|
||||
raise freezer_api_exc.StorageEngineError(
|
||||
message=_('Get operation failed: %s') % e)
|
||||
message=_i18n._('Get operation failed: %s') % e)
|
||||
if doc['user_id'] != user_id:
|
||||
raise freezer_api_exc.AccessForbidden(
|
||||
_("Document access forbidden"))
|
||||
_i18n._("Document access forbidden"))
|
||||
if '_version' in res:
|
||||
doc['_version'] = res['_version']
|
||||
return doc
|
||||
|
@ -84,10 +84,10 @@ class TypeManager:
|
|||
size=limit, from_=offset, body=query_dsl)
|
||||
except elasticsearch.ConnectionError:
|
||||
raise freezer_api_exc.StorageEngineError(
|
||||
message=_('unable to connect to db server'))
|
||||
message=_i18n._('unable to connect to db server'))
|
||||
except Exception as e:
|
||||
raise freezer_api_exc.StorageEngineError(
|
||||
message=_('search operation failed: %s') % e)
|
||||
message=_i18n._('search operation failed: %s') % e)
|
||||
hit_list = res['hits']['hits']
|
||||
return [x['_source'] for x in hit_list]
|
||||
|
||||
|
@ -102,10 +102,10 @@ class TypeManager:
|
|||
if e.status_code == 409:
|
||||
raise freezer_api_exc.DocumentExists(message=e.error)
|
||||
raise freezer_api_exc.StorageEngineError(
|
||||
message=_('index operation failed %s') % e)
|
||||
message=_i18n._('index operation failed %s') % e)
|
||||
except Exception as e:
|
||||
raise freezer_api_exc.StorageEngineError(
|
||||
message=_('index operation failed %s') % e)
|
||||
message=_i18n._('index operation failed %s') % e)
|
||||
return (created, version)
|
||||
|
||||
def delete(self, user_id, doc_id):
|
||||
|
@ -115,7 +115,7 @@ class TypeManager:
|
|||
doc_type=self.doc_type, query=query_dsl)
|
||||
except Exception as e:
|
||||
raise freezer_api_exc.StorageEngineError(
|
||||
message=_('Scan operation failed: %s') % e)
|
||||
message=_i18n._('Scan operation failed: %s') % e)
|
||||
id = None
|
||||
for res in results:
|
||||
id = res.get('_id')
|
||||
|
@ -123,7 +123,7 @@ class TypeManager:
|
|||
self.es.delete(index=self.index, doc_type=self.doc_type, id=id)
|
||||
except Exception as e:
|
||||
raise freezer_api_exc.StorageEngineError(
|
||||
message=_('Delete operation failed: %s') % e)
|
||||
message=_i18n._('Delete operation failed: %s') % e)
|
||||
return id
|
||||
|
||||
|
||||
|
@ -189,11 +189,11 @@ class JobTypeManager(TypeManager):
|
|||
if e.status_code == 409:
|
||||
raise freezer_api_exc.DocumentExists(message=e.error)
|
||||
raise freezer_api_exc.DocumentNotFound(
|
||||
message=_('Unable to find job to update '
|
||||
message=_i18n._('Unable to find job to update '
|
||||
'with id %(id)s. %(e)s') % {'id': job_id, 'e': e})
|
||||
except Exception:
|
||||
raise freezer_api_exc.StorageEngineError(
|
||||
message=_('Unable to update job with id %s') % job_id)
|
||||
message=_i18n._('Unable to update job with id %s') % job_id)
|
||||
return version
|
||||
|
||||
|
||||
|
@ -222,11 +222,11 @@ class ActionTypeManager(TypeManager):
|
|||
if e.status_code == 409:
|
||||
raise freezer_api_exc.DocumentExists(message=e.error)
|
||||
raise freezer_api_exc.DocumentNotFound(
|
||||
message=_('Unable to find action to update '
|
||||
message=_i18n._('Unable to find action to update '
|
||||
'with id %s') % action_id)
|
||||
except Exception:
|
||||
raise freezer_api_exc.StorageEngineError(
|
||||
message=_('Unable to update action with id %s') % action_id)
|
||||
message=_i18n._('Unable to update action with id %s') % action_id)
|
||||
return version
|
||||
|
||||
|
||||
|
@ -255,12 +255,12 @@ class SessionTypeManager(TypeManager):
|
|||
if e.status_code == 409:
|
||||
raise freezer_api_exc.DocumentExists(message=e.error)
|
||||
raise freezer_api_exc.DocumentNotFound(
|
||||
message=_(('Unable to update session '
|
||||
message=_i18n._(('Unable to update session '
|
||||
'%s. %s') % (session_id, e)))
|
||||
|
||||
except Exception:
|
||||
raise freezer_api_exc.StorageEngineError(
|
||||
message=_('Unable to update session with id %s') % session_id)
|
||||
message=_i18n._('Unable to update session with id %s') % session_id)
|
||||
return version
|
||||
|
||||
|
||||
|
@ -269,7 +269,7 @@ class ElasticSearchEngine(object):
|
|||
def __init__(self, index='freezer', **kwargs):
|
||||
self.index = index
|
||||
self.es = elasticsearch.Elasticsearch(**kwargs)
|
||||
logging.info(_LI('Storage backend: Elasticsearch '
|
||||
logging.info(_i18n._LI('Storage backend: Elasticsearch '
|
||||
'at %s') % kwargs['hosts'])
|
||||
self.backup_manager = BackupTypeManager(self.es, 'backups')
|
||||
self.client_manager = ClientTypeManager(self.es, 'clients')
|
||||
|
@ -290,12 +290,12 @@ class ElasticSearchEngine(object):
|
|||
# raises if data is malformed (HTTP_400) or already present (HTTP_409)
|
||||
backup_metadata_doc = BackupMetadataDoc(user_id, user_name, doc)
|
||||
if not backup_metadata_doc.is_valid():
|
||||
raise freezer_api_exc.BadDataFormat(message=_('Bad Data Format'))
|
||||
raise freezer_api_exc.BadDataFormat(message=_i18n._('Bad Data Format'))
|
||||
backup_id = backup_metadata_doc.backup_id
|
||||
existing = self.backup_manager.search(user_id, backup_id)
|
||||
if existing:
|
||||
raise freezer_api_exc.DocumentExists(
|
||||
message=_('Backup data already existing with ID %s') %
|
||||
message=_i18n._('Backup data already existing with ID %s') %
|
||||
backup_id)
|
||||
self.backup_manager.insert(backup_metadata_doc.serialize())
|
||||
return backup_id
|
||||
|
@ -315,16 +315,16 @@ class ElasticSearchEngine(object):
|
|||
def add_client(self, user_id, doc):
|
||||
client_id = doc.get('client_id', None)
|
||||
if client_id is None:
|
||||
raise freezer_api_exc.BadDataFormat(message=_('Missing client ID'))
|
||||
raise freezer_api_exc.BadDataFormat(message=_i18n._('Missing client ID'))
|
||||
existing = self.client_manager.search(user_id, client_id)
|
||||
if existing:
|
||||
raise freezer_api_exc.DocumentExists(
|
||||
message=_('Client already registered with ID %s') % client_id)
|
||||
message=_i18n._('Client already registered with ID %s') % client_id)
|
||||
client_doc = {'client': doc,
|
||||
'user_id': user_id,
|
||||
'uuid': uuid.uuid4().hex}
|
||||
self.client_manager.insert(client_doc)
|
||||
logging.info(_LI('Client registered, client_id: %s') % client_id)
|
||||
logging.info(_i18n._LI('Client registered, client_id: %s') % client_id)
|
||||
return client_id
|
||||
|
||||
def delete_client(self, user_id, client_id):
|
||||
|
@ -344,7 +344,7 @@ class ElasticSearchEngine(object):
|
|||
jobdoc = JobDoc.create(doc, user_id)
|
||||
job_id = jobdoc['job_id']
|
||||
self.job_manager.insert(jobdoc, job_id)
|
||||
logging.info(_LI('Job registered, job id: %s') % job_id)
|
||||
logging.info(_i18n._LI('Job registered, job id: %s') % job_id)
|
||||
return job_id
|
||||
|
||||
def delete_job(self, user_id, job_id):
|
||||
|
@ -357,7 +357,7 @@ class ElasticSearchEngine(object):
|
|||
assert (self.job_manager.get(user_id, job_id))
|
||||
|
||||
version = self.job_manager.update(job_id, valid_patch)
|
||||
logging.info(_LI('Job %(id)s updated to version %(version)s') %
|
||||
logging.info(_i18n._LI('Job %(id)s updated to version %(version)s') %
|
||||
{'id': job_id, 'version': version})
|
||||
return version
|
||||
|
||||
|
@ -373,9 +373,9 @@ class ElasticSearchEngine(object):
|
|||
|
||||
(created, version) = self.job_manager.insert(valid_doc, job_id)
|
||||
if created:
|
||||
logging.info(_LI('Job %s created') % job_id)
|
||||
logging.info(_i18n._LI('Job %s created') % job_id)
|
||||
else:
|
||||
logging.info(_LI('Job %(id)s replaced with version %(version)s' %
|
||||
logging.info(_i18n._LI('Job %(id)s replaced with version %(version)s' %
|
||||
{'id': job_id, 'version': version}))
|
||||
return version
|
||||
|
||||
|
@ -393,7 +393,7 @@ class ElasticSearchEngine(object):
|
|||
actiondoc = ActionDoc.create(doc, user_id)
|
||||
action_id = actiondoc['action_id']
|
||||
self.action_manager.insert(actiondoc, action_id)
|
||||
logging.info(_LI('Action registered, action id: %s') % action_id)
|
||||
logging.info(_i18n._LI('Action registered, action id: %s') % action_id)
|
||||
return action_id
|
||||
|
||||
def delete_action(self, user_id, action_id):
|
||||
|
@ -406,7 +406,7 @@ class ElasticSearchEngine(object):
|
|||
assert (self.action_manager.get(user_id, action_id))
|
||||
|
||||
version = self.action_manager.update(action_id, valid_patch)
|
||||
logging.info(_LI('Action %(id)s updated to version %(version)s' %
|
||||
logging.info(_i18n._LI('Action %(id)s updated to version %(version)s' %
|
||||
{'id': action_id, 'version': version}))
|
||||
return version
|
||||
|
||||
|
@ -422,9 +422,9 @@ class ElasticSearchEngine(object):
|
|||
|
||||
(created, version) = self.action_manager.insert(valid_doc, action_id)
|
||||
if created:
|
||||
logging.info(_LI('Action %s created') % action_id)
|
||||
logging.info(_i18n._LI('Action %s created') % action_id)
|
||||
else:
|
||||
logging.info(_LI('Action %(id)s replaced with version %(version)s'
|
||||
logging.info(_i18n._LI('Action %(id)s replaced with version %(version)s'
|
||||
% {'id': action_id, 'version': version}))
|
||||
return version
|
||||
|
||||
|
@ -442,7 +442,7 @@ class ElasticSearchEngine(object):
|
|||
session_doc = SessionDoc.create(doc, user_id)
|
||||
session_id = session_doc['session_id']
|
||||
self.session_manager.insert(session_doc, session_id)
|
||||
logging.info(_LI('Session registered, session id: %s') % session_id)
|
||||
logging.info(_i18n._LI('Session registered, session id: %s') % session_id)
|
||||
return session_id
|
||||
|
||||
def delete_session(self, user_id, session_id):
|
||||
|
@ -455,7 +455,7 @@ class ElasticSearchEngine(object):
|
|||
assert (self.session_manager.get(user_id, session_id))
|
||||
|
||||
version = self.session_manager.update(session_id, valid_patch)
|
||||
logging.info(_LI('Session %(id)s updated to version %(version)s' %
|
||||
logging.info(_i18n._LI('Session %(id)s updated to version %(version)s' %
|
||||
{'id': session_id, 'version': version}))
|
||||
return version
|
||||
|
||||
|
@ -471,8 +471,8 @@ class ElasticSearchEngine(object):
|
|||
|
||||
(created, version) = self.session_manager.insert(valid_doc, session_id)
|
||||
if created:
|
||||
logging.info(_LI('Session %s created') % session_id)
|
||||
logging.info(_i18n._LI('Session %s created') % session_id)
|
||||
else:
|
||||
logging.info(_LI('Session %(id)s replaced with version %(version)s'
|
||||
logging.info(_i18n._LI('Session %(id)s replaced with version %(version)s'
|
||||
% {'id': session_id, 'version': version}))
|
||||
return version
|
||||
|
|
|
@ -18,7 +18,7 @@
|
|||
# Install packages from test-requirements.txt
|
||||
sudo pip install -r /opt/stack/new/freezer-api/test-requirements.txt
|
||||
|
||||
cd /opt/stack/new/freezer-api/tests
|
||||
cd /opt/stack/new/freezer-api/freezer_api/tests
|
||||
echo 'Running freezer-api integration tests'
|
||||
# Here it goes the command to execute integration tests
|
||||
#sudo ./run_tests.sh
|
|
@ -21,7 +21,7 @@ from mock import Mock, patch
|
|||
import random
|
||||
import falcon
|
||||
|
||||
from common import *
|
||||
from .common import *
|
||||
from freezer_api.common.exceptions import *
|
||||
|
||||
from freezer_api.api.v1 import actions as v1_actions
|
|
@ -23,7 +23,7 @@ import falcon
|
|||
from freezer_api.api.v1 import backups
|
||||
from freezer_api.common.exceptions import *
|
||||
|
||||
from common import *
|
||||
from .common import *
|
||||
|
||||
|
||||
class TestBackupsCollectionResource(unittest.TestCase):
|
|
@ -18,7 +18,7 @@ from mock import Mock, patch
|
|||
import falcon
|
||||
|
||||
|
||||
from common import *
|
||||
from .common import *
|
||||
from freezer_api.common.exceptions import *
|
||||
|
||||
from freezer_api.api.v1 import clients as v1_clients
|
|
@ -16,12 +16,12 @@ limitations under the License.
|
|||
"""
|
||||
|
||||
import os
|
||||
from six.moves import builtins
|
||||
import unittest
|
||||
import json
|
||||
from mock import Mock, patch
|
||||
|
||||
import requests
|
||||
|
||||
from freezer_api.cmd.db_init import (ElastichsearchEngine,
|
||||
get_args,
|
||||
find_config_file,
|
||||
|
@ -182,13 +182,13 @@ class TestElasticsearchEngine(unittest.TestCase):
|
|||
self.assertRaises(Exception, self.es_manager.put_mapping, 'jobs', self.test_mappings['jobs'])
|
||||
|
||||
def test_proceed_returns_true_on_user_y(self):
|
||||
with patch('__builtin__.raw_input', return_value='y') as _raw_input:
|
||||
with patch('six.moves.builtins.input', return_value='y') as _raw_input:
|
||||
res = self.es_manager.proceed('fancy a drink ?')
|
||||
self.assertTrue(res)
|
||||
_raw_input.assert_called_once_with('fancy a drink ?')
|
||||
|
||||
def test_proceed_returns_false_on_user_n(self):
|
||||
with patch('__builtin__.raw_input', return_value='n') as _raw_input:
|
||||
with patch('six.moves.builtins.input', return_value='n') as _raw_input:
|
||||
res = self.es_manager.proceed('are you drunk ?')
|
||||
self.assertFalse(res)
|
||||
_raw_input.assert_called_once_with('are you drunk ?')
|
||||
|
@ -353,7 +353,7 @@ class TestDbInit(unittest.TestCase):
|
|||
res = find_config_file()
|
||||
self.assertEquals(DEFAULT_CONF_PATH, res)
|
||||
|
||||
@patch('freezer_api.cmd.db_init.ConfigParser.ConfigParser')
|
||||
@patch('freezer_api.cmd.db_init.configparser.ConfigParser')
|
||||
def test_parse_config_file_return_config_file_params(self, mock_ConfigParser):
|
||||
mock_config = Mock()
|
||||
mock_ConfigParser.return_value = mock_config
|
|
@ -22,7 +22,7 @@ from mock import Mock, patch
|
|||
from elasticsearch import TransportError
|
||||
|
||||
from freezer_api.storage import elastic
|
||||
from common import *
|
||||
from .common import *
|
||||
from freezer_api.common.exceptions import *
|
||||
|
||||
|
|
@ -17,7 +17,7 @@ from mock import Mock, patch
|
|||
|
||||
import falcon
|
||||
|
||||
from common import *
|
||||
from .common import *
|
||||
from freezer_api.common import exceptions
|
||||
|
||||
|
|
@ -31,6 +31,7 @@ class TestHomedocResource(unittest.TestCase):
|
|||
|
||||
def test_on_get_return_resources_information(self):
|
||||
self.resource.on_get(self.req, self.req)
|
||||
result = json.loads(self.req.data)
|
||||
result = json.loads(self.req.data.decode('utf-8'))
|
||||
print("TEST HIME DOC RESULT: {}".format(result))
|
||||
expected_result = v1.homedoc.HOME_DOC
|
||||
self.assertEquals(result, expected_result)
|
|
@ -22,7 +22,7 @@ from mock import Mock, patch
|
|||
import random
|
||||
import json
|
||||
|
||||
from common import *
|
||||
from .common import *
|
||||
from freezer_api.common.exceptions import *
|
||||
|
||||
from freezer_api.api.v1 import jobs as v1_jobs
|
||||
|
@ -459,13 +459,13 @@ class TestJobs(unittest.TestCase):
|
|||
mock_start.assert_called_once_with('my_params')
|
||||
|
||||
@patch.object(v1_jobs.Job, 'stop')
|
||||
def test_execute_start_event(self, mock_stop):
|
||||
def test_execute_stop_event(self, mock_stop):
|
||||
job = v1_jobs.Job({})
|
||||
res = job.execute_event('stop', 'my_params')
|
||||
mock_stop.assert_called_once_with('my_params')
|
||||
|
||||
@patch.object(v1_jobs.Job, 'abort')
|
||||
def test_execute_start_event(self, mock_abort):
|
||||
def test_execute_abort_event(self, mock_abort):
|
||||
job = v1_jobs.Job({})
|
||||
res = job.execute_event('abort', 'my_params')
|
||||
mock_abort.assert_called_once_with('my_params')
|
|
@ -21,7 +21,7 @@ from mock import Mock, patch
|
|||
import random
|
||||
import falcon
|
||||
|
||||
from common import *
|
||||
from .common import *
|
||||
from freezer_api.common.exceptions import *
|
||||
|
||||
from freezer_api.api.v1 import sessions as v1_sessions
|
|
@ -22,7 +22,7 @@ from mock import Mock, patch
|
|||
from freezer_api.common import utils
|
||||
|
||||
from freezer_api.common.exceptions import *
|
||||
from common import *
|
||||
from .common import *
|
||||
|
||||
DATA_backup_metadata = {
|
||||
"container": "freezer_container",
|
||||
|
@ -114,8 +114,7 @@ class TestJobDoc(unittest.TestCase):
|
|||
|
||||
def test_validate_ok_when_data_ok(self):
|
||||
job_doc = get_fake_job_0()
|
||||
res = utils.JobDoc.validate(job_doc)
|
||||
self.assertIsNone(res)
|
||||
self.assertIsNone(utils.JobDoc.validate(job_doc))
|
||||
|
||||
def test_validate_raises_BadDataFormat_when_doc_has_no_jobid(self):
|
||||
job_doc = get_fake_job_0()
|
||||
|
@ -184,8 +183,7 @@ class TestActionDoc(unittest.TestCase):
|
|||
|
||||
def test_validate_ok_when_data_ok(self):
|
||||
action_doc = get_fake_action_0()
|
||||
res = utils.ActionDoc.validate(action_doc)
|
||||
self.assertIsNone(res)
|
||||
self.assertIsNone(utils.ActionDoc.validate(action_doc))
|
||||
|
||||
def test_validate_raises_BadDataFormat_when_doc_has_no_actionid(self):
|
||||
action_doc = get_fake_action_0()
|
||||
|
@ -241,8 +239,7 @@ class TestSessionDoc(unittest.TestCase):
|
|||
|
||||
def test_validate_ok_when_data_ok(self):
|
||||
session_doc = get_fake_session_0()
|
||||
res = utils.SessionDoc.validate(session_doc)
|
||||
self.assertIsNone(res)
|
||||
self.assertIsNone(utils.SessionDoc.validate(session_doc))
|
||||
|
||||
def test_validate_raises_BadDataFormat_when_doc_has_no_sessionid(self):
|
||||
session_doc = get_fake_session_0()
|
|
@ -1,7 +1,7 @@
|
|||
elasticsearch>=1.3.0,<2.0 # Apache-2.0
|
||||
falcon>=0.1.6
|
||||
jsonschema>=2.0.0,<3.0.0,!=2.5.0
|
||||
falcon>=0.1.6 # Apache-2.0
|
||||
jsonschema>=2.0.0,<3.0.0,!=2.5.0 # MIT
|
||||
keystonemiddleware>=4.0.0 # Apache-2.0
|
||||
oslo.config>=3.2.0 # Apache-2.0
|
||||
oslo.i18n>=1.5.0 # Apache-2.0
|
||||
|
||||
six>=1.9.0 # MIT
|
||||
|
|
|
@ -1,11 +1,12 @@
|
|||
astroid<1.4.0 # breaks pylint 1.4.4
|
||||
coverage>=3.6 # Apache-2.0
|
||||
flake8>=2.2.4,<=2.4.1
|
||||
mock>=1.2
|
||||
flake8>2.4.1,<2.6.0 # MIT
|
||||
pylint==1.4.5 # GNU GPL v2
|
||||
|
||||
# Not in global-requirements
|
||||
pytest
|
||||
pytest-cov
|
||||
pytest-xdist
|
||||
|
||||
hacking>=0.10.2,<0.11 # Apache-2.0
|
||||
coverage>=3.6 # Apache-2.0
|
||||
discover # BSD
|
||||
mock>=1.2 # BSD
|
||||
oslosphinx>=2.5.0,!=3.4.0 # Apache-2.0
|
||||
python-subunit>=0.0.18 # Apache-2.0/BSD
|
||||
sphinx>=1.1.2,!=1.2.0,!=1.3b1,<1.3 # BSD
|
||||
testrepository>=0.0.18 # Apache-2.0/BSD
|
||||
testtools>=1.4.0 # MIT
|
||||
|
|
65
tox.ini
65
tox.ini
|
@ -1,5 +1,6 @@
|
|||
[tox]
|
||||
envlist = py27,pep8,pylint
|
||||
minversion = 1.8.1
|
||||
envlist = py27,py34,pep8,pylint,docs
|
||||
skipsdist = True
|
||||
|
||||
[testenv]
|
||||
|
@ -8,16 +9,64 @@ deps =
|
|||
-r{toxinidir}/requirements.txt
|
||||
-r{toxinidir}/test-requirements.txt
|
||||
|
||||
passenv =
|
||||
FREEZER_TEST_SSH_KEY
|
||||
FREEZER_TEST_SSH_USERNAME
|
||||
FREEZER_TEST_SSH_HOST
|
||||
FREEZER_TEST_CONTAINER
|
||||
FREEZER_TEST_OS_TENANT_NAME
|
||||
FREEZER_TEST_OS_USERNAME
|
||||
FREEZER_TEST_OS_REGION_NAME
|
||||
FREEZER_TEST_OS_PASSWORD
|
||||
FREEZER_TEST_OS_AUTH_URL
|
||||
FREEZER_TEST_NO_LVM
|
||||
http_proxy HTTP_PROXY https_proxy HTTPS_PROXY no_proxy NO_PROXY
|
||||
|
||||
install_command = pip install -U {opts} {packages}
|
||||
setenv = VIRTUAL_ENV={envdir}
|
||||
|
||||
commands =
|
||||
py.test -v --cov-report term-missing --cov freezer_api
|
||||
whitelist_externals =
|
||||
find
|
||||
coverage
|
||||
rm
|
||||
|
||||
[pytest]
|
||||
python_files = test_*.py
|
||||
norecursedirs = .tox .venv specs
|
||||
|
||||
[testenv:py27]
|
||||
basepython = python2.7
|
||||
setenv = OS_PATH_TEST = ./freezer_api/tests/unit
|
||||
commands =
|
||||
find . -type f -name "*.pyc" -delete
|
||||
rm -rf .testrepository
|
||||
python setup.py test --coverage --coverage-package-name freezer_api --testr-args="{posargs}"
|
||||
coverage report -m
|
||||
rm -f .coverage
|
||||
rm -rf .testrepository
|
||||
|
||||
|
||||
[testenv:py34]
|
||||
basepython = python3.4
|
||||
setenv = OS_PATH_TEST = ./freezer_api/tests/unit
|
||||
commands =
|
||||
find . -type f -name "*.pyc" -delete
|
||||
rm -rf .testrepository
|
||||
python setup.py test --coverage --coverage-package-name freezer_api --testr-args="{posargs}"
|
||||
coverage report -m
|
||||
rm -f .coverage
|
||||
rm -rf .testrepository
|
||||
|
||||
[testenv:venv]
|
||||
commands = {posargs}
|
||||
|
||||
[testenv:cover]
|
||||
commands =
|
||||
find . -type f -name "*.pyc" -delete
|
||||
python setup.py test --coverage --coverage-package-name freezer_api --testr-args="{posargs}"
|
||||
coverage report -m
|
||||
rm -f .coverage
|
||||
rm -rf .testrepository
|
||||
|
||||
[testenv:pylint]
|
||||
commands = pylint --rcfile .pylintrc freezer_api
|
||||
|
||||
|
@ -25,6 +74,14 @@ commands = pylint --rcfile .pylintrc freezer_api
|
|||
commands = flake8 freezer_api
|
||||
|
||||
[flake8]
|
||||
# it's not a bug that we aren't using all of hacking
|
||||
# H102 -> apache2 license exists
|
||||
# H103 -> license is apache
|
||||
# H201 -> no bare excepts
|
||||
# H501 -> don't use locals() for str formatting
|
||||
# H903 -> \n not \r\n
|
||||
ignore = H
|
||||
select = H102, H103, H201, H501, H903, H201, H306, H301, H233
|
||||
show-source = True
|
||||
exclude = .venv,.tox,dist,doc,test,*egg,tests,specs,build
|
||||
|
||||
|
|
Loading…
Reference in New Issue