freezer scheduler

The freezer scheduler is to be executed
as a daemon process on the client machines.

It has the following responsibilities:

  * when using the api:
    - register (if necessary) as a client in the api
    - download the list of jobs from the api
    - schedule the jobs for execution
    - launch the freezer client at the scheduled time
    - collect metadata and exit codes and upload them to the api
    - periodically poll the api for new/updated jobs
    - if a job is part of a session (a coordinated group of jobs),
      update the session status when the job starts/stops

  * when not using the api:
    - load job configurations from files
    - schedule the jobs for execution
    - launch the freezer client at the scheduled time

The freezer scheduler can also be used to manage jobs
and sessions using the following positional parameters:

  job-list
  job-get
  job-create
  job-delete
  job-start
  job-stop
  session-list
  session-get
  session-create
  session-delete
  session-list-job
  session-add-job
  session-remove-job

or to register the client in the api using the positional parameter:

  register
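
Example invocations (illustrative; this assumes the scheduler is
installed as the freezer-scheduler executable):

  freezer-scheduler start
  freezer-scheduler job-list
  freezer-scheduler job-create --file job1.conf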

Implements blueprint: freezer-scheduler-start

Change-Id: I06ae202a0f464f7240c137744a5b54d1177cabd9
Fabrizio Vanni, 2015-06-08 12:05:08 +01:00 (committed by Fausto Marzi)
parent e4232f242d, commit d4b9399e9b
42 changed files with 5011 additions and 544 deletions

@@ -0,0 +1,80 @@
"""
Copyright 2015 Hewlett-Packard
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
This product includes cryptographic software written by Eric Young
(eay@cryptsoft.com). This product includes software written by Tim
Hudson (tjh@cryptsoft.com).
========================================================================
"""
import json
import requests
import exceptions
class ActionManager(object):
def __init__(self, client):
self.client = client
self.endpoint = self.client.endpoint + '/v1/actions/'
@property
def headers(self):
return {'X-Auth-Token': self.client.auth_token}
def create(self, doc, action_id=''):
action_id = action_id or doc.get('action_id', '')
endpoint = self.endpoint + action_id
r = requests.post(endpoint,
data=json.dumps(doc),
headers=self.headers)
if r.status_code != 201:
raise exceptions.ApiClientException(r)
action_id = r.json()['action_id']
return action_id
def delete(self, action_id):
endpoint = self.endpoint + action_id
r = requests.delete(endpoint, headers=self.headers)
if r.status_code != 204:
raise exceptions.ApiClientException(r)
def list(self, limit=10, offset=0, search=None):
data = json.dumps(search) if search else None
query = {'limit': int(limit), 'offset': int(offset)}
r = requests.get(self.endpoint, headers=self.headers,
params=query, data=data)
if r.status_code != 200:
raise exceptions.ApiClientException(r)
return r.json()['actions']
def get(self, action_id):
endpoint = self.endpoint + action_id
r = requests.get(endpoint, headers=self.headers)
if r.status_code == 200:
return r.json()
if r.status_code == 404:
return None
raise exceptions.ApiClientException(r)
def update(self, action_id, update_doc):
endpoint = self.endpoint + action_id
r = requests.patch(endpoint,
headers=self.headers,
data=json.dumps(update_doc))
if r.status_code != 200:
raise exceptions.ApiClientException(r)
return r.json()['version']
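
A minimal usage sketch for the manager above (hypothetical: assumes a
configured freezer.apiclient.client.Client instance named c, which exposes
this manager as c.actions):

    # hypothetical usage; c is a configured apiclient Client
    doc = {'freezer_action': {'action': 'backup', 'backup_name': 'mydocs'}}
    action_id = c.actions.create(doc)   # POST /v1/actions/
    stored = c.actions.get(action_id)   # GET /v1/actions/{action_id}
    c.actions.update(action_id, {'freezer_action': {'action': 'restore'}})
    c.actions.delete(action_id)         # DELETE /v1/actions/{action_id}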


@@ -26,6 +26,8 @@ from openstackclient.identity import client as os_client
from backups import BackupsManager
from registration import RegistrationManager
from jobs import JobManager
from actions import ActionManager
from sessions import SessionManager
import exceptions
@@ -64,6 +66,8 @@ class Client(object):
self.backups = BackupsManager(self)
self.registration = RegistrationManager(self)
self.jobs = JobManager(self)
self.actions = ActionManager(self)
self.sessions = SessionManager(self)
@cached_property
def endpoint(self):


@@ -67,4 +67,8 @@ class ApiClientException(Exception):
message = self.get_message_from_api_response(r) or \
self.get_message_from_response(r) or \
str(r)
try:
self.status_code = r.status_code
        except AttributeError:
self.status_code = None
super(ApiClientException, self).__init__(message)


@@ -0,0 +1,162 @@
"""
Copyright 2015 Hewlett-Packard
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
This product includes cryptographic software written by Eric Young
(eay@cryptsoft.com). This product includes software written by Tim
Hudson (tjh@cryptsoft.com).
========================================================================
"""
import json
import requests
import exceptions
class SessionManager(object):
def __init__(self, client):
self.client = client
self.endpoint = self.client.endpoint + '/v1/sessions/'
@property
def headers(self):
return {'X-Auth-Token': self.client.auth_token}
def create(self, doc, session_id=''):
session_id = session_id or doc.get('session_id', '')
endpoint = self.endpoint + session_id
r = requests.post(endpoint,
data=json.dumps(doc),
headers=self.headers)
if r.status_code != 201:
raise exceptions.ApiClientException(r)
session_id = r.json()['session_id']
return session_id
def delete(self, session_id):
endpoint = self.endpoint + session_id
r = requests.delete(endpoint, headers=self.headers)
if r.status_code != 204:
raise exceptions.ApiClientException(r)
def list_all(self, limit=10, offset=0, search=None):
data = json.dumps(search) if search else None
query = {'limit': int(limit), 'offset': int(offset)}
r = requests.get(self.endpoint, headers=self.headers,
params=query, data=data)
if r.status_code != 200:
raise exceptions.ApiClientException(r)
return r.json()['sessions']
    def list(self, limit=10, offset=0, search=None):
        # avoid a shared mutable default argument
        search = search or {}
        new_search = search.copy()
        new_search['match'] = search.get('match', [])
        return self.list_all(limit, offset, new_search)
def get(self, session_id):
endpoint = self.endpoint + session_id
r = requests.get(endpoint, headers=self.headers)
if r.status_code == 200:
return r.json()
if r.status_code == 404:
return None
raise exceptions.ApiClientException(r)
def update(self, session_id, update_doc):
endpoint = self.endpoint + session_id
r = requests.patch(endpoint,
headers=self.headers,
data=json.dumps(update_doc))
if r.status_code != 200:
raise exceptions.ApiClientException(r)
return r.json()['version']
def add_job(self, session_id, job_id):
# endpoint /v1/sessions/{sessions_id}/jobs/{job_id}
endpoint = '{0}{1}/jobs/{2}'.format(self.endpoint, session_id, job_id)
r = requests.put(endpoint,
headers=self.headers)
if r.status_code != 204:
raise exceptions.ApiClientException(r)
return
def remove_job(self, session_id, job_id):
# endpoint /v1/sessions/{sessions_id}/jobs/{job_id}
endpoint = '{0}{1}/jobs/{2}'.format(self.endpoint, session_id, job_id)
retry = 5
r = ''
while retry:
r = requests.delete(endpoint,
headers=self.headers)
if r.status_code == 204:
return
retry -= 1
raise exceptions.ApiClientException(r)
def start_session(self, session_id, job_id, session_tag):
"""
Informs the api that the client is starting the session
identified by the session_id and request the session_tag
to be incremented up to the requested value.
The returned session_id could be:
* current_tag + 1 if the session has started
* > current_tag + 1 if the action had already been started
by some other node and this node was out of sync
:param session_id:
:param job_id:
:param session_tag: the new session_id
:return: the response obj:
{ result: string 'running' or 'error',
'session_tag': the new session_tag )
"""
# endpoint /v1/sessions/{sessions_id}/action
endpoint = '{0}{1}/action'.format(self.endpoint, session_id)
doc = {"start": {
"job_id": job_id,
"current_tag": session_tag
}}
r = requests.post(endpoint,
headers=self.headers,
data=json.dumps(doc))
if r.status_code != 202:
raise exceptions.ApiClientException(r)
return r.json()
def end_session(self, session_id, job_id, session_tag, result):
"""
Informs the freezer service that the job has ended.
        Provides information about the job's result and the session tag
:param session_id:
:param job_id:
:param session_tag:
:param result:
:return:
"""
# endpoint /v1/sessions/{sessions_id}/action
endpoint = '{0}{1}/action'.format(self.endpoint, session_id)
doc = {"end": {
"job_id": job_id,
"current_tag": session_tag,
"result": result
}}
r = requests.post(endpoint,
headers=self.headers,
data=json.dumps(doc))
if r.status_code != 202:
raise exceptions.ApiClientException(r)
return r.json()
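
A sketch of the coordination flow that start_session/end_session implement
(hypothetical: c is a configured apiclient Client; the ids and the tag come
from the job document):

    # hypothetical flow; c is a configured apiclient Client
    resp = c.sessions.start_session(session_id, job_id, current_tag)
    if resp['result'] == 'success':
        # the api may return a tag > current_tag + 1 if another node
        # had already started this session
        current_tag = resp['session_tag']
    # ... the job executes ...
    c.sessions.end_session(session_id, job_id, current_tag, 'success')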


@@ -64,7 +64,7 @@ DEFAULT_PARAMS = {
'restore_abs_path': False, 'log_file': None,
'upload': True, 'mode': 'fs', 'action': 'backup',
'vssadmin': True, 'shadow': '', 'shadow_path': '',
'windows_volume': '', 'command': None
'windows_volume': '', 'command': None, 'metadata_out': False
}
@@ -268,6 +268,12 @@ def backup_arguments(args_dict={}):
password = <mysqlpass>
port = <db-port>''',
dest='mysql_conf', default=False)
arg_parser.add_argument(
'--metadata-out', action='store',
        help=('Set the filename to which to write the metadata regarding '
              'the backup metrics. Use "-" to output to standard output.'),
dest='metadata_out', default=False)
if is_windows():
arg_parser.add_argument(
'--log-file', action='store',


@@ -19,6 +19,8 @@ Hudson (tjh@cryptsoft.com).
========================================================================
"""
import sys
from freezer import swift
from freezer import utils
from freezer import backup
@@ -35,6 +37,9 @@ class Job:
def execute(self):
logging.info('[*] Action not implemented')
def get_metadata(self):
return None
@staticmethod
def executemethod(func):
def wrapper(self):
@@ -115,6 +120,25 @@ class BackupJob(Job):
else:
raise ValueError('Please provide a valid backup mode')
def get_metadata(self):
metadata = {
'current_level': self.conf.curr_backup_level,
'fs_real_path': (self.conf.lvm_auto_snap or
self.conf.path_to_backup),
'vol_snap_path':
self.conf.path_to_backup if self.conf.lvm_auto_snap else '',
'client_os': sys.platform,
'client_version': self.conf.__version__
}
        fields = ['action', 'always_level', 'backup_media', 'backup_name',
                  'container', 'container_segments', 'curr_backup_level',
                  'dry_run', 'hostname', 'path_to_backup', 'max_level',
                  'mode', 'meta_data_file', 'time_stamp']
for field_name in fields:
metadata[field_name] = self.conf.__dict__.get(field_name, '')
return metadata
class RestoreJob(Job):
@Job.executemethod


@@ -31,7 +31,7 @@ import os
import subprocess
import logging
import sys
import json
# Initialize backup options
(backup_args, arg_parse) = backup_arguments()
@@ -125,6 +125,11 @@ def freezer_main(args={}):
freezer_job = job.create_job(backup_args)
freezer_job.execute()
if backup_args.metadata_out == '-':
metadata = freezer_job.get_metadata()
if metadata:
sys.stdout.write(json.dumps(metadata))
return backup_args
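
The scheduler consumes this output by launching the client with
--metadata-out - and parsing its stdout (see scheduler_job.py below).
A minimal sketch of that round-trip, assuming a freezerc executable on the
PATH and an existing config file:

    import json
    import subprocess

    proc = subprocess.Popen(['freezerc', '--metadata-out', '-',
                             '--config', 'backup_job.conf'],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    output, error = proc.communicate()
    if proc.returncode == 0 and output:
        metadata = json.loads(output)  # dict built by Job.get_metadata()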


@@ -0,0 +1,170 @@
"""
Copyright 2015 Hewlett-Packard
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
This product includes cryptographic software written by Eric Young
(eay@cryptsoft.com). This product includes software written by Tim
Hudson (tjh@cryptsoft.com).
========================================================================
"""
import argparse
from prettytable import PrettyTable
SCHEDULER_CONF_D = '/etc/freezer/scheduler/conf.d'
class OpenstackOptions(object):
def __init__(self, args, default_dict={}):
self.username = args.os_username or\
default_dict.get('OS_USERNAME', None)
self.tenant_name = args.os_tenant_name or\
default_dict.get('OS_TENANT_NAME', None)
self.auth_url = args.os_auth_url or\
default_dict.get('OS_AUTH_URL', None)
self.password = args.os_password or\
default_dict.get('OS_PASSWORD', None)
self.tenant_id = args.os_tenant_id or\
default_dict.get('OS_TENANT_ID', None)
self.region_name = args.os_region_name or\
default_dict.get('OS_REGION_NAME', None)
self.endpoint = args.os_endpoint or\
default_dict.get('OS_SERVICE_ENDPOINT', None)
if not self.is_valid():
raise Exception('ERROR: OS Options not valid: {0}'.
format(self.reason()))
def __str__(self):
table = PrettyTable(["variable", "value"])
table.add_row(['username', self.username])
table.add_row(['tenant_name', self.tenant_name])
table.add_row(['auth_url', self.auth_url])
table.add_row(['password', self.password])
table.add_row(['tenant_id', self.tenant_id])
table.add_row(['region_name', self.region_name])
table.add_row(['endpoint', self.endpoint])
return table.__str__()
def is_valid(self):
if self.reason():
return False
return True
def reason(self):
missing = []
for attr in ['username', 'password', 'tenant_name', 'region_name']:
if not self.__getattribute__(attr):
missing.append(attr)
if missing:
return 'missing {0}'.format(', '.join(missing))
return ''
def get_args(choices):
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument(
'action', action='store', default=None, choices=choices, help='')
arg_parser.add_argument(
'--debug', action='store_true',
help='Prints debugging output onto the console, this may include '
'OS environment variables, request and response calls. '
'Helpful for debugging and understanding the API calls.',
dest='debug', default=False)
arg_parser.add_argument(
'-j', '--job', action='store',
help=('name or ID of the job'),
dest='job', default=None)
arg_parser.add_argument(
'-s', '--session', action='store',
help=('name or ID of the session'),
        dest='session_id', default=None)
arg_parser.add_argument(
'--file', action='store',
help=('Local file that contains the resource '
'to be uploaded/downloaded'),
dest='fname', default=None)
arg_parser.add_argument(
'--os-endpoint', action='store',
help=('Specify an endpoint to use instead of retrieving '
'one from the service catalog (via authentication). '
'Defaults to env[OS_SERVICE_ENDPOINT]'),
dest='os_endpoint', default=None)
arg_parser.add_argument(
'--os-username', action='store',
help=('Name used for authentication with the OpenStack '
'Identity service. Defaults to env[OS_USERNAME].'),
dest='os_username', default=None)
arg_parser.add_argument(
'--os-password', action='store',
help=('Password used for authentication with the OpenStack '
'Identity service. Defaults to env[OS_PASSWORD].'),
dest='os_password', default=None)
arg_parser.add_argument(
'--os-tenant-name', action='store',
help=('Tenant to request authorization on. Defaults to '
'env[OS_TENANT_NAME].'),
dest='os_tenant_name', default=None)
arg_parser.add_argument(
'--os-tenant-id', action='store',
help=('Tenant to request authorization on. Defaults to '
'env[OS_TENANT_ID].'),
dest='os_tenant_id', default=None)
arg_parser.add_argument(
'--os-auth-url', action='store',
help=('Specify the Identity endpoint to use for '
'authentication. Defaults to env[OS_AUTH_URL].'),
dest='os_auth_url', default=None)
arg_parser.add_argument(
'--os-region-name', action='store',
help=('Specify the region to use. Defaults to '
'env[OS_REGION_NAME].'),
dest='os_region_name', default=None)
arg_parser.add_argument(
'--os-token', action='store',
help=('Specify an existing token to use instead of retrieving'
' one via authentication (e.g. with username & password). '
'Defaults to env[OS_SERVICE_TOKEN].'),
dest='os_token', default=None)
arg_parser.add_argument(
'-c', '--client-id', action='store',
        help=('Specifies the client_id used when contacting the service. '
              'If not specified it will be automatically created '
              'using the tenant-id and the machine hostname.'),
dest='client_id', default=None)
arg_parser.add_argument(
'-n', '--no-api', action='store_true',
help='Prevents the scheduler from using the api service',
dest='no_api', default=False)
arg_parser.add_argument(
'-a', '--active-only', action='store_true',
help='Filter only active jobs/session',
dest='active_only', default=False)
arg_parser.add_argument(
'-d', '--dir', action='store',
help=('Used to store/retrieve files on local storage, including '
'those exchanged with the api service. '
'Default value is {0}'.format(SCHEDULER_CONF_D)),
dest='jobs_dir', default=SCHEDULER_CONF_D)
arg_parser.add_argument(
'-i', '--interval', action='store',
        help=('Specifies the api-polling interval in seconds. '
              'Defaults to 60 seconds.'),
dest='interval', default=60)
arg_parser.add_argument(
'-l', '--log-file', action='store',
help=('location of log file'),
dest='log_file', default=None)
return arg_parser.parse_args()
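
A sketch of how these pieces are wired together (mirroring main() in
scheduler.py below; os.environ supplies the OS_* defaults):

    import os

    args = get_args(['start', 'stop', 'status'])
    os_options = OpenstackOptions(args, os.environ)
    if args.debug:
        print os_options  # PrettyTable dump of the resolved values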

freezer/scheduler/daemon.py

@@ -0,0 +1,160 @@
"""
Copyright 2015 Hewlett-Packard
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
This product includes cryptographic software written by Eric Young
(eay@cryptsoft.com). This product includes software written by Tim
Hudson (tjh@cryptsoft.com).
========================================================================
"""
import logging
import os
import signal
from tempfile import gettempdir
from time import sleep
from pep3143daemon import DaemonContext, PidFile
from freezer.utils import create_dir
class Daemon:
"""
A class to manage all the daemon-related stuff
"""
instance = None
exit_flag = False
def __init__(self, daemonizable=None, pid_fname=None):
# daemonizable has to provide start/stop (and possibly reload) methods
Daemon.instance = self
self._pid_fname = pid_fname
self.daemonizable = daemonizable
@staticmethod
def setup_logging(log_file):
class NoLogFilter(logging.Filter):
def filter(self, record):
return False
def configure_logging(file_name):
expanded_file_name = os.path.expanduser(file_name)
expanded_dir_name = os.path.dirname(expanded_file_name)
create_dir(expanded_dir_name, do_log=False)
logging.basicConfig(
filename=expanded_file_name,
level=logging.INFO,
format=('%(asctime)s %(name)s %(levelname)s %(message)s'))
# filter out some annoying messages
# not the best position for this code
log_filter = NoLogFilter()
logging.getLogger("apscheduler.scheduler").\
addFilter(log_filter)
logging.getLogger("apscheduler.executors.default").\
addFilter(log_filter)
logging.getLogger("requests.packages.urllib3.connectionpool").\
addFilter(log_filter)
return expanded_file_name
log_file_paths = [log_file] if log_file else [
'/var/log/freezer-scheduler.log',
'~/.freezer/freezer-scheduler.log']
for file_name in log_file_paths:
try:
return configure_logging(file_name)
except IOError:
pass
raise Exception("Unable to write to log file")
@staticmethod
def handle_program_exit(signum, frame):
Daemon.exit_flag = True
Daemon.instance.daemonizable.stop()
@staticmethod
def handle_reload(signum, frame):
Daemon.instance.daemonizable.reload()
@property
def signal_map(self):
return {
signal.SIGTERM: Daemon.handle_program_exit,
signal.SIGHUP: Daemon.handle_reload,
}
@property
def pid_fname(self):
if not self._pid_fname:
fname = '{0}/freezer_sched_{1}.pid'.format(
gettempdir(),
os.path.split(os.path.expanduser('~'))[-1])
self._pid_fname = os.path.normpath(fname)
return self._pid_fname
@property
def pid(self):
if os.path.isfile(self.pid_fname):
with open(self.pid_fname, 'r') as f:
return int(f.read())
return None
@property
def jobs_file(self):
return ''
@property
def no_api(self):
return False
def start(self, log_file=None):
pidfile = PidFile(self.pid_fname)
with DaemonContext(pidfile=pidfile, signal_map=self.signal_map):
self.setup_logging(log_file)
while not Daemon.exit_flag:
try:
logging.info('[*] freezer daemon starting, pid: {0}'.
format(self.pid))
self.daemonizable.start()
Daemon.exit_flag = True
except Exception as e:
logging.error('[*] Restarting daemonized procedure '
'after Fatal Error: {0}'.format(e))
sleep(10)
logging.info('[*] freezer daemon done, pid: {0}'.format(self.pid))
def stop(self):
pid = self.pid
if pid:
os.kill(self.pid, signal.SIGTERM)
else:
print "Not Running"
def status(self):
pid = self.pid
if pid:
print "Running with pid: {0}".format(pid)
else:
print "Not Running"
def reload(self):
pid = self.pid
if pid:
os.kill(pid, signal.SIGHUP)
else:
print "Not Running"


@@ -0,0 +1,229 @@
#!/usr/bin/env python
"""
Copyright 2015 Hewlett-Packard
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
This product includes cryptographic software written by Eric Young
(eay@cryptsoft.com). This product includes software written by Tim
Hudson (tjh@cryptsoft.com).
========================================================================
"""
import logging
import os
import sys
import threading
from distutils import spawn
from apscheduler.schedulers.blocking import BlockingScheduler
from freezer.apiclient import client
import arguments
import shell
import utils
from freezer.utils import create_dir
from daemon import Daemon
from scheduler_job import Job
class FreezerScheduler(object):
def __init__(self, apiclient, interval, job_path):
# config_manager
self.client = apiclient
self.freezerc_executable = spawn.find_executable('freezerc')
self.job_path = job_path
self._client = None
self.lock = threading.Lock()
self.execution_lock = threading.Lock()
job_defaults = {
'coalesce': True,
'max_instances': 1
}
self.scheduler = BlockingScheduler(job_defaults=job_defaults)
if self.client:
self.scheduler.add_job(self.poll, 'interval',
seconds=interval, id='api_poll')
self.add_job = self.scheduler.add_job
self.remove_job = self.scheduler.remove_job
self.jobs = {}
def get_jobs(self):
if self.client:
job_doc_list = []
try:
job_doc_list = utils.get_active_jobs_from_api(self.client)
except Exception as e:
logging.error('Unable to get jobs from freezer api service. '
'{0}'.format(e))
try:
utils.save_jobs_to_disk(job_doc_list, self.job_path)
except Exception as e:
logging.error('Unable to save jobs to {0}. '
'{1}'.format(self.job_path, e))
return job_doc_list
else:
return utils.get_jobs_from_disk(self.job_path)
def start_session(self, session_id, job_id, session_tag):
if self.client:
return self.client.sessions.start_session(session_id,
job_id,
session_tag)
else:
raise Exception("Unable to start session: api not in use.")
def end_session(self, session_id, job_id, session_tag, result):
if self.client:
return self.client.sessions.end_session(session_id,
job_id,
session_tag,
result)
else:
raise Exception("Unable to end session: api not in use.")
def upload_metadata(self, metadata_doc):
if self.client:
self.client.backups.create(metadata_doc)
def start(self):
utils.do_register(self.client)
self.poll()
self.scheduler.start()
def update_job(self, job_id, job_doc):
if self.client:
try:
return self.client.jobs.update(job_id, job_doc)
except Exception as e:
logging.error("[*] Job update error: {0}".format(e))
def update_job_status(self, job_id, status):
doc = {'job_schedule': {'status': status}}
self.update_job(job_id, doc)
def is_scheduled(self, job_id):
return self.scheduler.get_job(job_id) is not None
def create_job(self, job_doc):
job = Job.create(self, self.freezerc_executable, job_doc)
if job:
self.jobs[job.id] = job
logging.info("Created job {0}".format(job.id))
return job
def poll(self):
work_job_doc_list = []
try:
work_job_doc_list = self.get_jobs()
except Exception as e:
logging.error("[*] Unable to get jobs: {0}".format(e))
work_job_id_list = []
# create job if necessary, then let it process its events
for job_doc in work_job_doc_list:
job_id = job_doc['job_id']
work_job_id_list.append(job_id)
job = self.jobs.get(job_id, None) or self.create_job(job_doc)
if job:
job.process_event(job_doc)
# request removal of any job that has been removed in the api
for job_id, job in self.jobs.iteritems():
if job_id not in work_job_id_list:
job.remove()
remove_list = [job_id for job_id, job in self.jobs.items()
if job.can_be_removed()]
for k in remove_list:
self.jobs.pop(k)
def stop(self):
try:
self.scheduler.shutdown(wait=False)
except:
pass
def reload(self):
logging.warning("reload not supported")
def _get_doers(module):
doers = {}
for attr in (a for a in dir(module) if a.startswith('do_')):
command = attr[3:].replace('_', '-')
callback = getattr(module, attr)
doers[command] = callback
return doers
def main():
doers = _get_doers(shell)
doers.update(_get_doers(utils))
    possible_actions = doers.keys() + ['start', 'stop', 'reload', 'status']
args = arguments.get_args(possible_actions)
if args.action is None:
print "No action"
sys.exit(1)
os_options = arguments.OpenstackOptions(args, os.environ)
if args.debug:
print os_options
apiclient = None
if args.no_api is False:
apiclient = client.Client(username=os_options.username,
password=os_options.password,
tenant_name=os_options.tenant_name,
endpoint=os_options.endpoint,
auth_url=os_options.auth_url)
if args.client_id:
apiclient.client_id = args.client_id
create_dir(args.jobs_dir, do_log=False)
if args.action in doers:
try:
return doers[args.action](apiclient, args)
except Exception as e:
print ('ERROR {0}'.format(e))
return 1
freezer_scheduler = FreezerScheduler(apiclient=apiclient,
interval=int(args.interval),
job_path=args.jobs_dir)
daemon = Daemon(daemonizable=freezer_scheduler)
if args.action == 'start':
daemon.start(log_file=args.log_file)
elif args.action == 'stop':
daemon.stop()
elif args.action == 'reload':
daemon.reload()
elif args.action == 'status':
daemon.status()
return os.EX_OK
if __name__ == '__main__':
sys.exit(main())


@@ -0,0 +1,440 @@
"""
Copyright 2015 Hewlett-Packard
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
This product includes cryptographic software written by Eric Young
(eay@cryptsoft.com). This product includes software written by Tim
Hudson (tjh@cryptsoft.com).
========================================================================
"""
import logging
import subprocess
import tempfile
import datetime
import json
import time
from ConfigParser import ConfigParser
class StopState(object):
@staticmethod
def stop(job, doc):
job.job_doc = doc
job.event = Job.NO_EVENT
job.job_doc_status = Job.STOP_STATUS
job.scheduler.update_job(job.id, job.job_doc)
return Job.NO_EVENT
@staticmethod
def abort(job, doc):
job.job_doc = doc
job.event = Job.NO_EVENT
job.job_doc_status = Job.STOP_STATUS
job.scheduler.update_job(job.id, job.job_doc)
return Job.NO_EVENT
@staticmethod
def start(job, doc):
job.job_doc = doc
job.event = Job.NO_EVENT
job.job_doc_status = Job.STOP_STATUS
job.schedule()
job.scheduler.update_job(job.id, job.job_doc)
return Job.NO_EVENT
@staticmethod
def remove(job):
job.unschedule()
job.job_doc_status = Job.REMOVED_STATUS
return Job.NO_EVENT
class ScheduledState(object):
@staticmethod
def stop(job, doc):
job.unschedule()
job.scheduler.update_job(job.id, job.job_doc)
return Job.STOP_EVENT
@staticmethod
def abort(job, doc):
return ScheduledState.stop(job, doc)
@staticmethod
def start(job, doc):
job.event = Job.NO_EVENT
job.scheduler.update_job(job.id, job.job_doc)
return Job.NO_EVENT
@staticmethod
def remove(job):
job.unschedule()
job.job_doc_status = Job.REMOVED_STATUS
return Job.NO_EVENT
class RunningState(object):
@staticmethod
def stop(job, doc):
job.event = Job.STOP_EVENT
return Job.NO_EVENT
@staticmethod
def abort(job, doc):
job.event = Job.ABORT_EVENT
return Job.NO_EVENT
@staticmethod
def start(job, doc):
job.event = Job.NO_EVENT
job.scheduler.update_job(job.id, job.job_doc)
return Job.NO_EVENT
@staticmethod
def remove(job):
job.event = Job.REMOVE_EVENT
return Job.NO_EVENT
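# State/event summary of the three state classes above (events arrive via
# the job document polled from the api):
#   StopState:      'start' schedules the job; 'stop'/'abort' persist the
#                   stop status; 'remove' marks the job removed
#   ScheduledState: 'stop'/'abort' unschedule the job; 'remove' unschedules
#                   it and marks it removed
#   RunningState:   'stop'/'abort'/'remove' are only recorded on the job
#                   and handled later, when execute() reaches finish()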
class Job(object):
NO_EVENT = ''
STOP_EVENT = 'stop'
START_EVENT = 'start'
ABORT_EVENT = 'abort'
REMOVE_EVENT = 'remove'
STOP_STATUS = 'stop'
SCHEDULED_STATUS = 'scheduled'
RUNNING_STATUS = 'running'
REMOVED_STATUS = 'removed'
COMPLETED_STATUS = 'completed'
FAIL_RESULT = 'fail'
SUCCESS_RESULT = 'success'
ABORTED_RESULT = 'aborted'
@staticmethod
def create(scheduler, executable, job_doc):
job = Job(scheduler, executable, job_doc)
if job.job_doc_status in ['running', 'scheduled']:
logging.warning('Job {0} already has {1} status, '
'skipping'.format(job.id, job.job_doc_status))
return None
if not job.event and not job.job_doc_status:
logging.warning('Autostart Job {0}'.format(job.id))
job.event = Job.START_EVENT
return job
def __init__(self, scheduler, executable, job_doc):
self.scheduler = scheduler
self.executable = executable
self.job_doc = job_doc
self.process = None
self.state = StopState
def remove(self):
with self.scheduler.lock:
# delegate to state object
logging.info('REMOVE job {0}'.format(self.id))
self.state.remove(self)
@property
def id(self):
return self.job_doc['job_id']
@property
def session_id(self):
return self.job_doc.get('session_id', '')
@session_id.setter
def session_id(self, value):
self.job_doc['session_id'] = value
@property
def session_tag(self):
return self.job_doc.get('session_tag', 0)
@session_tag.setter
def session_tag(self, value):
self.job_doc['session_tag'] = value
@property
def event(self):
return self.job_doc['job_schedule'].get('event', '')
@event.setter
def event(self, value):
self.job_doc['job_schedule']['event'] = value
@property
def job_doc_status(self):
return self.job_doc['job_schedule'].get('status', '')
@job_doc_status.setter
def job_doc_status(self, value):
self.job_doc['job_schedule']['status'] = value
@property
def result(self):
return self.job_doc['job_schedule'].get('result', '')
@result.setter
def result(self, value):
self.job_doc['job_schedule']['result'] = value
def can_be_removed(self):
return self.job_doc_status == Job.REMOVED_STATUS
@staticmethod
def save_action_to_file(action, f):
parser = ConfigParser()
parser.add_section('action')
for action_k, action_v in action.items():
parser.set('action', action_k, action_v)
parser.write(f)
f.seek(0)
@property
def schedule_date(self):
return self.job_doc['job_schedule'].get('schedule_date', '')
@property
def schedule_interval(self):
return self.job_doc['job_schedule'].get('schedule_interval', '')
@property
def schedule_cron_fields(self):
cron_fields = ['year', 'month', 'day', 'week', 'day_of_week',
'hour', 'minute', 'second']
return {key: value
for key, value in self.job_doc['job_schedule'].items()
if key in cron_fields}
@property
def scheduled(self):
return self.scheduler.is_scheduled(self.id)
def get_schedule_args(self):
if self.schedule_date:
return {'trigger': 'date',
'run_date': self.schedule_date}
elif self.schedule_interval:
kwargs = {'trigger': 'interval'}
if self.schedule_interval == 'continuous':
kwargs.update({'seconds': 1})
else:
val, unit = self.schedule_interval.split(' ')
kwargs.update({unit: int(val)})
return kwargs
        else:
            cron_fields = self.schedule_cron_fields
            if cron_fields:
                # dict.update() returns None, so build the kwargs explicitly
                kwargs = {'trigger': 'cron'}
                kwargs.update(cron_fields)
                return kwargs
# no scheduling information, schedule to start within a few seconds
return {'trigger': 'date',
'run_date': datetime.datetime.now() +
datetime.timedelta(0, 2, 0)}
def process_event(self, job_doc):
with self.scheduler.lock:
next_event = job_doc['job_schedule'].get('event', '')
while next_event:
if next_event == Job.STOP_EVENT:
logging.info('JOB {0} event: STOP'.format(self.id))
next_event = self.state.stop(self, job_doc)
elif next_event == Job.START_EVENT:
logging.info('JOB {0} event: START'.format(self.id))
next_event = self.state.start(self, job_doc)
elif next_event == Job.ABORT_EVENT:
logging.info('JOB {0} event: ABORT'.format(self.id))
next_event = self.state.abort(self, job_doc)
def upload_metadata(self, metadata_string):
try:
metadata = json.loads(metadata_string)
if metadata:
self.scheduler.upload_metadata(metadata)
except Exception as e:
logging.error('[*] metrics upload error: {0}'.format(e))
logging.info("[*] Job {0}, freezer action metadata uploaded")
def execute_job_action(self, job_action):
max_retries = job_action.get('max_retries', 1)
tries = max_retries
freezer_action = job_action.get('freezer_action', {})
max_retries_interval = job_action.get('max_retries_interval', 60)
action_name = freezer_action.get('action', '')
while tries:
with tempfile.NamedTemporaryFile() as config_file:
self.save_action_to_file(freezer_action, config_file)
freezer_command = 'python {0} --metadata-out - --config {1}'.\
format(self.executable, config_file.name)
self.process = subprocess.Popen(freezer_command.split(),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
output, error = self.process.communicate()
if error:
logging.error("[*] Freezer client error: {0}".format(error))
elif output:
self.upload_metadata(output)
if self.process.returncode:
# ERROR
tries -= 1
if tries:
logging.warning('[*] Job {0} failed {1} action,'
' retrying in {2} seconds'
.format(self.id, action_name,
max_retries_interval))
# sleeping with the bloody lock, but we don't want other
# actions to mess with our stuff like fs snapshots, do we ?
time.sleep(max_retries_interval)
else:
# SUCCESS
logging.info('[*] Job {0} action {1}'
' returned success exit code'.
format(self.id, action_name))
return Job.SUCCESS_RESULT
logging.error('[*] Job {0} action {1} failed after {2} tries'
.format(self.id, action_name, max_retries))
return Job.FAIL_RESULT
def execute(self):
result = Job.SUCCESS_RESULT
with self.scheduler.execution_lock:
with self.scheduler.lock:
logging.info('job {0} running'.format(self.id))
self.state = RunningState
self.job_doc_status = Job.RUNNING_STATUS
self.scheduler.update_job_status(self.id, self.job_doc_status)
self.start_session()
for job_action in self.job_doc.get('job_actions', []):
if job_action.get('mandatory', False) or\
(result == Job.SUCCESS_RESULT):
result = self.execute_job_action(job_action)
else:
freezer_action = job_action.get('freezer_action', {})
action_name = freezer_action.get('action', '')
logging.warning("[*]skipping {0} action".
format(action_name))
self.result = result
self.finish()
def finish(self):
self.end_session(self.result)
with self.scheduler.lock:
if self.event == Job.REMOVE_EVENT:
self.unschedule()
self.job_doc_status = Job.REMOVED_STATUS
return
if not self.scheduled:
self.job_doc_status = Job.COMPLETED_STATUS
self.state = StopState
self.scheduler.update_job(self.id, self.job_doc)
return
if self.event in [Job.STOP_EVENT, Job.ABORT_EVENT]:
self.unschedule()
self.job_doc_status = Job.COMPLETED_STATUS
self.scheduler.update_job(self.id, self.job_doc)
else:
self.job_doc_status = Job.SCHEDULED_STATUS
self.state = ScheduledState
self.scheduler.update_job_status(self.id, self.job_doc_status)
def start_session(self):
if not self.session_id:
return
retry = 5
while retry:
try:
resp = self.scheduler.start_session(self.session_id,
self.id,
self.session_tag)
if resp['result'] == 'success':
self.session_tag = resp['session_tag']
return
except Exception as e:
logging.error('[*]Error while starting session {0}. {1}'.
format(self.session_id, e))
logging.warning('[*]Retrying to start session {0}'.
format(self.session_id))
retry -= 1
logging.error('[*]Unable to start session {0}'.format(self.session_id))
def end_session(self, result):
if not self.session_id:
return
retry = 5
while retry:
try:
resp = self.scheduler.end_session(self.session_id,
self.id,
self.session_tag,
result)
if resp['result'] == 'success':
return
except Exception as e:
logging.error('[*]Error while ending session {0}. {1}'.
format(self.session_id, e))
logging.warning('[*]Retrying to end session {0}'.
format(self.session_id))
retry -= 1
logging.error('[*]Unable to end session {0}'.format(self.session_id))
def schedule(self):
try:
kwargs = self.get_schedule_args()
self.scheduler.add_job(self.execute, id=self.id, **kwargs)
except Exception as e:
logging.error("[*] Unable to schedule job {0}: {1}".
format(self.id, e))
if self.scheduled:
self.job_doc_status = Job.SCHEDULED_STATUS
self.state = ScheduledState
else:
# job not scheduled or already started and waiting for lock
self.job_doc_status = Job.COMPLETED_STATUS
self.state = StopState
def unschedule(self):
try:
# already executing job are not present in the apscheduler list
self.scheduler.remove_job(job_id=self.id)
except:
pass
self.event = Job.NO_EVENT
self.job_doc_status = Job.STOP_STATUS
self.state = StopState
def terminate(self):
if self.process:
self.process.terminate()
def kill(self):
if self.process:
self.process.kill()

freezer/scheduler/shell.py

@@ -0,0 +1,236 @@
#!/usr/bin/env python
"""
Copyright 2015 Hewlett-Packard
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
This product includes cryptographic software written by Eric Young
(eay@cryptsoft.com). This product includes software written by Tim
Hudson (tjh@cryptsoft.com).
========================================================================
"""
import json
import os
import utils
from freezer.utils import create_dir
from prettytable import PrettyTable
try:
from betterprint import pprint
except ImportError:
def pprint(doc):
print json.dumps(doc, indent=4)
def do_session_remove_job(client, args):
"""
remove the job from the session
"""
if not args.session_id:
raise Exception("Parameter --session required")
if not args.job:
raise Exception("Parameter --job required")
client.sessions.remove_job(args.session_id, args.job)
def do_session_add_job(client, args):
"""
adds a job to the session
"""
if not args.session_id:
raise Exception("Parameter --session required")
if not args.job:
raise Exception("Parameter --job required")
for job_id in args.job.split(','):
try:
client.sessions.add_job(args.session_id, job_id)
except Exception as e:
print "Error processin job {0}. {1}".format(job_id, e)
def do_session_list_job(client, args):
"""
prints a list of jobs assigned to the specific session
:return: None
"""
if not args.session_id:
raise Exception("Parameter --session required")
session_doc = client.sessions.get(args.session_id)
jobs = session_doc.get('jobs', {})
table = PrettyTable(["job_id", "status", "result", "client_id"])
for job_id, job_data in jobs.iteritems():
table.add_row([job_id,
job_data['status'],
job_data['result'],
job_data['client_id']
])
print table
def do_session_delete(client, args):
if not args.session_id:
raise Exception("Parameter --session required")
client.sessions.delete(args.session_id)
def do_session_create(client, args):
"""
creates a session object loading it from disk
:return: None
"""
if not args.fname:
raise Exception("Parameter --file required")
session_doc = utils.load_doc_from_json_file(args.fname)
session_id = client.sessions.create(session_doc)
print "Created session {0}".format(session_id)
def do_session_get(client, args):
"""
gets a specific session object and saves it to file. If file is not
specified the session obj is printed.
:return: None
"""
if not args.session_id:
raise Exception("Parameter --session required")
session_doc = client.sessions.get(args.session_id)
if args.fname:
utils.save_doc_to_json_file(session_doc, args.fname)
else:
pprint(session_doc)
def do_session_list(client, args):
"""
    print a list of all sessions
:return: None
"""
table = PrettyTable(["session_id", "tag", "status",
"description", "jobs", "last_start"])
    offset = 0
    l = client.sessions.list()
    while l:
        # accumulate the offset, otherwise the same page is fetched forever
        offset += len(l)
        for doc in l:
            table.add_row([doc['session_id'],
                           doc['session_tag'],
                           doc['status'],
                           doc.get('description', ''),
                           len(doc.get('jobs', [])),
                           doc['last_start']])
        l = client.sessions.list(offset=offset)
print table
def do_job_create(client, args):
if not args.fname:
raise Exception("Parameter --file required")
job_doc = utils.load_doc_from_json_file(args.fname)
job_id = client.jobs.create(job_doc)
print "Created job {0}".format(job_id)
def do_job_delete(client, args):
if not args.job:
raise Exception("Parameter --job required")
client.jobs.delete(args.job)
def do_job_get(client, args):
if not args.job:
raise Exception("Parameter --job required")
job_doc = client.jobs.get(args.job)
if args.fname:
        utils.save_doc_to_json_file(job_doc, args.fname)
else:
pprint(job_doc)
def do_job_start(client, args):
if not args.job:
raise Exception("Parameter --job required")
client.jobs.start_job(args.job)
print "Job {0} started".format(args.job)
def do_job_stop(client, args):
if not args.job:
raise Exception("Parameter --job required")
client.jobs.stop_job(args.job)
print "Job {0} stopped".format(args.job)
def do_job_download(client, args):
create_dir(args.jobs_dir, do_log=True)
for doc in _job_list(client, args):
fname = os.path.normpath('{0}/job_{1}.conf'.
format(args.jobs_dir, doc['job_id']))
try:
utils.save_doc_to_json_file(doc, fname)
        except Exception:
print "Unable to write to file {0}".format(fname)
def do_job_upload(client, args):
for job_doc in utils.get_jobs_from_disk(args.jobs_dir):
job_id = client.jobs.create(job_doc)
print "Uploaded job {0}".format(job_id)
def _job_list(client, args):
    search = {}
    if args.active_only:
        search = {"match_not": [{"status": "completed"}]}
    offset = 0
    l = client.jobs.list(search=search)
    while l:
        # accumulate the offset, otherwise the same page is fetched forever
        offset += len(l)
        for doc in l:
            yield doc
        l = client.jobs.list(offset=offset, search=search)
def do_job_list(client, args):
table = PrettyTable(["job_id", "description", "# actions",
"status", "event", "result", "session_id"])
for doc in _job_list(client, args):
job_scheduling = doc.get('job_schedule', {})
job_status = job_scheduling.get('status', '')
job_event = job_scheduling.get('event', '')
job_result = job_scheduling.get('result', '')
table.add_row([doc['job_id'],
doc.get('description', ''),
len(doc.get('job_actions', [])),
job_status,
job_event,
job_result,
doc.get('session_id', '')])
print table
def do_client_list(client, args):
table = PrettyTable(["client_id", "hostname", "description"])
    offset = 0
    l = client.registration.list()
    while l:
        offset += len(l)
        for doc in l:
            client_doc = doc['client']
            table.add_row([client_doc['client_id'],
                           client_doc.get('hostname', ''),
                           client_doc.get('description', '')])
        l = client.registration.list(offset=offset)
print table

freezer/scheduler/utils.py

@@ -0,0 +1,100 @@
"""
Copyright 2015 Hewlett-Packard
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
This product includes cryptographic software written by Eric Young
(eay@cryptsoft.com). This product includes software written by Tim
Hudson (tjh@cryptsoft.com).
========================================================================
"""
import json
import os
import socket
import freezer.apiclient.exceptions
CONFIG_FILE_EXT = '.conf'
def do_register(client, args=None):
if client:
client_info = {
"client_id": client.client_id,
"hostname": socket.gethostname()
}
try:
client.registration.create(client_info)
except freezer.apiclient.exceptions.ApiClientException as e:
if e.status_code == 409:
print "Client already registered"
return os.EX_CANTCREAT
return os.EX_OK
def find_config_files(path):
expanded_path = os.path.expanduser(path)
if os.path.isfile(expanded_path):
return [expanded_path]
file_list = []
if os.path.isdir(expanded_path):
for fname in os.walk(expanded_path).next()[2]:
if CONFIG_FILE_EXT.upper() == os.path.splitext(fname)[1].upper():
file_list.append('{0}/{1}'.format(expanded_path, fname))
return file_list
raise Exception("unable to find job files at the provided path "
"{0}".format(path))
def load_doc_from_json_file(fname, debug=False):
with open(fname, 'rb') as fd:
try:
doc = json.load(fd)
except Exception as e:
raise Exception("Unable to load conf file. {0}".format(e))
if debug:
print "File {0} loaded: ".format(fname)
return doc
def save_doc_to_json_file(doc, fname, debug=False):
with open(fname, 'w') as fd:
json.dump(doc, fd, indent=4)
if debug:
print 'Saved doc to file: {0}'.format(fname)
def get_jobs_from_disk(path):
job_doc_list = [
load_doc_from_json_file(f) for f in find_config_files(path)]
return [x for x in job_doc_list if x]
def save_jobs_to_disk(job_doc_list, path):
for doc in job_doc_list:
fname = os.path.normpath('{0}/job_{1}.conf'.
format(path, doc['job_id']))
save_doc_to_json_file(doc, fname)
def get_active_jobs_from_api(client):
# might raise
search = {"match_not": [{"status": "completed"}]}
job_list, offset = [], 0
while True:
jobs = client.jobs.list(limit=10, offset=offset, search=search)
job_list.extend(jobs)
if len(jobs) < 10:
break
offset += len(jobs)
return job_list


@@ -451,12 +451,9 @@ def get_newest_backup(hostname, backup_name, remote_match_backup):
raise Exception("remote match backups are empty")
sorted_backups_list = sort_backup_list(remote_match_backup)
print sorted_backups_list
for remote_obj in sorted_backups_list:
obj_name_match = re.search(r'^{0}_({1})_(\d+)_\d+?$'.format(
hostname, backup_name), remote_obj, re.I)
print obj_name_match
if not obj_name_match:
continue
tar_metadata_obj = 'tar_metadata_{0}'.format(remote_obj)


@@ -3,44 +3,56 @@ Freezer API
===========
Installation
============
1. Installation
===============
1.1 Install required packages
-----------------------------
::
Install required packages
-------------------------
# pip install keystonemiddleware falcon
Elasticsearch support::
Elasticsearch support
::
# pip install elasticsearch
Install freezer_api
-------------------
1.2 Install freezer_api
-----------------------
::
# git clone https://github.com/stackforge/freezer.git
# cd freezer/freezer_api && sudo python setup.py install
This will install into /usr/local.
edit config file
----------------
1.3 edit config file
--------------------
::
# sudo vi /etc/freezer-api.conf
run simple instance
-------------------
1.4 run simple instance
-----------------------
::
# freezer-api
examples running using uwsgi
----------------------------
1.5 examples running using uwsgi
--------------------------------
::
# uwsgi --http :9090 --need-app --master --module freezer_api.cmd.api:application
# uwsgi --https :9090,foobar.crt,foobar.key --need-app --master --module freezer_api.cmd.api:application
Concepts and definitions
========================
2. Concepts and definitions
===========================
*hostname* is _probably_ going to be the host fqdn.
@@ -52,145 +64,176 @@ identifies a backup
defined as "container_hostname_backupname" identifies a group of related
backups which share the same container, hostname and backupname
*backup_session*
is a group of backups which share container, hostname and backupname, but
are also related by dependency.
*backup_session_id*
utilizes the timestamp of the first (level 0) backup in the session.
It is identified by (container, hostname, backupname, timestamp-of-level-0)
3. API registration
===================
::
keystone user-create --name freezer --pass FREEZER_PWD
keystone user-role-add --user freezer --tenant service --role admin
keystone service-create --name freezer --type backup \
--description "Freezer Backup Service"
keystone endpoint-create \
--service-id $(keystone service-list | awk '/ backup / {print $2}') \
--publicurl http://freezer_api_publicurl:port \
--internalurl http://freezer_api_internalurl:port \
--adminurl http://freezer_api_adminurl:port \
--region regionOne
API registration
================
keystone user-create --name freezer --pass FREEZER_PWD
keystone user-role-add --user freezer --tenant service --role admin
keystone service-create --name freezer --type backup \
--description "Freezer Backup Service"
keystone endpoint-create \
--service-id $(keystone service-list | awk '/ backup / {print $2}') \
--publicurl http://freezer_api_publicurl:port \
--internalurl http://freezer_api_internalurl:port \
--adminurl http://freezer_api_adminurl:port \
--region regionOne
API routes
==========
4. API routes
=============
General
-------
GET / List API version
GET /v1 JSON Home document, see http://tools.ietf.org/html/draft-nottingham-json-home-03
::
GET / List API version
GET /v1 JSON Home document, see http://tools.ietf.org/html/draft-nottingham-json-home-03
Backup metadata
---------------
GET /v1/backups(?limit,marker) Lists backups
POST /v1/backups Creates backup entry
::
GET /v1/backups/{backup_id} Get backup details
UPDATE /v1/backups/{backup_id} Updates the specified backup
DELETE /v1/backups/{backup_id} Deletes the specified backup
GET /v1/backups(?limit,marker) Lists backups
POST /v1/backups Creates backup entry
GET /v1/backups/{backup_id} Get backup details
UPDATE /v1/backups/{backup_id} Updates the specified backup
DELETE /v1/backups/{backup_id} Deletes the specified backup
Freezer clients management
--------------------------
GET /v1/clients(?limit,offset) Lists registered clients
POST /v1/clients Creates client entry
::
GET /v1/clients/{freezerc_id} Get client details
UPDATE /v1/clients/{freezerc_id} Updates the specified client information
DELETE /v1/clients/{freezerc_id} Deletes the specified client information
GET /v1/clients(?limit,offset) Lists registered clients
POST /v1/clients Creates client entry
GET /v1/clients/{freezerc_id} Get client details
UPDATE /v1/clients/{freezerc_id} Updates the specified client information
DELETE /v1/clients/{freezerc_id} Deletes the specified client information
Freezer jobs management
-----------------------
GET /v1/jobs(?limit,offset) Lists registered jobs
POST /v1/jobs Creates job entry
::
GET /v1/jobs/{jobs_id} Get job details
POST /v1/jobs/{jobs_id} creates or replaces a job entry using the specified job_id
UPDATE /v1/jobs/{jobs_id} Updates the existing job information
DELETE /v1/jobs/{jobs_id} Deletes the specified job information
PATCH /v1/jobs/{jobs_id} Updates part of the document
GET /v1/jobs(?limit,offset) Lists registered jobs
POST /v1/jobs Creates job entry
Data Structures
===============
GET /v1/jobs/{jobs_id} Get job details
POST /v1/jobs/{jobs_id} creates or replaces a job entry using the specified job_id
DELETE /v1/jobs/{jobs_id} Deletes the specified job information
PATCH /v1/jobs/{jobs_id} Updates part of the document
Backup metadata structure
-------------------------
Freezer actions management
--------------------------
::
GET /v1/actions(?limit,offset) Lists registered action
POST /v1/actions Creates action entry
GET /v1/actions/{actions_id} Get action details
POST /v1/actions/{actions_id} creates or replaces a action entry using the specified action_id
DELETE /v1/actions/{actions_id} Deletes the specified action information
PATCH /v1/actions/{actions_id} Updates part of the action document
Freezer sessions management
---------------------------
::
GET /v1/sessions(?limit,offset) Lists registered session
POST /v1/sessions Creates session entry
GET /v1/sessions/{sessions_id} Get session details
POST /v1/sessions/{sessions_id} creates or replaces a session entry using the specified session_id
DELETE /v1/sessions/{sessions_id} Deletes the specified session information
PATCH /v1/sessions/{sessions_id} Updates part of the session document
POST /v1/sessions/{sessions_id}/action requests actions (e.g. start/end) upon a specific session
PUT /v1/sessions/{sessions_id}/jobs/{job_id} adds the job to the session
DELETE /v1/sessions/{sessions_id}/jobs/{job_id} removes the job from the session
5. Backup metadata structure
============================
NOTE: sizes are in MB
::
backup_metadata:=
{
"container": string,
"host_name": string, # fqdn, client has to provide consistent information here !
"backup_name": string,
"timestamp": int,
"level": int,
"backup_session": int,
"max_level": int,
"mode" : string, (fs mongo mysql)
"fs_real_path": string,
"vol_snap_path": string,
"total_broken_links" : int,
"total_fs_files" : int,
"total_directories" : int,
"backup_size_uncompressed" : int,
"backup_size_compressed" : int,
"total_backup_session_size" : int,
"compression_alg": string, (gzip bzip xz)
"encrypted": bool,
"client_os": string
"broken_links" : [string, string, string],
"excluded_files" : [string, string, string]
"cli": string, equivalent cli used when executing the backup ?
"version": string
}
backup_metadata:=
{
"container": string,
"host_name": string, # fqdn, client has to provide consistent information here !
"backup_name": string,
"timestamp": int,
"level": int,
"max_level": int,
"mode" : string, (fs mongo mysql)
"fs_real_path": string,
"vol_snap_path": string,
"total_broken_links" : int,
"total_fs_files" : int,
"total_directories" : int,
"backup_size_uncompressed" : int,
"backup_size_compressed" : int,
"compression_alg": string, (gzip bzip xz)
"encrypted": bool,
"client_os": string
"broken_links" : [string, string, string],
"excluded_files" : [string, string, string]
"cli": string, equivalent cli used when executing the backup ?
"version": string
}
The api wraps the backup_metadata dictionary with some additional information.
It stores and returns the information provided in this form:
{
"backup_id": string # container_hostname_backupname_timestamp_level
"user_id": string, # owner of the backup metadata (OS X-User-Id, keystone provided)
"user_name": string # owner of the backup metadata (OS X-User-Name, keystone provided)
::
"backup_metadata": { #--- actual backup_metadata provided
"container": string,
"host_name": string,
"backup_name": string,
"timestamp": int,
...
}
}
{
"backup_id": string # container_hostname_backupname_timestamp_level
"user_id": string, # owner of the backup metadata (OS X-User-Id, keystone provided)
"user_name": string # owner of the backup metadata (OS X-User-Name, keystone provided)
"backup_metadata": { #--- actual backup_metadata provided
"container": string,
"host_name": string,
"backup_name": string,
"timestamp": int,
...
}
}
Freezer Client document structure
---------------------------------
6. Freezer Client document structure
====================================
Identifies a freezer client for the purpose of sending actions
# client_info document contains information relevant for client identification
client_info:=
{
"client_id": string actually a concatenation "tenant-id_hostname"
"hostname": string
"description": string
"config_id": string # configuration in use by the client
}
client_info document contains information relevant for client identification::
client_info:=
{
"client_id": string actually a concatenation "tenant-id_hostname"
"hostname": string
"description": string
"uuid":
}
client_type document embeds the client_info and adds user_id::

    client_type :=
    {
      "client": client_info document,
      "user_id": string,    # owner of the information (OS X-User-Id, keystone provided, added by api)
    }
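For illustration (hedged; the values are placeholders), the ``client_id``
described above is built like this::

    tenant_id = '03a81fb0c7c24e6b8'        # keystone tenant/project id
    hostname = 'node-7.example.com'        # client machine fqdn
    client_id = '{0}_{1}'.format(tenant_id, hostname)
    # -> '03a81fb0c7c24e6b8_node-7.example.com'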
7. Jobs
=======
A job describes a single action to be executed by a freezer client, for example a backup, or a restore.
It contains the necessary information, as if it were provided on the command line.
Each job carries job_id, user_id, client_id, status, and scheduling information.
Scheduling information enables future and recurrent execution of jobs.
::

    +---------------------+   job_actions   +----------------+
    |         Job         +---------------->|   job_action   |
    +---------------------+      0..*       +----------------+   freezer_action   +----------------+
    | +job_id             |                 | +mandatory     +------------------->| freezer_action |
    | +client_id          |                 | +retries       |                    +----------------+
    | +user_id            |                 +----------------+
    | +description        |
    |                     |   job_schedule  +-------------------+
    +---------------------+---------------->| job schedule dict |
                                            +-------------------+
job document structure::

    "job": {
      "job_actions": [ list of job_action dicts, see below ],
      "job_schedule": { scheduling information },
      "job_id": string,
      "client_id": string,
      "user_id": string,
      "description": string
    }
"job_action": {
{
"action" : string
"mode" : string
"src_file" : string
"backup_name" : string
"container" : string
...
}
"job_actions":
[
{
"freezer_action" :
{
"action" : string
"mode" : string
"src_file" : string
"backup_name" : string
"container" : string
...
},
"mandatory": False,
"max_retries": 3,
"max_retry_interval": 60
},
{
"freezer_action" :
{
...
},
"mandatory": False,
"max_retries": 3,
"max_retry_interval": 60
"job_schedule": {
"time_created": int (timestamp)
"time_started": int (timestamp)
"time_ended": int (timestamp)
"status": string ["stop", "scheduled", "running", "aborting", "removed"]
"event": string ["", "stop", "start", "abort", "remove"]
"result": string ["", "success", "fail", "aborted"]
}
]
SCHEDULING TIME INFORMATION
}
"job_schedule": {
"time_created": int (timestamp)
"time_started": int (timestamp)
"time_ended": int (timestamp)
"status": string ["stop", "scheduled", "running", "aborting", "removed"]
"event": string ["", "stop", "start", "abort", "remove"]
"result": string ["", "success", "fail", "aborted"]
SCHEDULING TIME INFORMATION
}
7.1 Scheduling Time Information
-------------------------------
Three types of scheduling can be identified:
* date - used for single run jobs
* interval - used for jobs that run repeatedly at fixed intervals
* cron-like - used for jobs with calendar-style recurrence
Each type has specific parameters which can be given.
7.1.1 date scheduling
---------------------
::
"schedule_date": : datetime isoformat
7.1.2 interval scheduling
-------------------------
::
"schedule_interval" : "continuous", "N weeks" / "N days" / "N hours" / "N minutes" / "N seconds"
"schedule_start_date" : datetime isoformat
"schedule_end_date" : datetime isoformat
7.1.3 cron-like scheduling
--------------------------
::
"schedule_year" : 4 digit year
"schedule_month" : 1-12
"schedule_day" : 1-31
"schedule_week" : 1-53
"schedule_day_of_week": 0-6 (or mon, tue, wed, thu, fri, sat, sun)
"schedule_hour" : 0-23
"schedule_minute" : 0-59
"schedule_second" : 0-59
"schedule_start_date" : datetime isoformat
"schedule_end_date" : datetime isoformat
7.2 Job examples
----------------
example backup freezer_action::

    "freezer_action": {
      "action" : "backup"
      "mode" : "fs"
      "src_file" : "/home/tylerdurden/project_mayhem"
      "backup_name" : "project_mayhem_backup"
      "container" : "my_backup_container"
      "max_backup_level" : int
      "always_backup_level": int
      "restart_always_backup": int
      "no_incremental" : bool
      "encrypt_pass_file" : private_key_file
      "log_file" : "/var/log/freezer.log"
      "hostname" : false
      "max_cpu_priority" : false
    }

example restore freezer_action::

    "freezer_action": {
      "action": "restore"
      "restore-abs-path": "/home/tylerdurden/project_mayhem"
      "container" : "my_backup_container"
      "backup-name": "project_mayhem_backup"
      "restore-from-host": "another_host"
      "max_cpu_priority": true
    }
example scheduled backup job.
job will be executed once at the provided datetime::
"job": {
"job_actions":
[
{
"freezer_action":
{
"action" : "backup",
"mode" : "fs",
"src_file" : "/home/tylerdurden/project_mayhem",
"backup_name" : "project_mayhem_backup",
"container" : "my_backup_container",
}
"exit_status": "fail|success"
"max_retries": int,
"max_retries_interval": secs,
"mandatory": bool
},
{
action
...
},
{
action
...
}
],
"job_schedule":
{
"time_created": 1234,
"time_started": 1234,
"time_ended": 0,
"status": "stop | scheduled | running",
"schedule_date": "2015-06-02T16:20:00",
}
"job_id": "blabla",
"client_id": "blabla",
"user_id": "blabla",
"description": "scheduled one shot",
}
"job": {
"job_actions":
[ ... ],
"job_schedule":
{
"time_created": 1234,
"time_started": 1234,
"time_ended": 0,
"status": "stop",
"event": "start"
"schedule_interval" : "1 day"
"schedule_start_date" : "2015-06-02T16:20:00"
},
"job_id": "blabla",
"client_id": "blabla",
"user_id": "blabla",
"description": "daily backup",
}
multiple scheduling choices allowed::
"job": {
"job_actions":
[ ... ],
"job_schedule":
{
"time_created": 1234,
"time_started": 1234,
"time_ended": 0,
"status": "scheduled"
"schedule_month" : "1-6, 9-12"
"schedule_day" : "mon, wed, fri"
"schedule_hour": "03"
"schedule_minute": "25"
}
"job_id": "blabla",
"client_id": "blabla",
"user_id": "blabla",
"description": "daily backup",
}
Finished job with result::
"job": {
"job_actions": [ ... ],
"job_schedule":
{
"time_created": 1234,
"time_started": 1234,
"time_ended": 4321,
"status": "stop",
"event": "",
"result": "success",
"schedule_time": "2015-06-02T16:20:00"
},
"job_id": "blabla",
"client_id": "blabla",
"user_id": "blabla",
"description": "one shot job",
}
8. Actions
==========
Actions are stored only to facilitate the assembling of different actions into jobs in the web UI.
They are not directly used by the scheduler.
They are stored in this structure::
{
"freezer_action": {
"action": string,
"backup_name": string,
....
},
"mandatory": bool,
"max_retries": int,
"max_retries_interval": int
"action_id": string,
"user_id": string
}
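A hedged client-side sketch of storing such an action (``API_URL`` and
``TOKEN`` are placeholders; the endpoint and the 201 response follow the API
described in this document)::

    import json
    import requests

    action_doc = {
        'freezer_action': {
            'action': 'backup',
            'mode': 'fs',
            'src_file': '/home/tylerdurden/project_mayhem',
            'backup_name': 'project_mayhem_backup',
            'container': 'my_backup_container'
        },
        'mandatory': False,
        'max_retries': 3,
        'max_retries_interval': 60
    }

    r = requests.post(API_URL + '/v1/actions',
                      data=json.dumps(action_doc),
                      headers={'X-Auth-Token': TOKEN})
    assert r.status_code == 201
    action_id = r.json()['action_id']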
"job_schedule": {
"time_created": 1234,
"time_started": 1234,
"time_ended": 0,
"status": "scheduled"
"schedule_month" : "1-6, 9-12"
"schedule_day" : "mon, wed, fri"
"schedule_hour": "03"
"schedule_minute": "25"
9. Sessions
===========
A session is a group of jobs which share the same scheduling time. A session is identified
by its **session_id** and has a numeric tag (**session_tag**) which is incremented each time
a new session is started.
The purpose of the *session_tag* is to identify a group of jobs which have been executed
together and which therefore represent a snapshot of a distributed system.
When a job is added to a session, the scheduling time of the session is copied into the
job data structure, so that any job belonging to the same session will start at the same time.
9.1 Session Data Structure
--------------------------
::
    session =
    {
      "session_id": string,
      "session_tag": int,
      "description": string,
      "hold_off": int (seconds),
      "schedule": { scheduling information, same as jobs },
      "jobs": { 'job_id_1': {
                    "client_id": string,
                    "status": string,
                    "result": string,
                    "time_started": int (timestamp),
                    "time_ended": int (timestamp)
                },
                'job_id_2': {
                    "client_id": string,
                    "status": string,
                    "result": string,
                    "time_started": int (timestamp),
                    "time_ended": int (timestamp)
                }
              },
      "time_start": int (timestamp),
      "time_end": int (timestamp),
      "status": string "completed" "running",
      "result": string "success" "fail",
      "user_id": string
    }
"job_id": "blabla",
"client_id": "blabla",
"user_id": "blabla",
"description": "daily backup",
}
9.2 Session actions
-------------------
When the freezer scheduler running on a node wants to start a session,
it sends a POST request to the following endpoint::
POST /v1/sessions/{sessions_id}/action
The body of the request bears the action and its parameters.
9.2.1 Session START action
--------------------------
::
{
"start": {
"job_id": "JOB_ID_HERE",
"current_tag": 22
}
}
Example of a successful response::
{
'result': 'success',
'session_tag': 23
}
9.2.2 Session STOP action
-------------------------
::
{
"end": {
"job_id": "JOB_ID_HERE",
"current_tag": 23,
"result": "success|fail"
}
}
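A hedged sketch of how a scheduler node could drive these actions
(``API_URL``, ``TOKEN``, ``SESSION_ID`` and ``JOB_ID`` are placeholders; the
202 status and the response body follow the API described above)::

    import json
    import requests

    def session_action(action_doc):
        r = requests.post(
            '{0}/v1/sessions/{1}/action'.format(API_URL, SESSION_ID),
            data=json.dumps(action_doc),
            headers={'X-Auth-Token': TOKEN})
        assert r.status_code == 202
        return r.json()   # e.g. {'result': 'success', 'session_tag': 23}

    # start a session run, then report the job result when it ends
    session_action({'start': {'job_id': JOB_ID, 'current_tag': 22}})
    session_action({'end': {'job_id': JOB_ID, 'current_tag': 23,
                            'result': 'success'}})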
9.3 Session-Job association
---------------------------
PUT    /v1/sessions/{sessions_id}/jobs/{job_id}    Adds the job to the session
DELETE /v1/sessions/{sessions_id}/jobs/{job_id}    Removes the job from the session
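A minimal hedged sketch of the association calls (placeholders as above; both
endpoints return 204 on success)::

    import requests

    requests.put(
        '{0}/v1/sessions/{1}/jobs/{2}'.format(API_URL, SESSION_ID, JOB_ID),
        headers={'X-Auth-Token': TOKEN})      # associate -> 204

    requests.delete(
        '{0}/v1/sessions/{1}/jobs/{2}'.format(API_URL, SESSION_ID, JOB_ID),
        headers={'X-Auth-Token': TOKEN})      # dissociate -> 204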

View File

@ -19,10 +19,13 @@ Hudson (tjh@cryptsoft.com).
========================================================================
"""
from freezer_api.api.v1 import actions
from freezer_api.api.v1 import backups
from freezer_api.api.v1 import clients
from freezer_api.api.v1 import homedoc
from freezer_api.api.v1 import jobs
from freezer_api.api.v1 import sessions
VERSION = {
'id': '1',
@ -59,4 +62,23 @@ def public_endpoints(storage_driver):
('/jobs/{job_id}',
jobs.JobsResource(storage_driver)),
('/actions',
actions.ActionsCollectionResource(storage_driver)),
('/actions/{action_id}',
actions.ActionsResource(storage_driver)),
('/sessions',
sessions.SessionsCollectionResource(storage_driver)),
('/sessions/{session_id}',
sessions.SessionsResource(storage_driver)),
('/sessions/{session_id}/action',
sessions.SessionsAction(storage_driver)),
('/sessions/{session_id}/jobs/{job_id}',
sessions.SessionsJob(storage_driver)),
]

View File

@ -0,0 +1,101 @@
"""
Copyright 2015 Hewlett-Packard
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
This product includes cryptographic software written by Eric Young
(eay@cryptsoft.com). This product includes software written by Tim
Hudson (tjh@cryptsoft.com).
========================================================================
"""
import falcon
from freezer_api.common import exceptions
class ActionsCollectionResource(object):
"""
Handler for endpoint: /v1/actions
"""
def __init__(self, storage_driver):
self.db = storage_driver
def on_get(self, req, resp):
# GET /v1/actions(?limit,offset) Lists actions
user_id = req.get_header('X-User-ID')
offset = req.get_param_as_int('offset') or 0
limit = req.get_param_as_int('limit') or 10
search = req.context.get('doc', {})
obj_list = self.db.search_action(user_id=user_id, offset=offset,
limit=limit, search=search)
req.context['result'] = {'actions': obj_list}
def on_post(self, req, resp):
# POST /v1/actions Creates action entry
try:
doc = req.context['doc']
except KeyError:
raise exceptions.BadDataFormat(
message='Missing request body')
user_id = req.get_header('X-User-ID')
action_id = self.db.add_action(user_id=user_id, doc=doc)
resp.status = falcon.HTTP_201
req.context['result'] = {'action_id': action_id}
class ActionsResource(object):
"""
Handler for endpoint: /v1/actions/{action_id}
"""
def __init__(self, storage_driver):
self.db = storage_driver
def on_get(self, req, resp, action_id):
# GET /v1/actions/{action_id} retrieves the specified action
# search in body
user_id = req.get_header('X-User-ID') or ''
obj = self.db.get_action(user_id=user_id, action_id=action_id)
if obj:
req.context['result'] = obj
else:
resp.status = falcon.HTTP_404
def on_delete(self, req, resp, action_id):
# DELETE /v1/actions/{action_id} Deletes the specified action
user_id = req.get_header('X-User-ID')
self.db.delete_action(user_id=user_id, action_id=action_id)
req.context['result'] = {'action_id': action_id}
resp.status = falcon.HTTP_204
def on_patch(self, req, resp, action_id):
# PATCH /v1/actions/{action_id} updates the specified action
user_id = req.get_header('X-User-ID') or ''
doc = req.context.get('doc', {})
new_version = self.db.update_action(user_id=user_id,
action_id=action_id,
patch_doc=doc)
req.context['result'] = {'action_id': action_id,
'version': new_version}
def on_post(self, req, resp, action_id):
# POST /v1/actions/{action_id} creates/replaces the specified action
user_id = req.get_header('X-User-ID') or ''
doc = req.context.get('doc', {})
new_version = self.db.replace_action(user_id=user_id,
action_id=action_id,
doc=doc)
resp.status = falcon.HTTP_201
req.context['result'] = {'action_id': action_id,
'version': new_version}

View File

@ -0,0 +1,348 @@
"""
Copyright 2015 Hewlett-Packard
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
This product includes cryptographic software written by Eric Young
(eay@cryptsoft.com). This product includes software written by Tim
Hudson (tjh@cryptsoft.com).
========================================================================
"""
import time
import falcon
from freezer_api.common import exceptions
class SessionsCollectionResource(object):
"""
Handler for endpoint: /v1/sessions
"""
def __init__(self, storage_driver):
self.db = storage_driver
def on_get(self, req, resp):
# GET /v1/sessions(?limit,offset) Lists sessions
user_id = req.get_header('X-User-ID')
offset = req.get_param_as_int('offset') or 0
limit = req.get_param_as_int('limit') or 10
search = req.context.get('doc', {})
obj_list = self.db.search_session(user_id=user_id, offset=offset,
limit=limit, search=search)
req.context['result'] = {'sessions': obj_list}
def on_post(self, req, resp):
# POST /v1/sessions Creates session entry
try:
doc = req.context['doc']
except KeyError:
raise exceptions.BadDataFormat(
message='Missing request body')
user_id = req.get_header('X-User-ID')
session_id = self.db.add_session(user_id=user_id, doc=doc)
resp.status = falcon.HTTP_201
req.context['result'] = {'session_id': session_id}
class SessionsResource(object):
"""
Handler for endpoint: /v1/sessions/{session_id}
"""
def __init__(self, storage_driver):
self.db = storage_driver
def on_get(self, req, resp, session_id):
# GET /v1/sessions/{session_id} retrieves the specified session
# search in body
user_id = req.get_header('X-User-ID') or ''
obj = self.db.get_session(user_id=user_id, session_id=session_id)
if obj:
req.context['result'] = obj
else:
resp.status = falcon.HTTP_404
def on_delete(self, req, resp, session_id):
# DELETE /v1/sessions/{session_id} Deletes the specified session
user_id = req.get_header('X-User-ID')
self.db.delete_session(user_id=user_id, session_id=session_id)
req.context['result'] = {'session_id': session_id}
resp.status = falcon.HTTP_204
def on_patch(self, req, resp, session_id):
# PATCH /v1/sessions/{session_id} updates the specified session
user_id = req.get_header('X-User-ID') or ''
doc = req.context.get('doc', {})
new_version = self.db.update_session(user_id=user_id,
session_id=session_id,
patch_doc=doc)
req.context['result'] = {'session_id': session_id,
'version': new_version}
def on_post(self, req, resp, session_id):
# POST /v1/sessions/{session_id} creates/replaces the specified session
user_id = req.get_header('X-User-ID') or ''
doc = req.context.get('doc', {})
new_version = self.db.replace_session(user_id=user_id,
session_id=session_id,
doc=doc)
resp.status = falcon.HTTP_201
req.context['result'] = {'session_id': session_id,
'version': new_version}
class SessionsAction(object):
"""
Handler for endpoint: /v1/sessions/{session_id}/action
"""
def __init__(self, storage_driver):
self.db = storage_driver
def on_post(self, req, resp, session_id):
# POST /v1/sessions/{session_id}/action
# executes an action on the specified session
user_id = req.get_header('X-User-ID') or ''
doc = req.context.get('doc', {})
try:
action, params = next(doc.iteritems())
except Exception:
raise exceptions.BadDataFormat("Bad action request format")
session_doc = self.db.get_session(user_id=user_id,
session_id=session_id)
session = Session(session_doc)
session.execute_action(action, params)
if session.need_update:
self.db.update_session(user_id=user_id,
session_id=session_id,
patch_doc=session.doc)
resp.status = falcon.HTTP_202
req.context['result'] = {'result': session.action_result,
'session_tag': session.session_tag}
class Session(object):
"""
A class to manage the actions that can be taken upon a
Session data structure.
It modifies information contained in its document
in accordance to the requested action
"""
def __init__(self, doc):
self.doc = doc
self.action_result = ''
self.need_update = False
@property
def session_tag(self):
return self.doc.get('session_tag', 0)
@session_tag.setter
def session_tag(self, value):
self.doc['session_tag'] = value
def execute_action(self, action, params):
if action == 'start':
try:
self.start(params['job_id'], params['current_tag'])
except exceptions.BadDataFormat:
raise
except Exception as e:
raise exceptions.FreezerAPIException(e)
elif action == 'end':
try:
self.end(params['job_id'], params['result'])
except exceptions.BadDataFormat:
raise
except Exception as e:
raise exceptions.FreezerAPIException(e)
else:
raise exceptions.MethodNotImplemented("Bad Action Method")
def end(self, job_id, result):
"""
Apply the 'end' action to the session object
If the request can be accepted it modifies the relevant fields
and sets the need_update member to notify that the stored
document needs to be updated
"""
now = int(time.time())
self.set_job_end(job_id, result, now)
new_result = self.get_job_overall_result()
if self.doc.get('status', '') != 'completed':
if new_result in ['fail', 'success']:
self.doc['time_end'] = now
self.doc['result'] = new_result
self.doc['status'] = 'completed'
self.action_result = 'success'
self.need_update = True
def start(self, job_id, job_tag):
"""
Apply the 'start' action to the session object
If the request can be accepted it modifies the relevant fields
and sets the need_update member to notify that the stored
document needs to be updated
"""
now = int(time.time())
time_since_last_start = now - self.doc.get('time_start', 0)
if job_tag > self.session_tag:
raise exceptions.BadDataFormat('requested tag value too high')
if time_since_last_start <= self.doc.get('hold_off', 60):
# session has been started not so long ago
# tag increments are not allowed during hold_off
if job_tag < self.session_tag:
self.action_result = 'success'
self.set_job_start(job_id, now)
self.need_update = True
else:
self.action_result = 'hold-off'
self.need_update = False
elif time_since_last_start > self.doc.get('hold_off', 60):
# out of hold_off window:
# - ok to trigger new action start (job_tag == session_tag)
# if job_tag < session_tag client is probably out-of-sync
if self.session_tag == job_tag:
self.session_tag += 1
self.doc['time_start'] = now
self.doc['status'] = 'running'
self.doc['result'] = ''
self.action_result = 'success'
self.set_job_start(job_id, now)
self.need_update = True
else:
self.action_result = 'out-of-sync'
self.need_update = False
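# Illustrative timeline for start() (hedged; hold_off=60 and the tag values
# are example data, not part of the implementation):
#   t=0   session_tag=5, job A calls start(tag=5): out of hold_off,
#         tag == session_tag -> tag becomes 6, result 'success'
#   t=10  job B calls start(tag=5): within hold_off, 5 < 6 -> 'success'
#   t=10  job C calls start(tag=6): within hold_off, 6 == 6 -> 'hold-off'
#   t=10  job D calls start(tag=7): 7 > 6 -> BadDataFormat raised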
def get_job_overall_result(self):
"""
check the status of all the jobs and return the overall session result
"""
for job in self.doc['jobs'].itervalues():
if job['status'] != 'completed':
return 'running'
if job['result'] != 'success':
return 'fail'
return 'success'
def set_job_end(self, job_id, result, timestamp):
try:
job = self.doc['jobs'][job_id]
except KeyError:
raise exceptions.BadDataFormat('job_id not found in session')
job['status'] = 'completed'
job['result'] = result
job['time_ended'] = timestamp
def set_job_start(self, job_id, timestamp):
try:
job = self.doc['jobs'][job_id]
except KeyError:
raise exceptions.BadDataFormat('job_id not found in session')
job['status'] = 'running'
job['result'] = ''
job['time_started'] = timestamp
class SessionsJob(object):
"""
Handler for endpoint: /v1/sessions/{session_id}/jobs/{job_id}
"""
def __init__(self, storage_driver):
self.db = storage_driver
def on_put(self, req, resp, session_id, job_id):
"""
add a job to a session
:param req:
:param resp:
:param session_id:
:param job_id:
:return:
"""
user_id = req.get_header('X-User-ID', '')
# --- update session object
job_doc = self.db.get_job(user_id=user_id, job_id=job_id)
job_schedule = job_doc.get('job_schedule', {})
session_update_doc = {
'jobs': {
job_id: {
'client_id': job_doc['client_id'],
'status': job_schedule.get('status', ''),
'result': job_schedule.get('result', ''),
'time_started': job_schedule.get('time_started', ''),
'time_ended': job_schedule.get('time_ended', '')
}
}
}
self.db.update_session(user_id=user_id,
session_id=session_id,
patch_doc=session_update_doc)
# --- update job object
session_doc = self.db.get_session(user_id=user_id,
session_id=session_id)
job_update_doc = {
'session_id': session_id,
'session_tag': session_doc['session_tag'],
'job_schedule': session_doc['schedule']
}
self.db.update_job(user_id=user_id,
job_id=job_id,
patch_doc=job_update_doc)
resp.status = falcon.HTTP_204
def on_delete(self, req, resp, session_id, job_id):
"""
remove a job from the session
:param req:
:param resp:
:param session_id:
:param job_id:
:return:
"""
user_id = req.get_header('X-User-ID') or ''
session_doc = self.db.get_session(user_id=user_id,
session_id=session_id)
session_doc['jobs'].pop(job_id, None)
# when replacing, db might raise a VersionConflictEngineException
self.db.replace_session(user_id=user_id,
session_id=session_id,
doc=session_doc)
job_update_doc = {
'session_id': '',
'session_tag': '',
'job_event': 'stop'
}
self.db.update_job(user_id=user_id,
job_id=job_id,
patch_doc=job_update_doc)
resp.status = falcon.HTTP_204

View File

@ -77,8 +77,13 @@ except Exception as err:
def main():
# quick simple server for testing purposes or simple scenarios
ip, port = '127.0.0.1', 9090
if len(sys.argv) > 1:
ip = sys.argv[1]
if ':' in ip:
ip, port = ip.split(':')
httpd = simple_server.make_server(ip, int(port), application)
message = 'Server listening on {0}:{1}'.format(ip, port)
print message
logging.info(message)
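# Usage sketch (hedged; the module name is illustrative, not shown in this diff):
#   python freezer_api_wsgi.py                 -> listens on 127.0.0.1:9090
#   python freezer_api_wsgi.py 0.0.0.0:8080    -> listens on the given ip:port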

View File

@ -32,7 +32,7 @@ class FreezerAPIException(falcon.HTTPError):
def __init__(self, message=''):
if message:
self.message = str(message)
logging.error(message)
Exception.__init__(self, message)
@ -74,6 +74,7 @@ class DocumentNotFound(FreezerAPIException):
title="Not Found",
description=ex.message)
class AccessForbidden(FreezerAPIException):
@staticmethod
def handle(ex, req, resp, params):
@ -81,10 +82,20 @@ class AccessForbidden(FreezerAPIException):
title="Access Forbidden",
description=ex.message)
class MethodNotImplemented(FreezerAPIException):
@staticmethod
def handle(ex, req, resp, params):
raise falcon.HTTPMethodNotAllowed(
title="Bad Method",
description=ex.message)
exception_handlers_catalog = [
BadDataFormat,
DocumentExists,
StorageEngineError,
DocumentNotFound,
AccessForbidden,
MethodNotImplemented
]
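# Usage sketch (hedged): freezer-api wires these handlers into the falcon app
# elsewhere; a minimal wiring could look like this, with `app` a falcon.API:
#
#     for exc_class in exception_handlers_catalog:
#         app.add_error_handler(exc_class, exc_class.handle)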

View File

@ -19,124 +19,163 @@ Hudson (tjh@cryptsoft.com).
========================================================================
"""
freezer_action_properties = {
"action": {
"id": "action",
"pattern": "^[\w-]+$",
"type": "string"
},
"mode": {
"id": "mode",
"pattern": "^[\w-]+$",
"type": "string"
},
"src_file": {
"id": "src_file",
"type": "string"
},
"backup_name": {
"id": "backup_name",
"type": "string"
},
"container": {
"id": "container",
"type": "string"
},
"restore_abs_path": {
"id": "restore_abs_path",
"type": "string"
},
}
job_schedule_properties = {
"time_created": {
"id": "time_created",
"type": "integer"
},
"time_started": {
"id": "time_started",
"type": "integer"
},
"time_ended": {
"id": "time_ended",
"type": "integer"
},
"event": {
"id": "event",
"type": "string",
"enum": ["", "stop", "start", "abort", "remove"]
},
"status": {
"id": "status",
"type": "string",
"enum": ["completed", "stop", "scheduled", "running", "aborting", "removed"]
},
"result": {
"id": "result",
"type": "string",
"enum": ["", "success", "fail", "aborted"]
},
"schedule_date": {
"id": "schedule_date",
"type": "string",
"pattern": "^(-?(?:[1-9][0-9]*)?[0-9]{4})-(1[0-2]|0[1-9])-(3[01]|0[1-9]|[12][0-9])T(2[0-3]|[01][0-9]):([0-5][0-9]):([0-5][0-9])(\.[0-9]+)?(Z|[+-](?:2[0-3]|[01][0-9]):[0-5][0-9])?$"
},
"schedule_interval": {
"id": "schedule_interval",
"type": "string",
"pattern": "^(continuous|(\d+ +(weeks|weeks|days|hours|minutes|seconds)))$"
},
"schedule_start_date": {
"id": "schedule_start_date",
"type": "string",
"pattern": "^(-?(?:[1-9][0-9]*)?[0-9]{4})-(1[0-2]|0[1-9])-(3[01]|0[1-9]|[12][0-9])T(2[0-3]|[01][0-9]):([0-5][0-9]):([0-5][0-9])(\.[0-9]+)?(Z|[+-](?:2[0-3]|[01][0-9]):[0-5][0-9])?$"
},
"schedule_end_date": {
"id": "schedule_end_date",
"type": "string",
"pattern": "^(-?(?:[1-9][0-9]*)?[0-9]{4})-(1[0-2]|0[1-9])-(3[01]|0[1-9]|[12][0-9])T(2[0-3]|[01][0-9]):([0-5][0-9]):([0-5][0-9])(\.[0-9]+)?(Z|[+-](?:2[0-3]|[01][0-9]):[0-5][0-9])?$"
},
"schedule_year": {
"id": "schedule_year",
"type": "string",
"pattern": "^\d{4}$"
},
"schedule_month": {
"id": "schedule_month",
"type": "string"
},
"schedule_day": {
"id": "schedule_day",
"type": "string"
},
"schedule_week": {
"id": "schedule_week",
"type": "string"
},
"schedule_day_of_week": {
"id": "schedule_day_of_week",
"type": "string"
},
"schedule_hour": {
"id": "schedule_hour",
"type": "string"
},
"schedule_minute": {
"id": "schedule_minute",
"type": "string"
},
"schedule_second": {
"id": "schedule_second",
"type": "string"
},
}
"time_created": {
"id": "time_created",
"type": "integer"
},
"time_started": {
"id": "time_started",
"type": "integer"
},
"time_ended": {
"id": "time_ended",
"type": "integer"
},
"event": {
"id": "event",
"type": "string",
"enum": ["", "stop", "start", "abort", "remove"]
},
"status": {
"id": "status",
"type": "string",
"enum": ["completed", "stop", "scheduled",
"running", "aborting", "removed"]
},
"result": {
"id": "result",
"type": "string",
"enum": ["", "success", "fail", "aborted"]
},
"schedule_date": {
"id": "schedule_date",
"type": "string",
"pattern": "^(-?(?:[1-9][0-9]*)?[0-9]{4})-(1[0-2]|0[1-9])"
"-(3[01]|0[1-9]|[12][0-9])T(2[0-3]|[01][0-9])"
":([0-5][0-9]):([0-5][0-9])(\.[0-9]+)?(Z|[+-]"
"(?:2[0-3]|[01][0-9]):[0-5][0-9])?$"
},
"schedule_interval": {
"id": "schedule_interval",
"type": "string",
"pattern": "^(continuous|(\d+ +(weeks|weeks|days|"
"hours|minutes|seconds)))$"
},
"schedule_start_date": {
"id": "schedule_start_date",
"type": "string",
"pattern": "^(-?(?:[1-9][0-9]*)?[0-9]{4})-(1[0-2]|0[1-9])"
"-(3[01]|0[1-9]|[12][0-9])T(2[0-3]|[01][0-9]):"
"([0-5][0-9]):([0-5][0-9])(\.[0-9]+)?(Z|[+-]"
"(?:2[0-3]|[01][0-9]):[0-5][0-9])?$"
},
"schedule_end_date": {
"id": "schedule_end_date",
"type": "string",
"pattern": "^(-?(?:[1-9][0-9]*)?[0-9]{4})-(1[0-2]|0[1-9])"
"-(3[01]|0[1-9]|[12][0-9])T(2[0-3]|[01][0-9])"
":([0-5][0-9]):([0-5][0-9])(\.[0-9]+)?(Z|[+-]"
"(?:2[0-3]|[01][0-9]):[0-5][0-9])?$"
},
"schedule_year": {
"id": "schedule_year",
"type": "string",
"pattern": "^\d{4}$"
},
"schedule_month": {
"id": "schedule_month",
"type": "string"
},
"schedule_day": {
"id": "schedule_day",
"type": "string"
},
"schedule_week": {
"id": "schedule_week",
"type": "string"
},
"schedule_day_of_week": {
"id": "schedule_day_of_week",
"type": "string"
},
"schedule_hour": {
"id": "schedule_hour",
"type": "string"
},
"schedule_minute": {
"id": "schedule_minute",
"type": "string"
},
"schedule_second": {
"id": "schedule_second",
"type": "string"
},
}
job_schema = {
"id": "/",
"type": "object",
"properties": {
"definitions": {
"freezer_action": {
"properties": freezer_action_properties,
"additionalProperties": True
},
"job_action": {
"id": "job_action",
"type": "object",
"properties": job_action_properties,
"additionalProperties": True,
"required": [
"action"
]
"properties": {
"freezer_action": {
"$ref": "#/definitions/freezer_action"
},
"max_retries": {
"type": "integer"
},
"max_retries_interval": {
"type": "integer"
},
"mandatory": {
"type": "boolean"
}
},
"additionalProperties": True
},
"job_action_list": {
"items": {
"$ref": "#/definitions/job_action"
}
}
},
"properties": {
"job_actions": {
"$ref": "#/definitions/job_action_list"
},
"job_schedule": {
"id": "job_schedule",
@ -162,11 +201,15 @@ job_schema = {
"description": {
"id": "description",
"type": "string"
},
"_version": {
"id": "_version",
"type": "integer"
}
},
"additionalProperties": False,
"required": [
"job_action",
"job_actions",
"job_schedule",
"job_id",
"client_id",
@ -174,17 +217,41 @@ job_schema = {
]
}
job_patch_schema = {
"id": "/",
"type": "object",
"properties": {
"job_action": {
"id": "job_action",
"type": "object",
"properties": job_action_properties,
"definitions": {
"freezer_action": {
"properties": freezer_action_properties,
"additionalProperties": True
},
"job_action": {
"properties": {
"freezer_action": {
"$ref": "#/definitions/freezer_action"
},
"max_retries": {
"type": "integer"
},
"max_retries_interval": {
"type": "integer"
},
"mandatory": {
"type": "boolean"
}
},
"additionalProperties": True
},
"job_action_list": {
"items": {
"$ref": "#/definitions/job_action"
}
}
},
"properties": {
"job_actions": {
"$ref": "#/definitions/job_action_list"
},
"job_schedule": {
"id": "job_schedule",
"type": "object",
@ -193,20 +260,128 @@ job_patch_schema = {
},
"job_id": {
"id": "job_id",
"pattern": "^[\w-]+$",
"type": "string"
},
"client_id": {
"id": "client_id",
"pattern": "^[\w-]+$",
"type": "string"
},
"user_id": {
"id": "user_id",
"pattern": "^[\w-]+$",
"type": "string"
},
"description": {
"id": "description",
"type": "string"
},
"_version": {
"id": "_version",
"type": "integer"
}
},
"additionalProperties": False
}
additional_action_properties = {
"action_id": {
"id": "action_id",
"pattern": "^[\w-]+$",
"type": "string"
},
"user_id": {
"id": "user_id",
"pattern": "^[\w-]+$",
"type": "string"
},
}
action_schema = {
"id": "/",
"type": "object",
"properties": dict(freezer_action_properties.items() +
additional_action_properties.items()),
"additionalProperties": True,
"required": [
"action_id",
"user_id"
]
}
action_patch_schema = {
"id": "/",
"type": "object",
"properties": dict(freezer_action_properties.items() +
additional_action_properties.items()),
"additionalProperties": True
}
session_schema = {
"id": "/",
"type": "object",
"properties": {
"session_id": {
"id": "session_id",
"pattern": "^[\w-]+$",
"type": "string"
},
"user_id": {
"id": "user_id",
"pattern": "^[\w-]+$",
"type": "string"
},
"session_tag": {
"id": "session_tag",
"type": "integer"
},
"time_started": {
"id": "session_tag",
"type": "integer"
},
"time_ended": {
"id": "session_tag",
"type": "integer"
},
},
"additionalProperties": True,
"required": [
"session_id",
"session_tag",
"user_id"
]
}
session_patch_schema = {
"id": "/",
"type": "object",
"properties": {
"session_id": {
"id": "session_id",
"pattern": "^[\w-]+$",
"type": "string"
},
"user_id": {
"id": "user_id",
"pattern": "^[\w-]+$",
"type": "string"
},
"session_tag": {
"id": "session_tag",
"type": "integer"
},
"time_started": {
"id": "session_tag",
"type": "integer"
},
"time_ended": {
"id": "session_tag",
"type": "integer"
},
},
"additionalProperties": True
}
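# Usage sketch (hedged, example values): documents are validated against
# these schemas with jsonschema's Draft4Validator, e.g.:
#
#     import jsonschema
#     doc = {"session_id": "abc123", "session_tag": 0, "user_id": "user-1"}
#     jsonschema.Draft4Validator(schema=session_schema).validate(doc)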

View File

@ -55,7 +55,7 @@ class BackupMetadataDoc:
def backup_set_id(self):
return '{0}_{1}_{2}'.format(
self.data['container'],
self.data['hostname'],
self.data['backup_name']
)
@ -63,8 +63,8 @@ class BackupMetadataDoc:
def backup_id(self):
return '{0}_{1}_{2}'.format(
self.backup_set_id,
self.data['time_stamp'],
self.data['curr_backup_level']
)
@ -120,3 +120,102 @@ class JobDoc:
})
JobDoc.validate(doc)
return doc
class ActionDoc:
action_doc_validator = jsonschema.Draft4Validator(
schema=json_schemas.action_schema)
action_patch_validator = jsonschema.Draft4Validator(
schema=json_schemas.action_patch_schema)
@staticmethod
def validate(doc):
try:
ActionDoc.action_doc_validator.validate(doc)
except Exception as e:
raise exceptions.BadDataFormat(str(e).splitlines()[0])
@staticmethod
def validate_patch(doc):
try:
ActionDoc.action_patch_validator.validate(doc)
except Exception as e:
raise exceptions.BadDataFormat(str(e).splitlines()[0])
@staticmethod
def create_patch(doc):
# changes in user_id or action_id are not allowed
doc.pop('user_id', None)
doc.pop('action_id', None)
ActionDoc.validate_patch(doc)
return doc
@staticmethod
def create(doc, user_id):
doc.update({
'user_id': user_id,
'action_id': uuid.uuid4().hex,
})
ActionDoc.validate(doc)
return doc
@staticmethod
def update(doc, user_id, action_id):
doc.update({
'user_id': user_id,
'action_id': action_id,
})
ActionDoc.validate(doc)
return doc
class SessionDoc:
session_doc_validator = jsonschema.Draft4Validator(
schema=json_schemas.session_schema)
session_patch_validator = jsonschema.Draft4Validator(
schema=json_schemas.session_patch_schema)
@staticmethod
def validate(doc):
try:
SessionDoc.session_doc_validator.validate(doc)
except Exception as e:
raise exceptions.BadDataFormat(str(e).splitlines()[0])
@staticmethod
def validate_patch(doc):
try:
SessionDoc.session_patch_validator.validate(doc)
except Exception as e:
raise exceptions.BadDataFormat(str(e).splitlines()[0])
@staticmethod
def create_patch(doc):
# changes in user_id or session_id are not allowed
doc.pop('user_id', None)
doc.pop('session_id', None)
SessionDoc.validate_patch(doc)
return doc
@staticmethod
def create(doc, user_id, hold_off=30):
doc.update({
'user_id': user_id,
'session_id': uuid.uuid4().hex,
'session_tag': 0,
'status': 'active',
'last_start': '',
'jobs': []
})
doc['hold_off'] = doc.get('hold_off', hold_off)
SessionDoc.validate(doc)
return doc
@staticmethod
def update(doc, user_id, session_id):
doc.update({
'user_id': user_id,
'session_id': session_id,
})
SessionDoc.validate(doc)
return doc

View File

@ -23,6 +23,8 @@ import elasticsearch
import logging
from freezer_api.common.utils import BackupMetadataDoc
from freezer_api.common.utils import JobDoc
from freezer_api.common.utils import ActionDoc
from freezer_api.common.utils import SessionDoc
from freezer_api.common import exceptions
@ -33,16 +35,19 @@ class TypeManager:
self.doc_type = doc_type
@staticmethod
def get_base_search_filter(user_id, search=None):
search = search or {}
user_id_filter = {"term": {"user_id": user_id}}
base_filter = [user_id_filter]
match_list = [{"match": m} for m in search.get('match', [])]
match_not_list = [{"match": m} for m in search.get('match_not', [])]
base_filter.append({"query": {"bool": {"must": match_list,
"must_not": match_not_list}}})
return base_filter
@staticmethod
def get_search_query(user_id, doc_id, search=None):
search = search or {}
try:
base_filter = TypeManager.get_base_search_filter(user_id, search)
query_filter = {"filter": {"bool": {"must": base_filter}}}
@ -65,16 +70,19 @@ class TypeManager:
message='Get operation failed: {0}'.format(e))
if doc['user_id'] != user_id:
raise exceptions.AccessForbidden("Document access forbidden")
if '_version' in res:
doc['_version'] = res['_version']
return doc
def search(self, user_id, doc_id=None, search=None, offset=0, limit=10):
search = search or {}
query_dsl = self.get_search_query(user_id, doc_id, search)
try:
res = self.es.search(index=self.index, doc_type=self.doc_type,
size=limit, from_=offset, body=query_dsl)
except elasticsearch.ConnectionError:
raise exceptions.StorageEngineError(
message='unable to connect to db server')
except Exception as e:
raise exceptions.StorageEngineError(
message='search operation failed: {0}'.format(e))
@ -82,11 +90,17 @@ class TypeManager:
return [x['_source'] for x in hit_list]
def insert(self, doc, doc_id=None):
version = doc.pop('_version', 0)
try:
res = self.es.index(index=self.index, doc_type=self.doc_type,
body=doc, id=doc_id, version=version)
created = res['created']
version = res['_version']
except elasticsearch.TransportError as e:
if e.status_code == 409:
raise exceptions.DocumentExists(message=e.error)
raise exceptions.StorageEngineError(
message='index operation failed {0}'.format(e))
except Exception as e:
raise exceptions.StorageEngineError(
message='index operation failed {0}'.format(e))
@ -109,7 +123,8 @@ class BackupTypeManager(TypeManager):
TypeManager.__init__(self, es, doc_type, index=index)
@staticmethod
def get_search_query(user_id, doc_id, search=None):
search = search or {}
base_filter = TypeManager.get_base_search_filter(user_id, search)
if doc_id is not None:
base_filter.append({"term": {"backup_id": doc_id}})
@ -132,7 +147,8 @@ class ClientTypeManager(TypeManager):
TypeManager.__init__(self, es, doc_type, index=index)
@staticmethod
def get_search_query(user_id, doc_id, search=None):
search = search or {}
base_filter = TypeManager.get_base_search_filter(user_id, search)
if doc_id is not None:
base_filter.append({"term": {"client_id": doc_id}})
@ -145,7 +161,8 @@ class JobTypeManager(TypeManager):
TypeManager.__init__(self, es, doc_type, index=index)
@staticmethod
def get_search_query(user_id, doc_id, search=None):
search = search or {}
base_filter = TypeManager.get_base_search_filter(user_id, search)
if doc_id is not None:
base_filter.append({"term": {"job_id": doc_id}})
@ -153,21 +170,93 @@ class JobTypeManager(TypeManager):
return {'query': {'filtered': query_filter}}
def update(self, job_id, job_update_doc):
version = job_update_doc.pop('_version', 0)
update_doc = {"doc": job_update_doc}
try:
res = self.es.update(index=self.index, doc_type=self.doc_type,
id=job_id, body=update_doc, version=version)
version = res['_version']
except elasticsearch.TransportError as e:
if e.status_code == 409:
raise exceptions.DocumentExists(message=e.error)
raise exceptions.DocumentNotFound(
message='Unable to find job to update '
'with id {0}. {1}'.format(job_id, e))
except Exception:
raise exceptions.StorageEngineError(
message='Unable to update job with id {0}'.format(job_id))
return version
class ActionTypeManager(TypeManager):
def __init__(self, es, doc_type, index='freezer'):
TypeManager.__init__(self, es, doc_type, index=index)
@staticmethod
def get_search_query(user_id, doc_id, search=None):
search = search or {}
base_filter = TypeManager.get_base_search_filter(user_id, search)
if doc_id is not None:
base_filter.append({"term": {"action_id": doc_id}})
query_filter = {"filter": {"bool": {"must": base_filter}}}
return {'query': {'filtered': query_filter}}
def update(self, action_id, action_update_doc):
version = action_update_doc.pop('_version', 0)
update_doc = {"doc": action_update_doc}
try:
res = self.es.update(index=self.index, doc_type=self.doc_type,
id=action_id, body=update_doc,
version=version)
version = res['_version']
except elasticsearch.TransportError as e:
if e.status_code == 409:
raise exceptions.DocumentExists(message=e.error)
raise exceptions.DocumentNotFound(
message='Unable to find action to update '
'with id {0} '.format(action_id))
except Exception:
raise exceptions.StorageEngineError(
message='Unable to update action with'
' id {0}'.format(action_id))
return version
class SessionTypeManager(TypeManager):
def __init__(self, es, doc_type, index='freezer'):
TypeManager.__init__(self, es, doc_type, index=index)
@staticmethod
def get_search_query(user_id, doc_id, search=None):
search = search or {}
base_filter = TypeManager.get_base_search_filter(user_id, search)
if doc_id is not None:
base_filter.append({"term": {"session_id": doc_id}})
query_filter = {"filter": {"bool": {"must": base_filter}}}
return {'query': {'filtered': query_filter}}
def update(self, session_id, session_update_doc):
version = session_update_doc.pop('_version', 0)
update_doc = {"doc": session_update_doc}
try:
res = self.es.update(index=self.index, doc_type=self.doc_type,
id=session_id, body=update_doc,
version=version)
version = res['_version']
except elasticsearch.TransportError as e:
if e.status_code == 409:
raise exceptions.DocumentExists(message=e.error)
raise exceptions.DocumentNotFound(
message='Unable to update session {0}. '
'{1}'.format(session_id, e))
except Exception:
raise exceptions.StorageEngineError(
message='Unable to update session with '
'id {0}'.format(session_id))
return version
class ElasticSearchEngine(object):
def __init__(self, hosts, index='freezer'):
@ -177,9 +266,12 @@ class ElasticSearchEngine(object):
self.backup_manager = BackupTypeManager(self.es, 'backups')
self.client_manager = ClientTypeManager(self.es, 'clients')
self.job_manager = JobTypeManager(self.es, 'jobs')
self.action_manager = ActionTypeManager(self.es, 'actions')
self.session_manager = SessionTypeManager(self.es, 'sessions')
def get_backup(self, user_id, backup_id=None,
offset=0, limit=10, search=None):
search = search or {}
return self.backup_manager.search(user_id,
backup_id,
search=search,
@ -204,7 +296,8 @@ class ElasticSearchEngine(object):
return self.backup_manager.delete(user_id, backup_id)
def get_client(self, user_id, client_id=None,
offset=0, limit=10, search=None):
search = search or {}
return self.client_manager.search(user_id,
client_id,
search=search,
@ -233,7 +326,8 @@ class ElasticSearchEngine(object):
def get_job(self, user_id, job_id):
return self.job_manager.get(user_id, job_id)
def search_job(self, user_id, offset=0, limit=10, search=None):
search = search or {}
return self.job_manager.search(user_id,
search=search,
offset=offset,
@ -278,3 +372,103 @@ class ElasticSearchEngine(object):
logging.info('Job {0} replaced with version {1}'.
format(job_id, version))
return version
def get_action(self, user_id, action_id):
return self.action_manager.get(user_id, action_id)
def search_action(self, user_id, offset=0, limit=10, search=None):
search = search or {}
return self.action_manager.search(user_id,
search=search,
offset=offset,
limit=limit)
def add_action(self, user_id, doc):
actiondoc = ActionDoc.create(doc, user_id)
action_id = actiondoc['action_id']
self.action_manager.insert(actiondoc, action_id)
logging.info('Action registered, action id: {0}'.
format(action_id))
return action_id
def delete_action(self, user_id, action_id):
return self.action_manager.delete(user_id, action_id)
def update_action(self, user_id, action_id, patch_doc):
valid_patch = ActionDoc.create_patch(patch_doc)
# check that document exists
assert (self.action_manager.get(user_id, action_id))
version = self.action_manager.update(action_id, valid_patch)
logging.info('Action {0} updated to version {1}'.
format(action_id, version))
return version
def replace_action(self, user_id, action_id, doc):
# check that no document exists with
# same action_id and different user_id
try:
self.action_manager.get(user_id, action_id)
except exceptions.DocumentNotFound:
pass
valid_doc = ActionDoc.update(doc, user_id, action_id)
(created, version) = self.action_manager.insert(valid_doc, action_id)
if created:
logging.info('Action {0} created'.format(action_id))
else:
logging.info('Action {0} replaced with version {1}'.
format(action_id, version))
return version
def get_session(self, user_id, session_id):
return self.session_manager.get(user_id, session_id)
def search_session(self, user_id, offset=0, limit=10, search=None):
search = search or {}
return self.session_manager.search(user_id,
search=search,
offset=offset,
limit=limit)
def add_session(self, user_id, doc):
session_doc = SessionDoc.create(doc, user_id)
session_id = session_doc['session_id']
self.session_manager.insert(session_doc, session_id)
logging.info('Session registered, session id: {0}'.
format(session_id))
return session_id
def delete_session(self, user_id, session_id):
return self.session_manager.delete(user_id, session_id)
def update_session(self, user_id, session_id, patch_doc):
valid_patch = SessionDoc.create_patch(patch_doc)
# check that document exists
assert (self.session_manager.get(user_id, session_id))
version = self.session_manager.update(session_id, valid_patch)
logging.info('Session {0} updated to version {1}'.
format(session_id, version))
return version
def replace_session(self, user_id, session_id, doc):
# check that no document exists with
# same session_id and different user_id
try:
self.session_manager.get(user_id, session_id)
except exceptions.DocumentNotFound:
pass
valid_doc = SessionDoc.update(doc, user_id, session_id)
(created, version) = self.session_manager.insert(valid_doc, session_id)
if created:
logging.info('Session {0} created'.format(session_id))
else:
logging.info('Session {0} replaced with version {1}'.
format(session_id, version))
return version
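# Usage sketch (hedged; the host value is a placeholder):
#
#     engine = ElasticSearchEngine(hosts='localhost:9200')
#     session_id = engine.add_session(user_id='user-1',
#                                     doc={'description': 'nightly run'})
#     engine.update_session(user_id='user-1', session_id=session_id,
#                           patch_doc={'hold_off': 120})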

View File

@ -0,0 +1,6 @@
oslo.config
oslo.i18n
keystonemiddleware
jsonschema
elasticsearch
falcon>=0.2.0

View File

@ -7,7 +7,7 @@ summary = OpenStack Backup and Restore Service
description-file =
README.rst
author = Fausto Marzi, Fabrizio Fresco, Fabrizio Vanni,
author_email = fausto.marzi@hp.com, fabrizio.vanni@hp.com, fabrizio.fresco@hp.com
home-page = https://github.com/stackforge/freezer

View File

@ -34,10 +34,10 @@ fake_data_0_wrapped_backup_metadata = {
'user_name': 'asdffdsa',
'backup_metadata': {
"container": "freezer_container",
"host_name": "alpha",
"hostname": "alpha",
"backup_name": "important_data_backup",
"timestamp": 8475903425,
"level": 0,
"time_stamp": 8475903425,
"curr_backup_level": 0,
"backup_session": 8475903425,
"max_level": 5,
"mode" : "fs",
@ -60,10 +60,10 @@ fake_data_0_wrapped_backup_metadata = {
fake_data_0_backup_metadata = {
"container": "freezer_container",
"host_name": "alpha",
"hostname": "alpha",
"backup_name": "important_data_backup",
"timestamp": 8475903425,
"level": 0,
"time_stamp": 8475903425,
"curr_backup_level": 0,
"backup_session": 8475903425,
"max_level": 5,
"mode": "fs",
@ -84,10 +84,10 @@ fake_data_0_backup_metadata = {
}
fake_malformed_data_0_backup_metadata = {
"host_name": "alpha",
"hostname": "alpha",
"backup_name": "important_data_backup",
"timestamp": 8475903425,
"level": 0,
"time_stamp": 8475903425,
"curr_backup_level": 0,
"backup_session": 8475903425,
"max_level": 5,
"mode": "fs",
@ -123,10 +123,10 @@ fake_data_0_elasticsearch_hit = {
"_type": "backups",
"_source": {
"container": "freezer_container",
"host_name": "alpha",
"hostname": "alpha",
"backup_name": "important_data_backup",
"timestamp": 8475903425,
"level": 0,
"time_stamp": 8475903425,
"curr_backup_level": 0,
"backup_session": 8475903425,
"max_level": 5,
"mode" : "fs",
@ -180,33 +180,57 @@ fake_job_0_elasticsearch_not_found = {
"found": False
}
fake_job_0 = {
"job_action": {
"action": "backup",
"mode": "fs",
"src_file": "/home/tylerdurden/project_mayhem",
"backup_name": "project_mayhem_backup",
"container": "my_backup_container"
},
"job_schedule": {
"time_created": 1234,
"time_started": 1234,
"time_ended": 1234,
"status": "stop",
"schedule_date": "2015-06-02T16:20:00",
"schedule_interval": "2 days"
},
"job_id": "e7181e5e-2c75-43f8-92c0-c037ae5f11e4",
"client_id": "mytenantid_myhostname",
"user_id": "f4db4da085f043059441565720b217c7",
"description": "test action 4"
"job_actions": [
{
"freezer_action": {
"action": "backup",
"mode": "fs",
"src_file": "/home/tylerdurden/project_mayhem",
"backup_name": "project_mayhem_backup",
"container": "my_backup_container"
},
"max_retries": 3,
"max_retries_interval": 60,
"mandatory": False
},
{
"freezer_action": {
"action": "restore",
"mode": "fs",
"restore_abs_path": "/home/tylerdurden/project_mayhem",
"restore_from_host": "node_on_which_backup_was_made",
"backup_name": "project_mayhem_backup",
"container": "my_backup_container"
},
"max_retries": 3,
"max_retries_interval": 60,
"mandatory": True
}
],
"job_schedule": {
"time_created": 1234,
"time_started": 1234,
"time_ended": 1234,
"status": "stop",
"result": "success",
"schedule_date": "2015-06-02T16:20:00",
"schedule_interval": "2 days"
},
"job_id": "e7181e5e-2c75-43f8-92c0-c037ae5f11e4",
"client_id": "mytenantid_myhostname",
"user_id": "f4db4da085f043059441565720b217c7",
"description": "test action 4"
}
def get_fake_job_0():
return copy.deepcopy(fake_job_0)
def get_fake_job_1():
job = copy.deepcopy(fake_job_0)
job["job_id"] = 'pqoqurioew'
return job
fake_job_0_elasticsearch_found = {
"_id": "e7181e5e-2c75-43f8-92c0-c037ae5f11e4",
@ -218,61 +242,16 @@ fake_job_0_elasticsearch_found = {
}
fake_data_1_wrapped_backup_metadata = {
'backup_id': 'freezer_container_alpha_important_data_backup_125235431_1',
'user_id': 'qwerty1234',
'user_name': 'asdffdsa',
'backup_metadata': {
"container": "freezer_container",
"host_name": "alpha",
"hostname": "alpha",
"backup_name": "important_data_backup",
"timestamp": 125235431,
"level": 1,
"time_stamp": 125235431,
"curr_backup_level": 1,
"backup_session": 8475903425,
"max_level": 5,
"mode" : "fs",
@ -316,6 +295,117 @@ fake_client_entry_1 = {
}
fake_action_0 = {
"freezer_action":
{
"action": "backup",
"mode": "fs",
"src_file": "/home/tylerdurden/project_mayhem",
"backup_name": "project_mayhem_backup",
"container": "my_backup_container",
},
"exit_status": "success",
"max_retries": 3,
"max_retries_interval": 60,
"mandatory": True,
"action_id": "qwerqwerqwerrewq",
"user_id": "user_id-is-provided-by-keystone"
}
fake_action_1 = {
"freezer_action":
{
"action": "backup",
"mode": "fs",
"src_file": "/home/tylerdurden/project_mayhem",
"backup_name": "project_mayhem_backup",
"container": "my_backup_container",
},
"exit_status": "success",
"max_retries": 3,
"max_retries_interval": 60,
"mandatory": True,
"action_id": "jk4lkjbn4r3k",
"user_id": "user_id-is-provided-by-keystone"
}
def get_fake_action_0():
return copy.deepcopy(fake_action_0)
def get_fake_action_1():
return copy.deepcopy(fake_action_1)
fake_session_0 = {
"session_id": 'turistidellademocrazia',
"session_tag": 5,
"description": 'some text here',
"hold_off": 60,
"schedule": {
"time_created": 1234,
"time_started": 1234,
"time_ended": 0,
"status": "invalid",
"schedule_time": "2015-06-02T16:20:00"
},
"jobs": {
'venerescollataincorpodalolita': {
"client_id": 'bruco',
"status": 'running',
"start_time": 12344321,
},
'job_id_2': {
"client_id": "cocktail",
"status": 'completed',
"result": 'success',
"start_time": 123321,
"end_time": 123325,
}
},
"time_start": 123412344,
"time_end": 432234432,
"status": "running",
"user_id": "califfo"
}
fake_session_1 = {
"session_id": 'turistidellademocrazia',
"session_tag": 5,
"description": 'some text here',
"hold_off": 60,
"schedule": {
"time_created": 1234,
"time_started": 1234,
"time_ended": 0,
"status": "invalid",
"schedule_time": "2015-06-02T16:20:00"
},
"jobs": {
'venerescollataincorpodalolita': {
"client_id": 'bruco',
"status": 'running',
"start_time": 12344321,
}
},
"time_start": 123412344,
"time_end": 432234432,
"status": "running",
"user_id": "califfo"
}
def get_fake_session_0():
return copy.deepcopy(fake_session_0)
def get_fake_session_1():
return copy.deepcopy(fake_session_1)
class FakeReqResp:
def __init__(self, method='GET', body=''):

View File

@ -0,0 +1,148 @@
"""
Copyright 2015 Hewlett-Packard
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
This product includes cryptographic software written by Eric Young
(eay@cryptsoft.com). This product includes software written by Tim
Hudson (tjh@cryptsoft.com).
========================================================================
"""
import unittest
from mock import Mock, patch
import random
import falcon
from common import *
from freezer_api.common.exceptions import *
from freezer_api.api.v1 import actions as v1_actions
class TestActionsCollectionResource(unittest.TestCase):
def setUp(self):
self.mock_db = Mock()
self.mock_req = Mock()
self.mock_req.get_header.return_value = fake_action_0['user_id']
self.mock_req.context = {}
self.mock_req.status = falcon.HTTP_200
self.resource = v1_actions.ActionsCollectionResource(self.mock_db)
def test_on_get_return_empty_list(self):
self.mock_db.search_action.return_value = []
expected_result = {'actions': []}
self.resource.on_get(self.mock_req, self.mock_req)
result = self.mock_req.context['result']
self.assertEqual(result, expected_result)
self.assertEqual(self.mock_req.status, falcon.HTTP_200)
def test_on_get_return_correct_list(self):
self.mock_db.search_action.return_value = [get_fake_action_0(), get_fake_action_1()]
expected_result = {'actions': [get_fake_action_0(), get_fake_action_1()]}
self.resource.on_get(self.mock_req, self.mock_req)
result = self.mock_req.context['result']
self.assertEqual(result, expected_result)
self.assertEqual(self.mock_req.status, falcon.HTTP_200)
def test_on_post_raises_when_missing_body(self):
self.mock_db.add_action.return_value = fake_action_0['action_id']
self.assertRaises(BadDataFormat, self.resource.on_post, self.mock_req, self.mock_req)
def test_on_post_inserts_correct_data(self):
action = get_fake_action_0()
self.mock_req.context['doc'] = action
self.mock_db.add_action.return_value = 'pjiofrdslaikfunr'
expected_result = {'action_id': 'pjiofrdslaikfunr'}
self.resource.on_post(self.mock_req, self.mock_req)
self.assertEqual(self.mock_req.status, falcon.HTTP_201)
self.assertEqual(self.mock_req.context['result'], expected_result)
# assigned_action_id = self.mock_req.context['doc']['action_id']
# self.assertNotEqual(assigned_action_id, fake_action_0['action_id'])
class TestActionsResource(unittest.TestCase):
def setUp(self):
self.mock_db = Mock()
self.mock_req = Mock()
self.mock_req.get_header.return_value = fake_action_0['user_id']
self.mock_req.context = {}
self.mock_req.status = falcon.HTTP_200
self.resource = v1_actions.ActionsResource(self.mock_db)
def test_create_resource(self):
self.assertIsInstance(self.resource, v1_actions.ActionsResource)
def test_on_get_return_no_result_and_404_when_not_found(self):
self.mock_db.get_action.return_value = None
self.resource.on_get(self.mock_req, self.mock_req, fake_action_0['action_id'])
self.assertNotIn('result', self.mock_req.context)
self.assertEqual(self.mock_req.status, falcon.HTTP_404)
def test_on_get_return_correct_data(self):
self.mock_db.get_action.return_value = get_fake_action_0()
self.resource.on_get(self.mock_req, self.mock_req, fake_action_0['action_id'])
result = self.mock_req.context['result']
self.assertEqual(result, get_fake_action_0())
self.assertEqual(self.mock_req.status, falcon.HTTP_200)
def test_on_delete_removes_proper_data(self):
self.resource.on_delete(self.mock_req, self.mock_req, fake_action_0['action_id'])
result = self.mock_req.context['result']
expected_result = {'action_id': fake_action_0['action_id']}
self.assertEqual(self.mock_req.status, falcon.HTTP_204)
self.assertEqual(result, expected_result)
def test_on_patch_ok_with_some_fields(self):
new_version = random.randint(0, 99)
self.mock_db.update_action.return_value = new_version
patch_doc = {'some_field': 'some_value',
'because': 'size_matters'}
self.mock_req.context['doc'] = patch_doc
expected_patch = patch_doc.copy()
expected_result = {'action_id': fake_action_0['action_id'],
'version': new_version}
self.resource.on_patch(self.mock_req, self.mock_req, fake_action_0['action_id'])
self.mock_db.update_action.assert_called_with(
user_id=fake_action_0['user_id'],
action_id=fake_action_0['action_id'],
patch_doc=patch_doc)
self.assertEqual(self.mock_req.status, falcon.HTTP_200)
result = self.mock_req.context['result']
self.assertEqual(result, expected_result)
def test_on_post_ok(self):
new_version = random.randint(0, 99)
self.mock_db.replace_action.return_value = new_version
action = get_fake_action_0()
self.mock_req.context['doc'] = action
expected_result = {'action_id': fake_action_0['action_id'],
'version': new_version}
self.resource.on_post(self.mock_req, self.mock_req, fake_action_0['action_id'])
self.assertEqual(self.mock_req.status, falcon.HTTP_201)
self.assertEqual(self.mock_req.context['result'], expected_result)
def test_on_post_raises_when_db_replace_action_raises(self):
self.mock_db.replace_action.side_effect = AccessForbidden('regular test failure')
action = get_fake_action_0()
self.mock_req.context['doc'] = action
self.assertRaises(AccessForbidden, self.resource.on_post,
self.mock_req,
self.mock_req,
fake_action_0['action_id'])
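Both classes above test against mocks only, so they fully specify the resource contract: the user identity comes from a request header and the request/response documents ride in req.context. A minimal sketch consistent with the collection tests (the class name, the header name and the db call signatures are editorial assumptions, not part of this commit):

import falcon

from freezer_api.common.exceptions import BadDataFormat


class ActionsCollectionSketch(object):
    # hypothetical reduction of v1_actions.ActionsCollectionResource
    def __init__(self, db):
        self.db = db

    def on_get(self, req, resp):
        user_id = req.get_header('X-User-ID')   # header name assumed
        actions = self.db.search_action(user_id=user_id)
        req.context['result'] = {'actions': actions}

    def on_post(self, req, resp):
        doc = req.context.get('doc')
        if not doc:
            raise BadDataFormat('missing request body')
        action_id = self.db.add_action(user_id=req.get_header('X-User-ID'),
                                       doc=doc)
        resp.status = falcon.HTTP_201
        req.context['result'] = {'action_id': action_id}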

View File

@ -51,8 +51,6 @@ class TypeManager(unittest.TestCase):
]}}}]
self.assertEqual(q, expected_q)
def test_get_ok(self):
self.mock_es.get.return_value = fake_job_0_elasticsearch_found
res = self.type_manager.get(user_id=fake_job_0_user_id,
@ -102,16 +100,28 @@ class TypeManager(unittest.TestCase):
def test_insert_ok(self):
self.mock_es.index.return_value = {'created': True, '_version': 15}
test_doc = {'test_key_412': 'test_value_412'}
test_doc = {'test_key_412': 'test_value_412', '_version': 5}
res = self.type_manager.insert(doc=test_doc)
self.assertEqual(res, (True, 15))
self.mock_es.index.assert_called_with(index='freezer', doc_type='base_doc_type', body=test_doc, id=None)
self.mock_es.index.assert_called_with(index='freezer', doc_type='base_doc_type', body=test_doc, id=None, version=5)
def test_insert_fails(self):
def test_insert_raise_StorageEngineError_on_ES_Exception(self):
self.mock_es.index.side_effect = Exception('regular test failure')
test_doc = {'test_key_412': 'test_value_412'}
test_doc = {'test_key_412': 'test_value_412', '_version': 5}
self.assertRaises(StorageEngineError, self.type_manager.insert, doc=test_doc)
self.mock_es.index.assert_called_with(index='freezer', doc_type='base_doc_type', body=test_doc, id=None)
self.mock_es.index.assert_called_with(index='freezer', doc_type='base_doc_type', body=test_doc, id=None, version=5)
def test_insert_raise_StorageEngineError_on_ES_TransportError_exception(self):
self.mock_es.index.side_effect = TransportError(500, 'regular test failure')
test_doc = {'test_key_412': 'test_value_412', '_version': 5}
self.assertRaises(StorageEngineError, self.type_manager.insert, doc=test_doc)
self.mock_es.index.assert_called_with(index='freezer', doc_type='base_doc_type', body=test_doc, id=None, version=5)
def test_insert_raise_DocumentExists_on_ES_TransportError409_exception(self):
self.mock_es.index.side_effect = TransportError(409, 'regular test failure')
test_doc = {'test_key_412': 'test_value_412', '_version': 5}
self.assertRaises(DocumentExists, self.type_manager.insert, doc=test_doc)
self.mock_es.index.assert_called_with(index='freezer', doc_type='base_doc_type', body=test_doc, id=None, version=5)
def test_delete(self):
doc_id = 'mydocid345'
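The insert tests above pin down the optimistic-locking convention: a `_version` field riding inside the document is forwarded to `es.index()` as `version`, a 409 from Elasticsearch surfaces as DocumentExists, and any other failure as StorageEngineError. A sketch under those assumptions (the class is reduced to the tested surface; anything the tests do not assert is an editorial guess):

from elasticsearch import TransportError

from freezer_api.common.exceptions import DocumentExists, StorageEngineError


class TypeManagerInsertSketch(object):
    # hypothetical reduction of elastic.TypeManager
    def __init__(self, es, index='freezer', doc_type='base_doc_type'):
        self.es = es
        self.index = index
        self.doc_type = doc_type

    def insert(self, doc, doc_id=None):
        # '_version' stays in the body and doubles as the expected ES
        # version (treating 0 as "no expectation" is an assumption)
        version = doc.get('_version', 0)
        try:
            res = self.es.index(index=self.index, doc_type=self.doc_type,
                                body=doc, id=doc_id, version=version)
        except TransportError as e:
            if e.status_code == 409:    # version conflict
                raise DocumentExists('document already exists')
            raise StorageEngineError('index operation failed')
        except Exception:
            raise StorageEngineError('index operation failed')
        return res['created'], res['_version']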
@ -213,19 +223,26 @@ class JobTypeManager(unittest.TestCase):
u'_version': 3
}
res = self.job_manager.update(job_id=fake_job_0_job_id,
job_update_doc={'status': 'sleepy'})
job_update_doc={'status': 'sleepy', '_version': 12})
self.assertEqual(res, 3)
self.mock_es.update.assert_called_with(index=self.job_manager.index,
doc_type=self.job_manager.doc_type,
id=fake_job_0_job_id,
body={"doc": {'status': 'sleepy'}})
body={"doc": {'status': 'sleepy'}},
version=12)
def test_update_raise_DocumentNotFound_when_not_found(self):
self.mock_es.update.side_effect = TransportError('regular test failure')
self.mock_es.update.side_effect = TransportError('regular test failure', 1)
self.assertRaises(DocumentNotFound, self.job_manager.update,
job_id=fake_job_0_job_id,
job_update_doc={'status': 'sleepy'})
def test_update_raise_DocumentExists_when_elasticsearch_returns_409(self):
self.mock_es.update.side_effect = TransportError(409, 'regular test failure')
self.assertRaises(DocumentExists, self.job_manager.update,
job_id=fake_job_0_job_id,
job_update_doc={'status': 'sleepy'})
def test_update_raise_StorageEngineError_when_db_raises(self):
self.mock_es.update.side_effect = Exception('regular test failure')
self.assertRaises(StorageEngineError, self.job_manager.update,
@ -233,6 +250,125 @@ class JobTypeManager(unittest.TestCase):
job_update_doc={'status': 'sleepy'})
class ActionTypeManager(unittest.TestCase):
def setUp(self):
self.mock_es = Mock()
self.action_manager = elastic.ActionTypeManager(self.mock_es, 'actions')
def test_get_search_query(self):
my_search = {'match': [{'some_field': 'some text'},
{'description': 'some other text'}]}
q = self.action_manager.get_search_query('my_user_id', 'my_doc_id', search=my_search)
expected_q = {'query': {'filtered': {'filter':
{'bool':
{'must':
[{'term': {'user_id': 'my_user_id'}},
{'query':
{'bool':
{'must_not':
[],
'must':
[{'match': {'some_field': 'some text'}},
{'match': {'description': 'some other text'}}]}}},
{'term': {'action_id': 'my_doc_id'}}
]}}}}}
self.assertEqual(q, expected_q)
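# Reading the expected query bottom-up: results are always filtered by the
# caller's user_id, optionally pinned to a single document id, and the
# free-form 'match' clauses are nested as a bool query inside that filter.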
def test_update_ok(self):
self.mock_es.update.return_value = {
u'_id': u'd6c1e00d-b9c1-4eb3-8219-1e83c02af101',
u'_index': u'freezer',
u'_type': u'actions',
u'_version': 3
}
res = self.action_manager.update(action_id='poiuuiop7890',
action_update_doc={'status': 'sleepy', '_version': 12})
self.assertEqual(res, 3)
self.mock_es.update.assert_called_with(index=self.action_manager.index,
doc_type=self.action_manager.doc_type,
id='poiuuiop7890',
body={"doc": {'status': 'sleepy'}},
version=12)
def test_update_raise_DocumentNotFound_when_not_found(self):
self.mock_es.update.side_effect = TransportError('regular test failure', 1)
self.assertRaises(DocumentNotFound, self.action_manager.update,
action_id='asdfsadf',
action_update_doc={'status': 'sleepy'})
def test_update_raise_DocumentExists_when_elasticsearch_returns_409(self):
self.mock_es.update.side_effect = TransportError(409, 'regular test failure')
self.assertRaises(DocumentExists, self.action_manager.update,
action_id='pepepepepe2321',
action_update_doc={'status': 'sleepy'})
def test_update_raise_StorageEngineError_when_db_raises(self):
self.mock_es.update.side_effect = Exception('regular test failure')
self.assertRaises(StorageEngineError, self.action_manager.update,
action_id='pepepepepe2321',
action_update_doc={'status': 'sleepy'})
class SessionTypeManager(unittest.TestCase):
def setUp(self):
self.mock_es = Mock()
self.session_manager = elastic.SessionTypeManager(self.mock_es, 'sessions')
def test_get_search_query(self):
my_search = {'match': [{'some_field': 'some text'},
{'description': 'some other text'}]}
q = self.session_manager.get_search_query('my_user_id', 'my_doc_id', search=my_search)
expected_q = {'query': {'filtered': {'filter':
{'bool':
{'must':
[{'term': {'user_id': 'my_user_id'}},
{'query':
{'bool':
{'must_not':
[],
'must':
[{'match': {'some_field': 'some text'}},
{'match': {'description': 'some other text'}}]}}},
{'term': {'session_id': 'my_doc_id'}}
]}}}}}
self.assertEqual(q, expected_q)
def test_update_ok(self):
self.mock_es.update.return_value = {
u'_id': u'd6c1e00d-b9c1-4eb3-8219-1e83c02af101',
u'_index': u'freezer',
u'_type': u'actions',
u'_version': 3
}
res = self.session_manager.update(session_id='poiuuiop7890',
session_update_doc={'status': 'sleepy', '_version': 12})
self.assertEqual(res, 3)
self.mock_es.update.assert_called_with(index=self.session_manager.index,
doc_type=self.session_manager.doc_type,
id='poiuuiop7890',
body={"doc": {'status': 'sleepy'}},
version=12)
def test_update_raise_DocumentNotFound_when_not_found(self):
self.mock_es.update.side_effect = TransportError('regular test failure', 1)
self.assertRaises(DocumentNotFound, self.session_manager.update,
session_id='asdfsadf',
session_update_doc={'status': 'sleepy'})
def test_update_raise_DocumentExists_when_elasticsearch_returns_409(self):
self.mock_es.update.side_effect = TransportError(409, 'regular test failure')
self.assertRaises(DocumentExists, self.session_manager.update,
session_id='pepepepepe2321',
session_update_doc={'status': 'sleepy'})
def test_update_raise_StorageEngineError_when_db_raises(self):
self.mock_es.update.side_effect = Exception('regular test failure')
self.assertRaises(StorageEngineError, self.session_manager.update,
session_id='pepepepepe2321',
session_update_doc={'status': 'sleepy'})
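The job, action and session managers all pin the same update contract: `_version` is popped from the patch and sent as `version`, a 409 maps to DocumentExists, any other TransportError to DocumentNotFound, and everything else to StorageEngineError. A condensed sketch under those assumptions (the standalone function form is editorial):

from elasticsearch import TransportError

from freezer_api.common.exceptions import (DocumentExists, DocumentNotFound,
                                           StorageEngineError)


def update_doc_sketch(es, index, doc_type, doc_id, update_doc):
    # hypothetical stand-in for the three managers' update() methods
    version = update_doc.pop('_version', 0)
    try:
        res = es.update(index=index, doc_type=doc_type, id=doc_id,
                        body={'doc': update_doc}, version=version)
    except TransportError as e:
        if e.status_code == 409:        # stale version
            raise DocumentExists('version conflict on %s' % doc_id)
        raise DocumentNotFound('no document with id %s' % doc_id)
    except Exception:
        raise StorageEngineError('update of %s failed' % doc_id)
    return res['_version']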
class TestElasticSearchEngine_backup(unittest.TestCase):
@ -569,3 +705,270 @@ class TestElasticSearchEngine_job(unittest.TestCase):
job_id=fake_job_0_job_id,
doc=get_fake_job_0())
self.assertEqual(res, 3)
class TestElasticSearchEngine_action(unittest.TestCase):
@patch('freezer_api.storage.elastic.logging')
@patch('freezer_api.storage.elastic.elasticsearch')
def setUp(self, mock_elasticsearch, mock_logging):
mock_elasticsearch.Elasticsearch.return_value = Mock()
self.eng = elastic.ElasticSearchEngine('http://elasticservaddr:1997')
self.eng.action_manager = Mock()
def test_get_action_userid_and_action_id_return_doc(self):
self.eng.action_manager.get.return_value = get_fake_action_0()
res = self.eng.get_action(user_id=fake_action_0['user_id'],
action_id=fake_action_0['action_id'])
self.assertEqual(res, fake_action_0)
self.eng.action_manager.get.assert_called_with(
fake_action_0['user_id'],
fake_action_0['action_id'])
def test_get_action_userid_and_action_id_return_none(self):
self.eng.action_manager.get.return_value = None
res = self.eng.get_action(user_id=fake_action_0['user_id'],
action_id=fake_action_0['action_id'])
self.assertIsNone(res)
self.eng.action_manager.get.assert_called_with(
fake_action_0['user_id'],
fake_action_0['action_id'])
def test_get_action_with_userid_and_search_return_list(self):
self.eng.action_manager.search.return_value = \
[fake_action_0, fake_action_0]
my_search = {'match': [{'some_field': 'some text'},
{'description': 'some other text'}]}
res = self.eng.search_action(user_id=fake_action_0['user_id'],
offset=6, limit=15,
search=my_search)
self.assertEqual(res, [fake_action_0, fake_action_0])
self.eng.action_manager.search.assert_called_with(
fake_action_0['user_id'],
search=my_search,
limit=15, offset=6)
def test_get_action_with_userid_and_search_return_empty_list(self):
self.eng.action_manager.search.return_value = []
my_search = {'match': [{'some_field': 'some text'},
{'description': 'some other text'}]}
res = self.eng.search_action(user_id=fake_action_0['user_id'],
offset=6, limit=15,
search=my_search)
self.assertEqual(res, [])
self.eng.action_manager.search.assert_called_with(
fake_action_0['user_id'],
search=my_search,
limit=15, offset=6)
@patch('freezer_api.storage.elastic.ActionDoc')
def test_add_action_ok(self, mock_actiondoc):
mock_actiondoc.create.return_value = get_fake_action_0()
self.eng.action_manager.insert.return_value = (True, 1)
res = self.eng.add_action(user_id=fake_action_0['user_id'],
doc=get_fake_action_0())
self.assertEqual(res, fake_action_0['action_id'])
self.eng.action_manager.insert.assert_called_with(fake_action_0,
fake_action_0['action_id'])
def test_add_action_raises_StorageEngineError_when_manager_insert_raises(self):
self.eng.action_manager.get.return_value = None
self.eng.action_manager.insert.side_effect = StorageEngineError('regular test failure')
self.assertRaises(StorageEngineError, self.eng.add_action,
user_id=fake_action_0['user_id'],
doc=get_fake_action_0())
def test_delete_action_ok(self):
self.eng.action_manager.delete.return_value = fake_action_0['action_id']
res = self.eng.delete_action(user_id=fake_action_0['action_id'],
action_id=fake_action_0['action_id'])
self.assertEqual(res, fake_action_0['action_id'])
def test_delete_client_raises_StorageEngineError_when_es_delete_raises(self):
self.eng.action_manager.delete.side_effect = StorageEngineError()
self.assertRaises(StorageEngineError, self.eng.delete_action,
user_id=fake_action_0['action_id'],
action_id=fake_action_0['action_id'])
def test_update_action_raises_DocumentNotFound_when_doc_not_exists(self):
self.eng.action_manager.get.side_effect = DocumentNotFound('regular test failure')
patch = {'action_id': 'black_milk'}
self.assertRaises(DocumentNotFound, self.eng.update_action,
user_id=fake_action_0['action_id'],
action_id=fake_action_0['action_id'],
patch_doc=patch)
def test_update_action_raises_DocumentNotFound_when_update_raises_DocumentNotFound(self):
self.eng.action_manager.get.return_value = get_fake_action_0()
patch = {'action_id': 'black_milk'}
self.eng.action_manager.update.side_effect = DocumentNotFound('regular test failure')
self.assertRaises(DocumentNotFound, self.eng.update_action,
user_id=fake_action_0['action_id'],
action_id=fake_action_0['action_id'],
patch_doc=patch)
def test_update_action_returns_new_doc_version(self):
self.eng.action_manager.get.return_value = get_fake_action_0()
patch = {'action_id': 'group_four'}
self.eng.action_manager.update.return_value = 11
res = self.eng.update_action(user_id=fake_action_0['action_id'],
action_id=fake_action_0['action_id'],
patch_doc=patch)
self.assertEqual(res, 11)
def test_replace_action_raises_AccessForbidden_when_action_manager_raises_AccessForbidden(self):
self.eng.action_manager.get.side_effect = AccessForbidden('regular test failure')
self.eng.action_manager.insert.return_value = (True, 3)
self.assertRaises(AccessForbidden, self.eng.replace_action,
user_id=fake_action_0['action_id'],
action_id=fake_action_0['action_id'],
doc=get_fake_action_0())
# def test_replace_action_returns_ok_when_doc_is_new(self):
# self.eng.action_manager.get.side_effect = DocumentNotFound('regular test failure')
# self.eng.action_manager.insert.return_value = (True, 1)
# res = self.eng.replace_action(user_id=fake_action_0['action_id'],
# action_id=fake_action_0['action_id'],
# doc=get_fake_action_0())
# self.assertEqual(res, 1)
# def test_replace_action_returns_version_1_when_doc_is_overwritten(self):
# self.eng.action_manager.get.return_value = get_fake_action_0()
# self.eng.action_manager.insert.return_value = (False, 3)
# res = self.eng.replace_action(user_id=fake_action_0['action_id'],
# action_id=fake_action_0['action_id'],
# doc=get_fake_action_0())
# self.assertEqual(res, 3)
class TestElasticSearchEngine_session(unittest.TestCase):
@patch('freezer_api.storage.elastic.logging')
@patch('freezer_api.storage.elastic.elasticsearch')
def setUp(self, mock_elasticsearch, mock_logging):
mock_elasticsearch.Elasticsearch.return_value = Mock()
self.eng = elastic.ElasticSearchEngine('http://elasticservaddr:1997')
self.eng.session_manager = Mock()
def test_get_session_userid_and_session_id_return_doc(self):
self.eng.session_manager.get.return_value = get_fake_session_0()
res = self.eng.get_session(user_id=fake_session_0['user_id'],
session_id=fake_session_0['session_id'])
self.assertEqual(res, fake_session_0)
self.eng.session_manager.get.assert_called_with(fake_session_0['user_id'],
fake_session_0['session_id'])
def test_get_session_userid_and_session_id_return_none(self):
self.eng.session_manager.get.return_value = None
res = self.eng.get_session(user_id=fake_session_0['user_id'],
session_id=fake_session_0['session_id'])
self.assertIsNone(res)
self.eng.session_manager.get.assert_called_with(
fake_session_0['user_id'],
fake_session_0['session_id'])
def test_get_session_with_userid_and_search_return_list(self):
self.eng.session_manager.search.return_value = \
[fake_session_0, fake_session_0]
my_search = {'match': [{'some_field': 'some text'},
{'description': 'some other text'}]}
res = self.eng.search_session(user_id=fake_session_0['user_id'],
offset=6, limit=15,
search=my_search)
self.assertEqual(res, [fake_session_0, fake_session_0])
self.eng.session_manager.search.assert_called_with(
fake_session_0['user_id'],
search=my_search,
limit=15, offset=6)
def test_get_session_with_userid_and_search_return_empty_list(self):
self.eng.session_manager.search.return_value = []
my_search = {'match': [{'some_field': 'some text'},
{'description': 'some other text'}]}
res = self.eng.search_session(user_id=fake_session_0['user_id'],
offset=6, limit=15,
search=my_search)
self.assertEqual(res, [])
self.eng.session_manager.search.assert_called_with(
fake_session_0['user_id'],
search=my_search,
limit=15, offset=6)
@patch('freezer_api.storage.elastic.SessionDoc')
def test_add_session_ok(self, mock_sessiondoc):
mock_sessiondoc.create.return_value = get_fake_session_0()
self.eng.session_manager.insert.return_value = (True, 1)
res = self.eng.add_session(user_id=fake_session_0['user_id'],
doc=get_fake_session_0())
self.assertEqual(res, fake_session_0['session_id'])
self.eng.session_manager.insert.assert_called_with(fake_session_0,
fake_session_0['session_id'])
def test_add_session_raises_StorageEngineError_when_manager_insert_raises(self):
self.eng.session_manager.get.return_value = None
self.eng.session_manager.insert.side_effect = StorageEngineError('regular test failure')
self.assertRaises(StorageEngineError, self.eng.add_session,
user_id=fake_session_0['user_id'],
doc=get_fake_session_0())
def test_delete_session_ok(self):
self.eng.session_manager.delete.return_value = fake_session_0['session_id']
res = self.eng.delete_session(user_id=fake_session_0['session_id'],
session_id=fake_session_0['session_id'])
self.assertEqual(res, fake_session_0['session_id'])
def test_delete_client_raises_StorageEngineError_when_es_delete_raises(self):
self.eng.session_manager.delete.side_effect = StorageEngineError()
self.assertRaises(StorageEngineError, self.eng.delete_session,
user_id=fake_session_0['session_id'],
session_id=fake_session_0['session_id'])
def test_update_session_raises_DocumentNotFound_when_doc_not_exists(self):
self.eng.session_manager.get.side_effect = DocumentNotFound('regular test failure')
patch = {'session_id': 'black_milk'}
self.assertRaises(DocumentNotFound, self.eng.update_session,
user_id=fake_session_0['session_id'],
session_id=fake_session_0['session_id'],
patch_doc=patch)
def test_update_session_raises_DocumentNotFound_when_update_raises_DocumentNotFound(self):
self.eng.session_manager.get.return_value = get_fake_session_0()
patch = {'session_id': 'black_milk'}
self.eng.session_manager.update.side_effect = DocumentNotFound('regular test failure')
self.assertRaises(DocumentNotFound, self.eng.update_session,
user_id=fake_session_0['session_id'],
session_id=fake_session_0['session_id'],
patch_doc=patch)
def test_update_session_returns_new_doc_version(self):
self.eng.session_manager.get.return_value = get_fake_session_0()
patch = {'session_id': 'group_four'}
self.eng.session_manager.update.return_value = 11
res = self.eng.update_session(user_id=fake_session_0['session_id'],
session_id=fake_session_0['session_id'],
patch_doc=patch)
self.assertEqual(res, 11)
def test_replace_session_raises_AccessForbidden_when_session_manager_raises_AccessForbidden(self):
self.eng.session_manager.get.side_effect = AccessForbidden('regular test failure')
self.eng.session_manager.insert.return_value = (True, 3)
self.assertRaises(AccessForbidden, self.eng.replace_session,
user_id=fake_session_0['session_id'],
session_id=fake_session_0['session_id'],
doc=get_fake_session_0())
def test_replace_session_returns_ok_when_doc_is_new(self):
self.eng.session_manager.get.side_effect = DocumentNotFound('regular test failure')
self.eng.session_manager.insert.return_value = (True, 1)
res = self.eng.replace_session(user_id=fake_session_0['session_id'],
session_id=fake_session_0['session_id'],
doc=get_fake_session_0())
self.assertEqual(res, 1)
def test_replace_session_returns_version_1_when_doc_is_overwritten(self):
self.eng.session_manager.get.return_value = get_fake_session_0()
self.eng.session_manager.insert.return_value = (False, 3)
res = self.eng.replace_session(user_id=fake_session_0['session_id'],
session_id=fake_session_0['session_id'],
doc=get_fake_session_0())
self.assertEqual(res, 3)

View File

@ -1,6 +1,6 @@
"""Freezer swift.py related tests
Copyright 2014 Hewlett-Packard
Copyright 2015 Hewlett-Packard
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.

View File

@ -1,3 +1,25 @@
"""Freezer swift.py related tests
Copyright 2015 Hewlett-Packard
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
This product includes cryptographic software written by Eric Young
(eay@cryptsoft.com). This product includes software written by Tim
Hudson (tjh@cryptsoft.com).
========================================================================
"""
import unittest
from mock import Mock, patch

View File

@ -0,0 +1,435 @@
"""
Copyright 2015 Hewlett-Packard
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
This product includes cryptographic software written by Eric Young
(eay@cryptsoft.com). This product includes software written by Tim
Hudson (tjh@cryptsoft.com).
========================================================================
"""
import unittest
from mock import Mock, patch
import random
import falcon
from common import *
from freezer_api.common.exceptions import *
from freezer_api.api.v1 import sessions as v1_sessions
class TestSessionsCollectionResource(unittest.TestCase):
def setUp(self):
self.mock_db = Mock()
self.mock_req = Mock()
self.mock_req.get_header.return_value = fake_session_0['user_id']
self.mock_req.context = {}
self.mock_req.status = falcon.HTTP_200
self.resource = v1_sessions.SessionsCollectionResource(self.mock_db)
def test_on_get_return_empty_list(self):
self.mock_db.search_session.return_value = []
expected_result = {'sessions': []}
self.resource.on_get(self.mock_req, self.mock_req)
result = self.mock_req.context['result']
self.assertEqual(result, expected_result)
self.assertEqual(self.mock_req.status, falcon.HTTP_200)
def test_on_get_return_correct_list(self):
self.mock_db.search_session.return_value = [get_fake_session_0(), get_fake_session_1()]
expected_result = {'sessions': [get_fake_session_0(), get_fake_session_1()]}
self.resource.on_get(self.mock_req, self.mock_req)
result = self.mock_req.context['result']
self.assertEqual(result, expected_result)
self.assertEqual(self.mock_req.status, falcon.HTTP_200)
def test_on_post_raises_when_missing_body(self):
self.mock_db.add_session.return_value = fake_session_0['session_id']
self.assertRaises(BadDataFormat, self.resource.on_post, self.mock_req, self.mock_req)
def test_on_post_inserts_correct_data(self):
session = get_fake_session_0()
self.mock_req.context['doc'] = session
self.mock_db.add_session.return_value = 'pjiofrdslaikfunr'
expected_result = {'session_id': 'pjiofrdslaikfunr'}
self.resource.on_post(self.mock_req, self.mock_req)
self.assertEqual(self.mock_req.status, falcon.HTTP_201)
self.assertEqual(self.mock_req.context['result'], expected_result)
# assigned_session_id = self.mock_req.context['doc']['session_id']
# self.assertNotEqual(assigned_session_id, fake_session_0['session_id'])
class TestSessionsResource(unittest.TestCase):
def setUp(self):
self.mock_db = Mock()
self.mock_req = Mock()
self.mock_req.get_header.return_value = fake_session_0['user_id']
self.mock_req.context = {}
self.mock_req.status = falcon.HTTP_200
self.resource = v1_sessions.SessionsResource(self.mock_db)
def test_create_resource(self):
self.assertIsInstance(self.resource, v1_sessions.SessionsResource)
def test_on_get_return_no_result_and_404_when_not_found(self):
self.mock_db.get_session.return_value = None
self.resource.on_get(self.mock_req, self.mock_req, fake_session_0['session_id'])
self.assertNotIn('result', self.mock_req.context)
self.assertEqual(self.mock_req.status, falcon.HTTP_404)
def test_on_get_return_correct_data(self):
self.mock_db.get_session.return_value = get_fake_session_0()
self.resource.on_get(self.mock_req, self.mock_req, fake_session_0['session_id'])
result = self.mock_req.context['result']
self.assertEqual(result, get_fake_session_0())
self.assertEqual(self.mock_req.status, falcon.HTTP_200)
def test_on_delete_removes_proper_data(self):
self.resource.on_delete(self.mock_req, self.mock_req, fake_session_0['session_id'])
result = self.mock_req.context['result']
expected_result = {'session_id': fake_session_0['session_id']}
self.assertEqual(self.mock_req.status, falcon.HTTP_204)
self.assertEqual(result, expected_result)
def test_on_patch_ok_with_some_fields(self):
new_version = random.randint(0, 99)
self.mock_db.update_session.return_value = new_version
patch_doc = {'some_field': 'some_value',
'because': 'size_matters'}
self.mock_req.context['doc'] = patch_doc
expected_patch = patch_doc.copy()
expected_result = {'session_id': fake_session_0['session_id'],
'version': new_version}
self.resource.on_patch(self.mock_req, self.mock_req, fake_session_0['session_id'])
self.mock_db.update_session.assert_called_with(
user_id=fake_session_0['user_id'],
session_id=fake_session_0['session_id'],
patch_doc=patch_doc)
self.assertEqual(self.mock_req.status, falcon.HTTP_200)
result = self.mock_req.context['result']
self.assertEqual(result, expected_result)
def test_on_post_ok(self):
new_version = random.randint(0, 99)
self.mock_db.replace_session.return_value = new_version
session = get_fake_session_0()
self.mock_req.context['doc'] = session
expected_result = {'session_id': fake_session_0['session_id'],
'version': new_version}
self.resource.on_post(self.mock_req, self.mock_req, fake_session_0['session_id'])
self.assertEqual(self.mock_req.status, falcon.HTTP_201)
self.assertEqual(self.mock_req.context['result'], expected_result)
def test_on_post_raises_when_db_replace_session_raises(self):
self.mock_db.replace_session.side_effect = AccessForbidden('regular test failure')
session = get_fake_session_0()
self.mock_req.context['doc'] = session
self.assertRaises(AccessForbidden, self.resource.on_post,
self.mock_req,
self.mock_req,
fake_session_0['session_id'])
class TestSessionsAction(unittest.TestCase):
def setUp(self):
self.mock_db = Mock()
self.mock_req = Mock()
self.mock_req.get_header.return_value = fake_session_0['user_id']
self.mock_req.context = {}
self.mock_req.status = falcon.HTTP_200
self.resource = v1_sessions.SessionsAction(self.mock_db)
def test_create_resource(self):
self.assertIsInstance(self.resource, v1_sessions.SessionsAction)
def test_on_post_raises_when_unable_to_read_action_from_body(self):
self.mock_req.context['doc'] = {}
self.assertRaises(BadDataFormat, self.resource.on_post,
self.mock_req,
self.mock_req,
fake_session_0['session_id'])
def test_on_post_start_action_ok(self):
new_version = random.randint(0, 99)
self.mock_db.get_session.return_value = get_fake_session_0()
self.mock_db.update_session.return_value = new_version
action = {"start": {
"job_id": 'job_id_2',
"current_tag": 5
}}
self.mock_req.context['doc'] = action
expected_result = {'result': 'success',
'session_tag': 6}
self.resource.on_post(self.mock_req, self.mock_req, fake_session_0['session_id'])
self.assertEqual(self.mock_req.status, falcon.HTTP_202)
self.assertEqual(self.mock_req.context['result'], expected_result)
def test_on_post_start_action_raises_BadDataFormat_when_job_not_in_session(self):
new_version = random.randint(0, 99)
self.mock_db.get_session.return_value = get_fake_session_0()
self.mock_db.update_session.return_value = new_version
action = {"start": {
"job_id": 'missedme',
"current_tag": 5
}}
self.mock_req.context['doc'] = action
self.assertRaises(BadDataFormat, self.resource.on_post, self.mock_req,
self.mock_req, fake_session_0['session_id'])
def test_on_post_start_action_raises_BadDataFormat_when_current_tag_too_high(self):
new_version = random.randint(0, 99)
self.mock_db.get_session.return_value = get_fake_session_0()
self.mock_db.update_session.return_value = new_version
action = {"start": {
"job_id": 'missedme',
"current_tag": 6
}}
self.mock_req.context['doc'] = action
self.assertRaises(BadDataFormat, self.resource.on_post, self.mock_req,
self.mock_req, fake_session_0['session_id'])
def test_on_post_end_action_ok(self):
new_version = random.randint(0, 99)
self.mock_db.get_session.return_value = get_fake_session_0()
self.mock_db.update_session.return_value = new_version
action = {"end": {
"job_id": 'job_id_2',
"current_tag": 5,
"result": "success"
}}
self.mock_req.context['doc'] = action
expected_result = {'result': 'success',
'session_tag': 5}
self.resource.on_post(self.mock_req, self.mock_req, fake_session_0['session_id'])
self.assertEqual(self.mock_req.status, falcon.HTTP_202)
self.assertEqual(self.mock_req.context['result'], expected_result)
def test_on_post_end_action_raises_BadDataFormat_when_job_not_in_session(self):
new_version = random.randint(0, 99)
self.mock_db.get_session.return_value = get_fake_session_0()
self.mock_db.update_session.return_value = new_version
action = {"end": {
"job_id": 'ahahahahah',
"current_tag": 5,
"result": "success"
}}
self.mock_req.context['doc'] = action
self.assertRaises(BadDataFormat, self.resource.on_post, self.mock_req,
self.mock_req, fake_session_0['session_id'])
def test_on_post_raises_MethodNotImplemented_when_method_not_implemented(self):
new_version = random.randint(0, 99)
self.mock_db.get_session.return_value = get_fake_session_0()
self.mock_db.update_session.return_value = new_version
action = {"method_not_implemented": {
"job_id": 'ahahahahah',
"current_tag": 5,
"result": "success"
}}
self.mock_req.context['doc'] = action
self.assertRaises(MethodNotImplemented, self.resource.on_post, self.mock_req,
self.mock_req, fake_session_0['session_id'])
@patch('freezer_api.api.v1.sessions.time')
def test_on_post_start_succeeds_in_holdoff_if_tag_needs_not_increment(self, mock_time):
mock_time.time.return_value = 1000
new_version = random.randint(0, 99)
session_doc = get_fake_session_0()
session_doc['time_start'] = 999
self.mock_db.get_session.return_value = session_doc
self.mock_db.update_session.return_value = new_version
action = {"start": {
"job_id": 'job_id_2',
"current_tag": 4
}}
self.mock_req.context['doc'] = action
expected_result = {'result': 'success',
'session_tag': 5}
self.resource.on_post(self.mock_req, self.mock_req, fake_session_0['session_id'])
self.assertEqual(self.mock_req.status, falcon.HTTP_202)
self.assertEqual(self.mock_req.context['result'], expected_result)
@patch('freezer_api.api.v1.sessions.time')
def test_on_post_start_replies_holdoff_if_tag_would_increment(self, mock_time):
mock_time.time.return_value = 1000
new_version = random.randint(0, 99)
session_doc = get_fake_session_0()
session_doc['time_start'] = 999
self.mock_db.get_session.return_value = session_doc
self.mock_db.update_session.return_value = new_version
action = {"start": {
"job_id": 'job_id_2',
"current_tag": 5
}}
self.mock_req.context['doc'] = action
expected_result = {'result': 'hold-off',
'session_tag': 5}
self.resource.on_post(self.mock_req, self.mock_req, fake_session_0['session_id'])
self.assertEqual(self.mock_req.status, falcon.HTTP_202)
self.assertEqual(self.mock_req.context['result'], expected_result)
@patch('freezer_api.api.v1.sessions.time')
def test_on_post_start_outofholdoff_replies_outofsync_when_tag_too_low(self, mock_time):
mock_time.time.return_value = 2000
new_version = random.randint(0, 99)
session_doc = get_fake_session_0()
session_doc['time_start'] = 999
self.mock_db.get_session.return_value = session_doc
self.mock_db.update_session.return_value = new_version
action = {"start": {
"job_id": 'job_id_2',
"current_tag": 2
}}
self.mock_req.context['doc'] = action
expected_result = {'result': 'out-of-sync',
'session_tag': 5}
self.resource.on_post(self.mock_req, self.mock_req, fake_session_0['session_id'])
self.assertEqual(self.mock_req.status, falcon.HTTP_202)
self.assertEqual(self.mock_req.context['result'], expected_result)
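Taken together, the start-action tests specify a small hold-off state machine. A sketch reconstructed from their expectations (a standalone function; the name and the exact ordering of the checks are editorial):

import time

from freezer_api.common.exceptions import BadDataFormat


def start_session_sketch(session, job_id, current_tag):
    if job_id not in session['jobs']:
        raise BadDataFormat('job is not part of the session')
    if current_tag > session['session_tag']:
        raise BadDataFormat('job tag ahead of session tag')

    now = int(time.time())
    in_hold_off = now <= session['time_start'] + session['hold_off']

    if in_hold_off:
        if current_tag < session['session_tag']:
            # the session already started this round: the job just joins it
            return {'result': 'success',
                    'session_tag': session['session_tag']}
        # starting now would bump the tag mid hold-off: ask the job to wait
        return {'result': 'hold-off', 'session_tag': session['session_tag']}

    if current_tag < session['session_tag']:
        # too late to join the previous round, too stale to start a new one
        return {'result': 'out-of-sync',
                'session_tag': session['session_tag']}

    # out of hold-off and up to date: open a new round
    session['session_tag'] += 1
    session['time_start'] = now
    return {'result': 'success', 'session_tag': session['session_tag']}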
class TestSessions(unittest.TestCase):
def setUp(self):
self.session_doc = {}
self.session = v1_sessions.Session(self.session_doc)
def test_create_resource(self):
self.assertIsInstance(self.session, v1_sessions.Session)
def test_overall_result_running(self):
self.session_doc['jobs'] = {'job1': {'status': 'completed',
'result': 'success'},
'job2': {'status': 'running',
'result': ''}}
res = self.session.get_job_overall_result()
self.assertEqual(res, 'running')
def test_overall_result_fail(self):
self.session_doc['jobs'] = {'job1': {'status': 'completed',
'result': 'success'},
'job2': {'status': 'completed',
'result': 'fail'}}
res = self.session.get_job_overall_result()
self.assertEqual(res, 'fail')
def test_overall_result_success(self):
self.session_doc['jobs'] = {'job1': {'status': 'completed',
'result': 'success'},
'job2': {'status': 'completed',
'result': 'success'}}
res = self.session.get_job_overall_result()
self.assertEqual(res, 'success')
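A sketch of the aggregation these three tests pin down (a hypothetical reduction of Session.get_job_overall_result):

def get_job_overall_result_sketch(jobs):
    # any job still running wins over everything; otherwise a single
    # failed job marks the whole session as failed
    result = 'success'
    for job in jobs.values():
        if job['status'] != 'completed':
            return 'running'
        if job['result'] != 'success':
            result = 'fail'
    return result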
class TestSessionsJobs(unittest.TestCase):
def setUp(self):
self.mock_db = Mock()
self.mock_req = Mock()
self.mock_req.get_header.return_value = fake_session_0['user_id']
self.mock_req.context = {}
self.mock_req.status = falcon.HTTP_200
self.resource = v1_sessions.SessionsJob(self.mock_db)
def test_create_resource(self):
self.assertIsInstance(self.resource, v1_sessions.SessionsJob)
def test_on_put_adds_job_to_session_jobs(self):
session = get_fake_session_0()
job = get_fake_job_0()
job_info = {job['job_id']: {'client_id': job['client_id'],
'status': job['job_schedule']['status'],
'result': job['job_schedule']['result'],
'time_started': job['job_schedule']['time_started'],
'time_ended': job['job_schedule']['time_ended']}}
session_update_doc = {'jobs': job_info}
self.mock_db.get_session.return_value = session
self.mock_db.get_job.return_value = job
self.resource.on_put(self.mock_req, self.mock_req,
session['session_id'],
job['job_id'])
self.mock_db.update_session.assert_called_with(user_id=session['user_id'],
session_id=session['session_id'],
patch_doc=session_update_doc)
def test_on_put_updates_job_with_session_info(self):
session = get_fake_session_0()
job = get_fake_job_0()
self.mock_db.get_session.return_value = session
self.mock_db.get_job.return_value = job
job_update_doc = {
'session_id': session['session_id'],
'session_tag': session['session_tag'],
'job_schedule': session['schedule']
}
self.resource.on_put(self.mock_req, self.mock_req,
session['session_id'],
job['job_id'])
self.mock_db.update_job.assert_called_with(user_id=session['user_id'],
job_id=job['job_id'],
patch_doc=job_update_doc)
def test_on_delete_removes_job_from_session_jobs(self):
session = get_fake_session_0()
updated_session = get_fake_session_1()
job = get_fake_job_0()
self.mock_db.get_session.return_value = session
self.mock_db.get_job.return_value = job
self.resource.on_delete(self.mock_req, self.mock_req,
session['session_id'],
'job_id_2')
self.mock_db.replace_session.assert_called_with(user_id=session['user_id'],
session_id=session['session_id'],
doc=updated_session)
def test_on_delete_removes_session_info_from_job_and_stops_job(self):
session = get_fake_session_0()
job = get_fake_job_0()
self.mock_db.get_session.return_value = session
self.mock_db.get_job.return_value = job
job_update_doc = {
'session_id': '',
'session_tag': '',
'job_event': 'stop'
}
self.resource.on_delete(self.mock_req, self.mock_req,
session['session_id'],
job['job_id'])
self.mock_db.update_job.assert_called_with(user_id=session['user_id'],
job_id=job['job_id'],
patch_doc=job_update_doc)

View File

@ -30,10 +30,10 @@ from common import *
DATA_backup_metadata = {
"container": "freezer_container",
"host_name": "alpha",
"hostname": "alpha",
"backup_name": "important_data_backup",
"timestamp": 12341234,
"level": 0,
"time_stamp": 12341234,
"curr_backup_level": 0,
"backup_session": 12341234,
"max_level": 5,
"mode" : "fs",
@ -65,10 +65,10 @@ DATA_wrapped_backup_metadata = {
'backup_id': DATA_backup_id,
'backup_medatada': {
"container": "freezer_container",
"host_name": "alpha",
"hostname": "alpha",
"backup_name": "important_data_backup",
"timestamp": 12341234,
"level": 0,
"time_stamp": 12341234,
"curr_backup_level": 0,
"backup_session": 12341234,
"max_level": 5,
"mode": "fs",
@ -184,3 +184,113 @@ class TestJobDoc(unittest.TestCase):
self.assertRaises(BadDataFormat, utils.JobDoc.create, job_doc, 'dude')
class TestActionDoc(unittest.TestCase):
def test_validate_ok_when_data_ok(self):
action_doc = get_fake_action_0()
res = utils.ActionDoc.validate(action_doc)
self.assertIsNone(res)
def test_validate_raises_BadDataFormat_when_doc_has_no_actionid(self):
action_doc = get_fake_action_0()
action_doc.pop('action_id')
self.assertRaises(BadDataFormat, utils.ActionDoc.validate, action_doc)
def test_validate_raises_BadDataFormat_when_doc_has_no_userid(self):
action_doc = get_fake_action_0()
action_doc.pop('user_id')
self.assertRaises(BadDataFormat, utils.ActionDoc.validate, action_doc)
def test_validate_raises_BadDataFormat_when_doc_has_invalid_field(self):
action_doc = get_fake_action_0()
action_doc['action_id'] = 44
self.assertRaises(BadDataFormat, utils.ActionDoc.validate, action_doc)
def test_validate_patch_raises_when_doc_has_invalid_field(self):
action_doc = get_fake_action_0()
action_doc['action_id'] = 44
self.assertRaises(BadDataFormat, utils.ActionDoc.validate_patch, action_doc)
def test_createpatch_pops_actionid_and_userid(self):
action_doc = get_fake_action_0()
res_doc = utils.ActionDoc.create_patch(action_doc)
self.assertFalse('action_id' in res_doc)
self.assertFalse('user_id' in res_doc)
def test_createpatch_raises_BadDataFormat_when_patch_has_invalid_field(self):
action_doc = get_fake_action_0()
action_doc['action'] = 44
self.assertRaises(BadDataFormat, utils.ActionDoc.create_patch, action_doc)
@patch('freezer_api.common.utils.uuid')
def test_create_inserts_correct_uuid(self, mock_uuid):
mock_uuid.uuid4.return_value = mock_uuid
mock_uuid.hex = 'hotforteacher'
action_doc = get_fake_action_0()
res_doc = utils.ActionDoc.create(action_doc, 'dude')
self.assertEqual(res_doc['user_id'], 'dude')
self.assertEqual(res_doc['action_id'], 'hotforteacher')
@patch('freezer_api.common.utils.uuid')
def test_create_raises_BadDataFormat_when_isvalid_fails(self, mock_uuid):
mock_uuid.uuid4.return_value = mock_uuid
mock_uuid.hex = 'hotforteacher'
action_doc = get_fake_action_0()
action_doc['action'] = 44
self.assertRaises(BadDataFormat, utils.ActionDoc.create, action_doc, 'dude')
class TestSessionDoc(unittest.TestCase):
def test_validate_ok_when_data_ok(self):
session_doc = get_fake_session_0()
res = utils.SessionDoc.validate(session_doc)
self.assertIsNone(res)
def test_validate_raises_BadDataFormat_when_doc_has_no_sessionid(self):
session_doc = get_fake_session_0()
session_doc.pop('session_id')
self.assertRaises(BadDataFormat, utils.SessionDoc.validate, session_doc)
def test_validate_raises_BadDataFormat_when_doc_has_no_userid(self):
session_doc = get_fake_session_0()
session_doc.pop('user_id')
self.assertRaises(BadDataFormat, utils.SessionDoc.validate, session_doc)
def test_validate_raises_BadDataFormat_when_doc_has_invalid_field(self):
session_doc = get_fake_session_0()
session_doc['session_id'] = 44
self.assertRaises(BadDataFormat, utils.SessionDoc.validate, session_doc)
def test_validate_patch_raises_when_doc_has_invalid_field(self):
session_doc = get_fake_session_0()
session_doc['session_id'] = 44
self.assertRaises(BadDataFormat, utils.SessionDoc.validate_patch, session_doc)
def test_createpatch_pops_sessionid_and_userid(self):
session_doc = get_fake_session_0()
res_doc = utils.SessionDoc.create_patch(session_doc)
self.assertFalse('session_id' in res_doc)
self.assertFalse('user_id' in res_doc)
def test_createpatch_raises_BadDataFormat_when_patch_has_invalid_field(self):
session_doc = get_fake_session_0()
session_doc['session_tag'] = 'ouch'
self.assertRaises(BadDataFormat, utils.SessionDoc.create_patch, session_doc)
@patch('freezer_api.common.utils.uuid')
def test_create_inserts_correct_uuid(self, mock_uuid):
mock_uuid.uuid4.return_value = mock_uuid
mock_uuid.hex = 'hotforteacher'
session_doc = get_fake_session_0()
res_doc = utils.SessionDoc.create(session_doc, 'dude')
self.assertEqual(res_doc['user_id'], 'dude')
self.assertEqual(res_doc['session_id'], 'hotforteacher')
@patch('freezer_api.common.utils.uuid')
def test_create_raises_BadDataFormat_when_isvalid_fails(self, mock_uuid):
mock_uuid.uuid4.return_value = mock_uuid
mock_uuid.hex = 'hotforteacher'
session_doc = get_fake_session_0()
session_doc['time_started'] = 'ouch'
self.assertRaises(BadDataFormat, utils.SessionDoc.create, session_doc, 'dude')
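Both document-helper suites exercise the same pattern: jsonschema validation (jsonschema is added to the test deps below) plus server-side ID assignment via uuid4().hex. A sketch of the shape they imply, with an empty stand-in schema where the commit ships real ones:

import uuid

import jsonschema

from freezer_api.common.exceptions import BadDataFormat


class ActionDocSketch(object):
    # hypothetical reduction of utils.ActionDoc; SessionDoc mirrors it
    schema = {}            # stand-in: the real schema rejects bad fields

    @classmethod
    def validate(cls, doc):
        try:
            jsonschema.validate(doc, cls.schema)
        except Exception:
            raise BadDataFormat('action document failed validation')

    @classmethod
    def create_patch(cls, doc):
        # IDs are server-controlled, so a client patch may not carry them
        doc.pop('action_id', None)
        doc.pop('user_id', None)
        cls.validate(doc)   # sketch: the real code uses a patch schema
        return doc

    @classmethod
    def create(cls, doc, user_id):
        doc['user_id'] = user_id
        doc['action_id'] = uuid.uuid4().hex
        cls.validate(doc)
        return doc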

View File

@ -14,6 +14,7 @@ deps =
falcon
keystonemiddleware
elasticsearch
jsonschema
install_command = pip install -U {opts} {packages}
setenv = VIRTUAL_ENV={envdir}

View File

@ -7,6 +7,8 @@ python-novaclient>=2.21.0
docutils>=0.8.1
pymysql
pymongo
apscheduler
pep3143daemon
[testing]
pytest

View File

@ -92,7 +92,7 @@ setup(
},
entry_points={
'console_scripts': [
'freezerc=freezer.main:freezer_main'
'freezer-scheduler=freezer.scheduler.freezer_scheduler:main'
]
},
data_files=[('freezer/scripts', ['freezer/scripts/vss.ps1']),

View File

@ -0,0 +1,132 @@
"""
Copyright 2015 Hewlett-Packard
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
This product includes cryptographic software written by Eric Young
(eay@cryptsoft.com). This product includes software written by Tim
Hudson (tjh@cryptsoft.com).
========================================================================
"""
import unittest
from mock import Mock, patch
from freezer.apiclient import exceptions
from freezer.apiclient import actions
class TestActionManager(unittest.TestCase):
def setUp(self):
self.mock_client = Mock()
self.mock_response = Mock()
self.mock_client.endpoint = 'http://testendpoint:9999'
self.mock_client.auth_token = 'testtoken'
self.mock_client.client_id = 'test_client_id_78900987'
self.action_manager = actions.ActionManager(self.mock_client)
@patch('freezer.apiclient.actions.requests')
def test_create(self, mock_requests):
self.assertEqual(self.action_manager.endpoint, 'http://testendpoint:9999/v1/actions/')
self.assertEqual(self.action_manager.headers, {'X-Auth-Token': 'testtoken'})
@patch('freezer.apiclient.actions.requests')
def test_create_ok(self, mock_requests):
self.mock_response.status_code = 201
self.mock_response.json.return_value = {'action_id': 'qwerqwer'}
mock_requests.post.return_value = self.mock_response
retval = self.action_manager.create({'action': 'metadata'})
self.assertEqual(retval, 'qwerqwer')
@patch('freezer.apiclient.actions.requests')
def test_create_fail_when_api_return_error_code(self, mock_requests):
self.mock_response.status_code = 500
mock_requests.post.return_value = self.mock_response
self.assertRaises(exceptions.ApiClientException, self.action_manager.create, {'action': 'metadata'})
@patch('freezer.apiclient.actions.requests')
def test_delete_ok(self, mock_requests):
self.mock_response.status_code = 204
mock_requests.delete.return_value = self.mock_response
retval = self.action_manager.delete('test_action_id')
self.assertIsNone(retval)
@patch('freezer.apiclient.actions.requests')
def test_delete_fail(self, mock_requests):
self.mock_response.status_code = 500
mock_requests.delete.return_value = self.mock_response
self.assertRaises(exceptions.ApiClientException, self.action_manager.delete, 'test_action_id')
@patch('freezer.apiclient.actions.requests')
def test_get_ok(self, mock_requests):
self.mock_response.status_code = 200
self.mock_response.json.return_value = {'action_id': 'qwerqwer'}
mock_requests.get.return_value = self.mock_response
retval = self.action_manager.get('test_action_id')
self.assertEqual(retval, {'action_id': 'qwerqwer'})
@patch('freezer.apiclient.actions.requests')
def test_get_fails_on_error_different_from_404(self, mock_requests):
self.mock_response.status_code = 500
mock_requests.get.return_value = self.mock_response
self.assertRaises(exceptions.ApiClientException, self.action_manager.get, 'test_action_id')
@patch('freezer.apiclient.actions.requests')
def test_get_none(self, mock_requests):
self.mock_response.status_code = 404
mock_requests.get.return_value = self.mock_response
retval = self.action_manager.get('test_action_id')
self.assertIsNone(retval)
@patch('freezer.apiclient.actions.requests')
def test_list_ok(self, mock_requests):
self.mock_response.status_code = 200
action_list = [{'action_id_0': 'bomboloid'}, {'action_id_1': 'asdfasdf'}]
self.mock_response.json.return_value = {'actions': action_list}
mock_requests.get.return_value = self.mock_response
retval = self.action_manager.list()
self.assertEqual(retval, action_list)
@patch('freezer.apiclient.actions.requests')
def test_list_error(self, mock_requests):
self.mock_response.status_code = 404
action_list = [{'action_id_0': 'bomboloid'}, {'action_id_1': 'asdfasdf'}]
self.mock_response.json.return_value = {'clients': action_list}
mock_requests.get.return_value = self.mock_response
self.assertRaises(exceptions.ApiClientException, self.action_manager.list)
@patch('freezer.apiclient.actions.requests')
def test_update_ok(self, mock_requests):
self.mock_response.status_code = 200
self.mock_response.json.return_value = {
"patch": {"status": "bamboozled"},
"version": 12,
"action_id": "d454beec-1f3c-4d11-aa1a-404116a40502"
}
mock_requests.patch.return_value = self.mock_response
retval = self.action_manager.update('d454beec-1f3c-4d11-aa1a-404116a40502', {'status': 'bamboozled'})
self.assertEqual(retval, 12)
@patch('freezer.apiclient.actions.requests')
def test_update_raise_MetadataUpdateFailure_when_api_return_error_code(self, mock_requests):
self.mock_response.json.return_value = {
"patch": {"status": "bamboozled"},
"version": 12,
"action_id": "d454beec-1f3c-4d11-aa1a-404116a40502"
}
self.mock_response.status_code = 404
self.mock_response.text = '{"title": "Not Found","description":"No document found with ID d454beec-1f3c-4d11-aa1a-404116a40502x"}'
mock_requests.patch.return_value = self.mock_response
self.assertRaises(exceptions.ApiClientException, self.action_manager.update,
'd454beec-1f3c-4d11-aa1a-404116a40502', {'status': 'bamboozled'})

View File

@ -87,6 +87,13 @@ class TestRegistrationManager(unittest.TestCase):
retval = self.r.get('test_client_id')
self.assertIsNone(retval)
@patch('freezer.apiclient.registration.requests')
def test_get_raises_ApiClientException_on_error_not_404(self, mock_requests):
mock_response = Mock()
mock_response.status_code = 500
mock_requests.get.return_value = mock_response
self.assertRaises(exceptions.ApiClientException, self.r.get, 'test_client_id')
@patch('freezer.apiclient.registration.requests')
def test_list_ok(self, mock_requests):
mock_response = Mock()

View File

@ -0,0 +1,227 @@
"""
Copyright 2015 Hewlett-Packard
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
This product includes cryptographic software written by Eric Young
(eay@cryptsoft.com). This product includes software written by Tim
Hudson (tjh@cryptsoft.com).
========================================================================
"""
import json
import unittest
from mock import Mock, patch
from freezer.apiclient import exceptions
from freezer.apiclient import sessions
class TestSessionManager(unittest.TestCase):
def setUp(self):
self.mock_client = Mock()
self.mock_response = Mock()
self.mock_client.endpoint = 'http://testendpoint:9999'
self.mock_client.auth_token = 'testtoken'
self.mock_client.client_id = 'test_client_id_78900987'
self.session_manager = sessions.SessionManager(self.mock_client)
self.endpoint = 'http://testendpoint:9999/v1/sessions/'
self.headers = {'X-Auth-Token': 'testtoken'}
@patch('freezer.apiclient.sessions.requests')
def test_create(self, mock_requests):
self.assertEqual(self.session_manager.endpoint, self.endpoint)
self.assertEqual(self.session_manager.headers, self.headers)
@patch('freezer.apiclient.sessions.requests')
def test_create_ok(self, mock_requests):
self.mock_response.status_code = 201
self.mock_response.json.return_value = {'session_id': 'qwerqwer'}
mock_requests.post.return_value = self.mock_response
retval = self.session_manager.create({'session': 'metadata'})
self.assertEqual(retval, 'qwerqwer')
@patch('freezer.apiclient.sessions.requests')
def test_create_raise_ApiClientException_when_api_return_error_code(self, mock_requests):
self.mock_response.status_code = 500
mock_requests.post.return_value = self.mock_response
self.assertRaises(exceptions.ApiClientException, self.session_manager.create, {'session': 'metadata'})
@patch('freezer.apiclient.sessions.requests')
def test_delete_ok(self, mock_requests):
self.mock_response.status_code = 204
mock_requests.delete.return_value = self.mock_response
retval = self.session_manager.delete('test_session_id')
self.assertIsNone(retval)
@patch('freezer.apiclient.sessions.requests')
def test_delete_raise_ApiClientException_when_api_return_error_code(self, mock_requests):
self.mock_response.status_code = 500
mock_requests.delete.return_value = self.mock_response
self.assertRaises(exceptions.ApiClientException, self.session_manager.delete, 'test_session_id')
@patch('freezer.apiclient.sessions.requests')
def test_get_ok(self, mock_requests):
self.mock_response.status_code = 200
self.mock_response.json.return_value = {'session_id': 'qwerqwer'}
mock_requests.get.return_value = self.mock_response
retval = self.session_manager.get('test_session_id')
self.assertEqual(retval, {'session_id': 'qwerqwer'})
@patch('freezer.apiclient.sessions.requests')
def test_get_raise_ApiClientException_when_api_return_error_different_from_404(self, mock_requests):
self.mock_response.status_code = 500
mock_requests.get.return_value = self.mock_response
self.assertRaises(exceptions.ApiClientException, self.session_manager.get, 'test_session_id')
@patch('freezer.apiclient.sessions.requests')
def test_get_none(self, mock_requests):
self.mock_response.status_code = 404
mock_requests.get.return_value = self.mock_response
retval = self.session_manager.get('test_session_id')
self.assertIsNone(retval)
@patch('freezer.apiclient.sessions.requests')
def test_list_ok(self, mock_requests):
self.mock_response.status_code = 200
session_list = [{'session_id_0': 'bomboloid'}, {'session_id_1': 'asdfasdf'}]
self.mock_response.json.return_value = {'sessions': session_list}
mock_requests.get.return_value = self.mock_response
retval = self.session_manager.list()
self.assertEqual(retval, session_list)
@patch('freezer.apiclient.sessions.requests')
def test_list_raise_ApiClientException_when_api_return_error_code(self, mock_requests):
self.mock_response.status_code = 404
session_list = [{'session_id_0': 'bomboloid'}, {'session_id_1': 'asdfasdf'}]
self.mock_response.json.return_value = {'clients': session_list}
mock_requests.get.return_value = self.mock_response
self.assertRaises(exceptions.ApiClientException, self.session_manager.list)
@patch('freezer.apiclient.sessions.requests')
def test_update_ok(self, mock_requests):
self.mock_response.status_code = 200
self.mock_response.json.return_value = {
"patch": {"status": "bamboozled"},
"version": 12,
"session_id": "d454beec-1f3c-4d11-aa1a-404116a40502"
}
mock_requests.patch.return_value = self.mock_response
retval = self.session_manager.update('d454beec-1f3c-4d11-aa1a-404116a40502', {'status': 'bamboozled'})
self.assertEqual(retval, 12)
@patch('freezer.apiclient.sessions.requests')
def test_update_raise_ApiClientException_when_api_return_error_code(self, mock_requests):
self.mock_response.json.return_value = {
"patch": {"status": "bamboozled"},
"version": 12,
"session_id": "d454beec-1f3c-4d11-aa1a-404116a40502"
}
self.mock_response.status_code = 404
self.mock_response.text = '{"title": "Not Found","description":"No document found with ID d454beec-1f3c-4d11-aa1a-404116a40502x"}'
mock_requests.patch.return_value = self.mock_response
self.assertRaises(exceptions.ApiClientException, self.session_manager.update,
'd454beec-1f3c-4d11-aa1a-404116a40502', {'status': 'bamboozled'})
@patch('freezer.apiclient.sessions.requests')
def test_add_job_uses_proper_endpoint(self, mock_requests):
session_id, job_id = 'sessionqwerty1234', 'jobqwerty1234'
self.mock_response.status_code = 204
mock_requests.put.return_value = self.mock_response
endpoint = '{0}{1}/jobs/{2}'.format(self.endpoint, session_id, job_id)
retval = self.session_manager.add_job(session_id, job_id)
self.assertIsNone(retval)
mock_requests.put.assert_called_with(endpoint, headers=self.headers)
@patch('freezer.apiclient.sessions.requests')
def test_add_job_raise_ApiClientException_when_api_return_error_code(self, mock_requests):
session_id, job_id = 'sessionqwerty1234', 'jobqwerty1234'
self.mock_response.status_code = 500
mock_requests.put.return_value = self.mock_response
self.assertRaises(exceptions.ApiClientException, self.session_manager.add_job, session_id, job_id)
@patch('freezer.apiclient.sessions.requests')
def test_remove_job_uses_proper_endpoint(self, mock_requests):
session_id, job_id = 'sessionqwerty1234', 'jobqwerty1234'
self.mock_response.status_code = 204
mock_requests.delete.return_value = self.mock_response
endpoint = '{0}{1}/jobs/{2}'.format(self.endpoint, session_id, job_id)
retval = self.session_manager.remove_job(session_id, job_id)
self.assertIsNone(retval)
mock_requests.delete.assert_called_with(endpoint, headers=self.headers)
@patch('freezer.apiclient.sessions.requests')
def test_remove_job_raise_ApiClientException_when_api_return_error_code(self, mock_requests):
session_id, job_id = 'sessionqwerty1234', 'jobqwerty1234'
self.mock_response.status_code = 500
mock_requests.delete.return_value = self.mock_response
self.assertRaises(exceptions.ApiClientException, self.session_manager.remove_job, session_id, job_id)
@patch('freezer.apiclient.sessions.requests')
def test_start_session_posts_proper_data(self, mock_requests):
session_id, job_id, tag = 'sessionqwerty1234', 'jobqwerty1234', 23
self.mock_response.status_code = 202
self.mock_response.json.return_value = {'result': 'success', 'session_tag': 24}
mock_requests.post.return_value = self.mock_response
# /v1/sessions/{session_id}/action
endpoint = '{0}{1}/action'.format(self.endpoint, session_id)
data = {"start": {"current_tag": 23, "job_id": "jobqwerty1234"}}
retval = self.session_manager.start_session(session_id, job_id, tag)
self.assertEqual(retval, {'result': 'success', 'session_tag': 24})
args = mock_requests.post.call_args[0]
kwargs = mock_requests.post.call_args[1]
self.assertEqual(endpoint, args[0])
self.assertEqual(data, json.loads(kwargs['data']))
self.assertEqual(self.headers, kwargs['headers'])
@patch('freezer.apiclient.sessions.requests')
def test_start_session_raise_ApiClientException_when_api_return_error_code(self, mock_requests):
session_id, job_id, tag = 'sessionqwerty1234', 'jobqwerty1234', 23
self.mock_response.status_code = 500
self.mock_response.json.return_value = {'result': 'success', 'session_tag': 24}
mock_requests.post.return_value = self.mock_response
self.assertRaises(exceptions.ApiClientException, self.session_manager.start_session,
session_id, job_id, tag)
@patch('freezer.apiclient.sessions.requests')
def test_end_session_posts_proper_data(self, mock_requests):
session_id, job_id, tag = 'sessionqwerty1234', 'jobqwerty1234', 23
self.mock_response.status_code = 202
self.mock_response.json.return_value = {'result': 'success', 'session_tag': 24}
mock_requests.post.return_value = self.mock_response
# /v1/sessions/{session_id}/action
endpoint = '{0}{1}/action'.format(self.endpoint, session_id)
data = {"end": {"current_tag": 23, "job_id": "jobqwerty1234", "result": "fail"}}
retval = self.session_manager.end_session(session_id, job_id, tag, 'fail')
self.assertEqual(retval, {'result': 'success', 'session_tag': 24})
args = mock_requests.post.call_args[0]
kwargs = mock_requests.post.call_args[1]
self.assertEqual(endpoint, args[0])
self.assertEqual(data, json.loads(kwargs['data']))
self.assertEqual(self.headers, kwargs['headers'])
@patch('freezer.apiclient.sessions.requests')
def test_end_session_raise_ApiClientException_when_api_return_error_code(self, mock_requests):
session_id, job_id, tag = 'sessionqwerty1234', 'jobqwerty1234', 23
self.mock_response.status_code = 500
self.mock_response.json.return_value = {'result': 'success', 'session_tag': 24}
mock_requests.post.return_value = self.mock_response
self.assertRaises(exceptions.ApiClientException, self.session_manager.end_session,
session_id, job_id, tag, 'fail')
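# --- Editor's illustrative sketch; not part of the original change ---
# The calls mocked above combine into a session lifecycle roughly like the
# one below. `client` is a hypothetical stand-in exposing `endpoint` and
# `auth_token`, which is all these tests stub out; `sessions` is the module
# already imported by this test file.
def _session_lifecycle_sketch(client, job_id):
    manager = sessions.SessionManager(client)
    # create() returns the new session_id (see test_create_ok above)
    session_id = manager.create({'description': 'nightly backups'})
    manager.add_job(session_id, job_id)
    # start_session() returns a document carrying the new session_tag
    started = manager.start_session(session_id, job_id, tag=0)
    manager.end_session(session_id, job_id, started['session_tag'], 'success')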

View File

@ -0,0 +1,84 @@
# Copyright 2015 Hewlett-Packard
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
import unittest
from mock import Mock, patch
from freezer.scheduler import arguments
class TestOpenstackOptions(unittest.TestCase):
def setUp(self):
self.args = Mock()
self.args.os_username = 'janedoe'
self.args.os_tenant_name = 'hertenant'
self.args.os_auth_url = 'herauthurl'
self.args.os_password = 'herpassword'
self.args.os_tenant_id = 'hertenantid'
self.args.os_region_name = 'herregion'
self.args.os_endpoint = 'herpublicurl'
self.empty_args = Mock()
self.empty_args.os_username = ''
self.empty_args.os_tenant_name = ''
self.empty_args.os_auth_url = ''
self.empty_args.os_password = ''
self.empty_args.os_tenant_id = ''
self.empty_args.os_region_name = ''
self.empty_args.os_endpoint = ''
self.env_dict = {
'OS_USERNAME': 'johndoe',
'OS_TENANT_NAME': 'histenant',
'OS_AUTH_URL': 'hisauthurl',
'OS_PASSWORD': 'hispassword',
'OS_TENANT_ID': 'histenantid',
'OS_REGION_NAME': 'hisregion',
'OS_SERVICE_ENDPOINT': 'hispublicurl'
}
def test_create_with_args_and_env(self):
os = arguments.OpenstackOptions(self.args, self.env_dict)
self.assertIsInstance(os, arguments.OpenstackOptions)
def test_create_with_empty_args_and_populated_env(self):
os = arguments.OpenstackOptions(self.empty_args, self.env_dict)
self.assertIsInstance(os, arguments.OpenstackOptions)
def test_create_with_args_and_empty_env(self):
os = arguments.OpenstackOptions(self.args, {})
self.assertIsInstance(os, arguments.OpenstackOptions)
def test_create_raises_Exception_when_missing_username(self):
self.args.os_username = ''
self.assertRaises(Exception, arguments.OpenstackOptions, self.args, {})
def test_create_raises_Exception_when_missing_password(self):
self.args.os_password = ''
self.assertRaises(Exception, arguments.OpenstackOptions, self.args, {})
def test_str(self):
os = arguments.OpenstackOptions(self.args, self.env_dict)
s = str(os)
self.assertIsInstance(s, str)
class TestGetArgs(unittest.TestCase):
@patch('freezer.scheduler.arguments.argparse.ArgumentParser')
def test_get_args_calls_add_argument(self, mock_ArgumentParser):
mock_arg_parser = Mock()
mock_ArgumentParser.return_value = mock_arg_parser
arguments.get_args(['alpha', 'bravo'])
call_count = mock_arg_parser.add_argument.call_count
self.assertGreater(call_count, 15)
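# --- Editor's illustrative sketch; not part of the original change ---
# The assertions above imply OpenstackOptions takes explicit arguments
# first and falls back to the environment, raising when a required field
# such as username or password resolves to nothing. A plausible shape,
# not the actual implementation:
#
#     username = args.os_username or env.get('OS_USERNAME', '')
#     password = args.os_password or env.get('OS_PASSWORD', '')
#     if not (username and password):
#         raise Exception('missing required openstack credentials')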

View File

@ -0,0 +1,56 @@
# Copyright 2015 Hewlett-Packard
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
import unittest
from mock import Mock, patch
import signal
from freezer.scheduler import daemon
class TestDaemon(unittest.TestCase):
def setUp(self):
self.daemonizable = Mock()
self.daemon = daemon.Daemon(daemonizable=self.daemonizable)
def test_create(self):
self.assertIsInstance(self.daemon, daemon.Daemon)
@patch('freezer.scheduler.daemon.logging')
def test_setup_logging_default(self, mock_logging):
res = self.daemon.setup_logging(None)
self.assertEqual(res, '/var/log/freezer-scheduler.log')
@patch('freezer.scheduler.daemon.create_dir')
@patch('freezer.scheduler.daemon.logging')
def test_setup_logging_userdefined(self, mock_logging, mock_createdir):
res = self.daemon.setup_logging('mylogfile')
self.assertEqual(res, 'mylogfile')
def test_handle_program_exit_calls_scheduler_stop(self):
self.daemon.handle_program_exit(Mock(), Mock())
self.daemonizable.stop.assert_called_with()
def test_handle_program_reload_calls_scheduler_reload(self):
self.daemon.handle_reload(Mock(), Mock())
self.daemonizable.reload.assert_called_with()
def test_signal_map_handlers(self):
signal_map = self.daemon.signal_map
self.assertEqual(signal_map[signal.SIGTERM], self.daemon.handle_program_exit)
self.assertEqual(signal_map[signal.SIGHUP], self.daemon.handle_reload)
@patch('freezer.scheduler.daemon.gettempdir')
@patch('freezer.scheduler.daemon.os.path.expanduser')
def test_pid_fname_in_tempdir(self, mock_expanduser, mock_gettempdir):
mock_expanduser.return_value = '/home/chet'
mock_gettempdir.return_value = '/tempus_fugit'
retval = self.daemon.pid_fname
self.assertEqual(retval, '/tempus_fugit/freezer_sched_chet.pid')
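# --- Editor's illustrative sketch; not part of the original change ---
# The signal wiring pinned down above has the shape a PEP 3143 daemon
# context (e.g. pep3143daemon, added to the test deps in this change)
# typically consumes; an inference from these tests, not the module's code:
#
#     signal_map = {
#         signal.SIGTERM: daemon.handle_program_exit,  # stop the scheduler
#         signal.SIGHUP: daemon.handle_reload,         # reload job configs
#     }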

View File

@ -13,6 +13,8 @@ deps =
pymysql
python-openstackclient
mock
pep3143daemon
apscheduler
pylint>=1.3.1
install_command = pip install -U {opts} {packages}