Allow for multiple jobs to be executed at the same time

Change-Id: I2fa6f989a21f7964cc573010f58d68ad003788d5
Pierre-Arthur MATHIEU 2017-02-27 15:16:05 +00:00
parent 1a58dd23f5
commit ffd99c4a2e
6 changed files with 61 additions and 48 deletions

View File

@@ -136,3 +136,9 @@
# Initialize freezer scheduler with insecure mode (boolean value)
#insecure = false
# Number of jobs that can be executed at the same time. (integer value)
# By default only one job is allowed at a given time because there is no
# built-in protection to prevent a backup and a restore from running at the
# same time on the same resource.
#concurrent_jobs = 1
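
For illustration only (not part of the commit): an operator who is sure their jobs never act on the same resource could raise the limit in the scheduler configuration file, e.g.

# allow up to three jobs to run in parallel on this scheduler node
concurrent_jobs = 3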

View File

@@ -98,6 +98,11 @@ def get_common_opts():
dest='disable_exec',
help='Allow Freezer Scheduler to deny jobs that execute '
'commands for security reasons'),
cfg.IntOpt('concurrent_jobs',
default=1,
dest='concurrent_jobs',
help='Number of jobs that can be executed at the'
' same time'),
]
return _COMMON
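
For context, a minimal standalone sketch of how an oslo.config IntOpt like this one is registered and read back. Freezer itself wires the option up through get_common_opts() and its normal config loading; the direct registration below is illustration only.

from oslo_config import cfg

# Standalone illustration: register a look-alike of the new option
# directly instead of going through get_common_opts().
_opts = [
    cfg.IntOpt('concurrent_jobs',
               default=1,
               help='Number of jobs that can be executed at the same time'),
]

CONF = cfg.CONF
CONF.register_opts(_opts)
CONF([])                      # parse an empty command line, defaults apply
print(CONF.concurrent_jobs)   # -> 1 unless overridden in a config file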

View File

@@ -43,7 +43,7 @@ LOG = log.getLogger(__name__)
class FreezerScheduler(object):
def __init__(self, apiclient, interval, job_path):
def __init__(self, apiclient, interval, job_path, concurrent_jobs=1):
# config_manager
self.client = apiclient
self.freezerc_executable = spawn.find_executable('freezer-agent')
@@ -56,14 +56,14 @@ class FreezerScheduler(object):
self.job_path = job_path
self._client = None
self.lock = threading.Lock()
self.execution_lock = threading.Lock()
job_defaults = {
'coalesce': True,
'max_instances': 2
'max_instances': 1
}
executors = {
'default': {'type': 'threadpool', 'max_workers': 1},
'threadpool': {'type': 'threadpool', 'max_workers': 10}
'threadpool': {'type': 'threadpool',
'max_workers': concurrent_jobs}
}
self.scheduler = background.BackgroundScheduler(
job_defaults=job_defaults,
@@ -226,7 +226,8 @@ def main():
freezer_utils.create_dir(CONF.jobs_dir, do_log=False)
freezer_scheduler = FreezerScheduler(apiclient=apiclient,
interval=int(CONF.interval),
job_path=CONF.jobs_dir)
job_path=CONF.jobs_dir,
concurrent_jobs=CONF.concurrent_jobs)
if CONF.no_daemon:
print('Freezer Scheduler running in no-daemon mode')
@@ -240,7 +241,8 @@ def main():
daemon = win_daemon.Daemon(daemonizable=freezer_scheduler,
interval=int(CONF.interval),
job_path=CONF.jobs_dir,
insecure=CONF.insecure)
insecure=CONF.insecure,
concurrent_jobs=CONF.concurrent_jobs)
else:
daemon = linux_daemon.Daemon(daemonizable=freezer_scheduler)
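
The actual concurrency cap is enforced by APScheduler: jobs submitted to the 'threadpool' executor share a pool of max_workers threads, so at most concurrent_jobs of them run at once. A minimal sketch of that mechanism outside freezer follows; the job function, timings and ids are made up.

from time import sleep
from apscheduler.schedulers.background import BackgroundScheduler

concurrent_jobs = 2          # would come from CONF.concurrent_jobs

executors = {
    'default': {'type': 'threadpool', 'max_workers': 1},
    'threadpool': {'type': 'threadpool', 'max_workers': concurrent_jobs},
}
job_defaults = {'coalesce': True, 'max_instances': 1}

scheduler = BackgroundScheduler(executors=executors,
                                job_defaults=job_defaults)

def run_job(job_id):
    print('job {0} running'.format(job_id))
    sleep(5)

# Four jobs share the 'threadpool' executor, so at most two of them
# execute simultaneously; the rest wait for a free worker thread.
for i in range(4):
    scheduler.add_job(run_job, 'interval', seconds=10, args=[i],
                      id=str(i), executor='threadpool')

scheduler.start()
sleep(30)
scheduler.shutdown()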

View File

@@ -382,8 +382,6 @@ class Job(object):
' retrying in {2} seconds'
.format(self.id, action_name,
max_retries_interval))
# sleeping with the bloody lock, but we don't want other
# actions to mess with our stuff like fs snapshots, do we ?
time.sleep(max_retries_interval)
else:
# SUCCESS
@@ -419,7 +417,6 @@ class Job(object):
def execute(self):
result = Job.SUCCESS_RESULT
with self.scheduler.execution_lock:
with self.scheduler.lock:
LOG.info('job {0} running'.format(self.id))
self.state = RunningState

View File

@@ -62,7 +62,7 @@ class Daemon(object):
instance
"""
def __init__(self, daemonizable=None, interval=None, job_path=None,
insecure=False):
insecure=False, concurrent_jobs=1):
self.service_name = 'FreezerService'
self.home = r'C:\.freezer'
# this is only needed in order to have the same interface as in linux
@@ -70,6 +70,7 @@ class Daemon(object):
self.interval = interval or 60
self.job_path = job_path or r'C:\.freezer\scheduler\conf.d'
self.insecure = insecure
self.concurrent_jobs = concurrent_jobs
@utils.shield
def start(self, log_file=None):
@@ -85,6 +86,7 @@
# send arguments info to the windows service
os.environ['SERVICE_JOB_PATH'] = self.job_path
os.environ['SERVICE_INTERVAL'] = str(self.interval)
os.environ['SERVICE_CONCURRENT_JOBS'] = str(self.concurrent_jobs)
winutils.save_environment(self.home)

View File

@@ -99,7 +99,8 @@ class PySvc(win32serviceutil.ServiceFramework):
scheduler = FreezerScheduler(
apiclient=client, interval=int(os.environ['SERVICE_INTERVAL']),
job_path=os.environ['SERVICE_JOB_PATH'])
job_path=os.environ['SERVICE_JOB_PATH'],
concurrent_jobs=int(os.environ['SERVICE_CONCURRENT_JOBS']))
scheduler.start()
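
On Windows the new setting travels from the daemon wrapper to the service process as the SERVICE_CONCURRENT_JOBS environment variable; since environment variables are strings, it is serialized with str() on the way out and parsed back with int() here before FreezerScheduler is constructed.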