Move CacheStorage to its own module.
Also rename it to JobCache: the word "Cache" already implies "Storage", so it makes more sense to name the class after the things that it stores — Jobs. Change-Id: If7a7a56e8f6a3637f62e285eaa7ba989f67002dc
This commit is contained in:
parent
e75062d103
commit
e9a295b1fb
@ -23,13 +23,12 @@ import operator
|
||||
import os
|
||||
from pprint import pformat
|
||||
import re
|
||||
import tempfile
|
||||
import time
|
||||
import xml.etree.ElementTree as XML
|
||||
import yaml
|
||||
|
||||
import jenkins
|
||||
|
||||
from jenkins_jobs.cache import JobCache
|
||||
from jenkins_jobs.constants import MAGIC_MANAGE_STRING
|
||||
from jenkins_jobs.parallel import concurrent
|
||||
from jenkins_jobs import utils
|
||||
@ -43,96 +42,6 @@ logger = logging.getLogger(__name__)
|
||||
_DEFAULT_TIMEOUT = object()
|
||||
|
||||
|
||||
class CacheStorage(object):
    """YAML-backed store mapping Jenkins job names to md5 checksums.

    One cache file is kept per remote Jenkins URL under the XDG cache
    directory; the in-memory data is flushed back to disk when the
    object is destroyed.
    """

    # Keep module references on the class so the destructor can still
    # reach them: during interpreter teardown python gives no guarantee
    # that the module-level globals have not already been cleared.
    _logger = logger
    _os = os
    _tempfile = tempfile
    _yaml = yaml

    def __init__(self, jenkins_url, flush=False):
        directory = self.get_cache_dir()
        # One cache per remote Jenkins URL:
        slug = re.sub('[^A-Za-z0-9\-\~]', '_', jenkins_url)
        self.cachefilename = os.path.join(
            directory, 'cache-host-jobs-' + slug + '.yml')
        if not flush and os.path.isfile(self.cachefilename):
            with io.open(self.cachefilename, 'r', encoding='utf-8') as yfile:
                self.data = yaml.load(yfile)
        else:
            self.data = {}
        logger.debug("Using cache: '{0}'".format(self.cachefilename))

    @staticmethod
    def get_cache_dir():
        """Return (creating if needed) the jenkins_jobs cache directory."""
        home = os.path.expanduser('~')
        if home == '~':
            raise OSError('Could not locate home folder')
        xdg_cache_home = (os.environ.get('XDG_CACHE_HOME') or
                          os.path.join(home, '.cache'))
        path = os.path.join(xdg_cache_home, 'jenkins_jobs')
        if os.path.isdir(path):
            return path
        try:
            os.makedirs(path)
        except OSError as ose:
            # Another jjb instance may have created the directory between
            # our isdir() check and makedirs(); that race is harmless.
            if ose.errno != errno.EEXIST:
                raise ose
        return path

    def set(self, job, md5):
        """Record *md5* as the current checksum for *job*."""
        self.data[job] = md5

    def clear(self):
        """Drop every cached entry."""
        self.data.clear()

    def is_cached(self, job):
        """Return True when *job* has an entry in the cache."""
        return job in self.data

    def has_changed(self, job, md5):
        """Return True unless *job* is cached with exactly this *md5*."""
        unchanged = job in self.data and self.data[job] == md5
        return not unchanged

    def save(self):
        """Write the cache out to disk atomically."""
        # Use the class-level module references in case this runs via
        # __del__ during interpreter shutdown.
        # Write to a temporary file in the same directory, then rename
        # over the cache file, so a killed process cannot corrupt it.
        tmp = self._tempfile.NamedTemporaryFile(dir=self.get_cache_dir(),
                                                delete=False)
        tmp.write(self._yaml.dump(self.data).encode('utf-8'))
        # Make sure the bytes are on disk before overwriting cachefile.
        tmp.flush()
        self._os.fsync(tmp.fileno())
        tmp.close()
        try:
            self._os.rename(tmp.name, self.cachefilename)
        except OSError:
            # On Windows, if dst already exists, OSError will be raised
            # even if it is a file; remove the old file and retry.
            self._os.remove(self.cachefilename)
            self._os.rename(tmp.name, self.cachefilename)

        self._logger.debug("Cache written out to '%s'" % self.cachefilename)

    def __del__(self):
        # __init__ may have raised before self.data existed; only save a
        # fully initialized cache.
        if getattr(self, 'data', None) is None:
            return
        try:
            self.save()
        except Exception as e:
            self._logger.error("Failed to write to cache file '%s' on "
                               "exit: %s" % (self.cachefilename, e))
|
||||
|
||||
|
||||
class JenkinsManager(object):
|
||||
|
||||
def __init__(self, jjb_config):
|
||||
@ -146,8 +55,8 @@ class JenkinsManager(object):
|
||||
else:
|
||||
self.jenkins = jenkins.Jenkins(url, user, password)
|
||||
|
||||
self.cache = CacheStorage(jjb_config.jenkins['url'],
|
||||
flush=jjb_config.builder['flush_cache'])
|
||||
self.cache = JobCache(jjb_config.jenkins['url'],
|
||||
flush=jjb_config.builder['flush_cache'])
|
||||
|
||||
self._plugins_list = jjb_config.builder['plugins_info']
|
||||
self._jobs = None
|
||||
|
117
jenkins_jobs/cache.py
Normal file
117
jenkins_jobs/cache.py
Normal file
@ -0,0 +1,117 @@
|
||||
#!/usr/bin/env python
|
||||
# Copyright (C) 2012 OpenStack, LLC.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
# Manage jobs in Jenkins server
|
||||
|
||||
import errno
|
||||
import io
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import tempfile
|
||||
import yaml
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class JobCache(object):
    """Persistent per-Jenkins-host cache of job name -> md5 checksum.

    The cache lives as a YAML file under the XDG cache directory, one
    file per remote Jenkins URL, and is written back to disk when the
    instance is garbage collected.
    """

    # ensure each instance of the class has a reference to the required
    # modules so that they are available to be used when the destructor
    # is being called since python will not guarantee that it won't have
    # removed global module references during teardown.
    _logger = logger
    _os = os
    _tempfile = tempfile
    _yaml = yaml

    def __init__(self, jenkins_url, flush=False):
        """Load (or initialize) the cache for *jenkins_url*.

        :param jenkins_url: URL of the Jenkins master; used to derive a
            per-host cache filename.
        :param flush: when True, discard any existing cached data and
            start empty.
        """
        cache_dir = self.get_cache_dir()
        # One cache per remote Jenkins URL.  The raw string fixes the
        # invalid '\-'/'\~' escape-sequence warnings the original
        # non-raw literal produced; the pattern value is unchanged.
        host_vary = re.sub(r'[^A-Za-z0-9\-\~]', '_', jenkins_url)
        self.cachefilename = os.path.join(
            cache_dir, 'cache-host-jobs-' + host_vary + '.yml')
        if flush or not os.path.isfile(self.cachefilename):
            self.data = {}
        else:
            with io.open(self.cachefilename, 'r', encoding='utf-8') as yfile:
                # NOTE(review): yaml.load without an explicit Loader is
                # unsafe on untrusted input and raises TypeError on
                # PyYAML >= 6.  The cache file is written locally by
                # save(), so the risk is low, but consider
                # yaml.safe_load here.
                self.data = yaml.load(yfile)
        logger.debug("Using cache: '{0}'".format(self.cachefilename))

    @staticmethod
    def get_cache_dir():
        """Return the jenkins_jobs cache directory, creating it if needed.

        :raises OSError: if the user's home folder cannot be located, or
            the directory cannot be created for a reason other than it
            already existing.
        """
        home = os.path.expanduser('~')
        if home == '~':
            raise OSError('Could not locate home folder')
        xdg_cache_home = os.environ.get('XDG_CACHE_HOME') or \
            os.path.join(home, '.cache')
        path = os.path.join(xdg_cache_home, 'jenkins_jobs')
        if not os.path.isdir(path):
            try:
                os.makedirs(path)
            except OSError as ose:
                # it could happen that two jjb instances are running at
                # the same time and that the other instance created the
                # directory after we made the check, in which case there
                # is no error
                if ose.errno != errno.EEXIST:
                    raise ose
        return path

    def set(self, job, md5):
        """Record *md5* as the current checksum for *job*."""
        self.data[job] = md5

    def clear(self):
        """Drop every cached entry."""
        self.data.clear()

    def is_cached(self, job):
        """Return True when *job* has an entry in the cache."""
        return job in self.data

    def has_changed(self, job, md5):
        """Return True unless *job* is cached with exactly this *md5*."""
        return not (job in self.data and self.data[job] == md5)

    def save(self):
        """Atomically write the cache out to disk."""
        # use self references to required modules in case called via
        # __del__ during interpreter shutdown.
        # write to tempfile under same directory and then replace to
        # avoid corruption should the process be killed mid-write.
        tfile = self._tempfile.NamedTemporaryFile(dir=self.get_cache_dir(),
                                                  delete=False)
        tfile.write(self._yaml.dump(self.data).encode('utf-8'))
        # force contents to be synced on disk before overwriting cachefile
        tfile.flush()
        self._os.fsync(tfile.fileno())
        tfile.close()
        try:
            self._os.rename(tfile.name, self.cachefilename)
        except OSError:
            # On Windows, if dst already exists, OSError will be raised
            # even if it is a file.  Remove the file first in that case
            # and try again.
            self._os.remove(self.cachefilename)
            self._os.rename(tfile.name, self.cachefilename)

        self._logger.debug("Cache written out to '%s'" % self.cachefilename)

    def __del__(self):
        # check we initialized sufficiently in case called
        # due to an exception occurring in the __init__
        if getattr(self, 'data', None) is not None:
            try:
                self.save()
            except Exception as e:
                self._logger.error("Failed to write to cache file '%s' on "
                                   "exit: %s" % (self.cachefilename, e))
|
@ -22,7 +22,7 @@ from tests.base import mock
|
||||
from testtools import TestCase
|
||||
|
||||
|
||||
@mock.patch('jenkins_jobs.builder.CacheStorage', mock.MagicMock)
|
||||
@mock.patch('jenkins_jobs.builder.JobCache', mock.MagicMock)
|
||||
class TestCaseTestBuilder(LoggingFixture, TestCase):
|
||||
def setUp(self):
|
||||
super(TestCaseTestBuilder, self).setUp()
|
||||
|
@ -21,21 +21,21 @@ from tests.base import LoggingFixture
|
||||
from tests.base import mock
|
||||
|
||||
|
||||
class TestCaseCacheStorage(LoggingFixture, testtools.TestCase):
|
||||
class TestCaseJobCache(LoggingFixture, testtools.TestCase):
|
||||
|
||||
@mock.patch('jenkins_jobs.builder.CacheStorage.get_cache_dir',
|
||||
@mock.patch('jenkins_jobs.builder.JobCache.get_cache_dir',
|
||||
lambda x: '/bad/file')
|
||||
def test_save_on_exit(self):
|
||||
"""
|
||||
Test that the cache is saved on normal object deletion
|
||||
"""
|
||||
|
||||
with mock.patch('jenkins_jobs.builder.CacheStorage.save') as save_mock:
|
||||
with mock.patch('jenkins_jobs.builder.JobCache.save') as save_mock:
|
||||
with mock.patch('os.path.isfile', return_value=False):
|
||||
jenkins_jobs.builder.CacheStorage("dummy")
|
||||
jenkins_jobs.builder.JobCache("dummy")
|
||||
save_mock.assert_called_with()
|
||||
|
||||
@mock.patch('jenkins_jobs.builder.CacheStorage.get_cache_dir',
|
||||
@mock.patch('jenkins_jobs.builder.JobCache.get_cache_dir',
|
||||
lambda x: '/bad/file')
|
||||
def test_cache_file(self):
|
||||
"""
|
||||
@ -44,4 +44,4 @@ class TestCaseCacheStorage(LoggingFixture, testtools.TestCase):
|
||||
test_file = os.path.abspath(__file__)
|
||||
with mock.patch('os.path.join', return_value=test_file):
|
||||
with mock.patch('yaml.load'):
|
||||
jenkins_jobs.builder.CacheStorage("dummy").data = None
|
||||
jenkins_jobs.builder.JobCache("dummy").data = None
|
||||
|
@ -13,12 +13,12 @@ class CmdTestsBase(LoggingFixture, testtools.TestCase):
|
||||
def setUp(self):
|
||||
super(CmdTestsBase, self).setUp()
|
||||
|
||||
# Testing the cmd module can sometimes result in the CacheStorage class
|
||||
# Testing the cmd module can sometimes result in the JobCache class
|
||||
# attempting to create the cache directory multiple times as the tests
|
||||
# are run in parallel. Stub out the CacheStorage to ensure that each
|
||||
# are run in parallel. Stub out the JobCache to ensure that each
|
||||
# test can safely create the cache directory without risk of
|
||||
# interference.
|
||||
cache_patch = mock.patch('jenkins_jobs.builder.CacheStorage',
|
||||
cache_patch = mock.patch('jenkins_jobs.builder.JobCache',
|
||||
autospec=True)
|
||||
self.cache_mock = cache_patch.start()
|
||||
self.addCleanup(cache_patch.stop)
|
||||
|
@ -25,11 +25,11 @@ def fake_os_walk(paths):
|
||||
return os_walk
|
||||
|
||||
|
||||
# Testing the utils module can sometimes result in the CacheStorage class
|
||||
# Testing the utils module can sometimes result in the JobCache class
|
||||
# attempting to create the cache directory multiple times as the tests
|
||||
# are run in parallel. Stub out the CacheStorage to ensure that each
|
||||
# are run in parallel. Stub out the JobCache to ensure that each
|
||||
# test can safely create the object without effect.
|
||||
@mock.patch('jenkins_jobs.builder.CacheStorage', mock.MagicMock)
|
||||
@mock.patch('jenkins_jobs.builder.JobCache', mock.MagicMock)
|
||||
class CmdRecursePath(testtools.TestCase):
|
||||
|
||||
@mock.patch('jenkins_jobs.utils.os.walk')
|
||||
|
Loading…
Reference in New Issue
Block a user