Resolve bashate and pep8 warnings
This update addresses existing bashate and pep8 warnings in cgcs-patch, patch-alarm, and tsconfig. The bulk of these updates are style and spacing changes, such as whitespace style conventions.

Story: 2003371
Task: 24433
Change-Id: I44b26d24788907bac0730a952d70ed4bafb87d90
Signed-off-by: Don Penney <don.penney@windriver.com>
This commit is contained in:
parent e5afe88d43
commit ae0314279f
Files changed:

cgcs-patch/
    bin/
        patch_check_goenabled.sh
        sw-patch-agent-init.sh
        sw-patch-controller-daemon-init.sh
        sw-patch-controller-init.sh
        sw-patch-init.sh
    cgcs-patch/
        cgcs_make_patch/
        cgcs_patch/
            __init__.py
            api/
            authapi/
            base.py
            certificates.py
            config.py
            exceptions.py
            patch_agent.py
            patch_client.py
            patch_controller.py
            patch_functions.py
            patch_signing.py
            patch_verify.py
        cgcs_patch_id/
        setup.py
patch-alarm/patch-alarm
tox.ini
tsconfig/tsconfig
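The shell fixes below follow bashate's preferred forms (function name { ... }, if ...; then, while ...; do), and the Python fixes are routine pep8 cleanups. As a quick orientation, here is a small illustrative sketch (hypothetical names, not code from this commit) of the pep8 rules that account for most of the Python churn:

    # Illustrative pep8 fixes; the names here are hypothetical examples.

    # E201/E202 -- no whitespace just inside brackets:
    #   before: tags = [ 'ID', 'SW_VERSION' ]
    tags = ['ID', 'SW_VERSION']

    # E225/E226 -- whitespace around operators:
    #   before: size = 64*1024
    size = 64 * 1024

    # E712 -- compare to True with "is", not "==":
    interim_state = True
    if interim_state is True:
        print(tags, size)


    # E302/E303 -- exactly two blank lines around top-level definitions
    def noop():
        pass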
@@ -13,14 +13,12 @@ SYSTEM_CHANGED_FLAG=/var/run/node_is_patched
 
 logfile=/var/log/patching.log
 
-function LOG()
-{
+function LOG {
     logger "$NAME: $*"
     echo "`date "+%FT%T.%3N"`: $NAME: $*" >> $logfile
 }
 
-if [ -f $SYSTEM_CHANGED_FLAG ]
-then
+if [ -f $SYSTEM_CHANGED_FLAG ]; then
     LOG "Node has been patched. Failing goenabled check."
     exit 1
 fi
@@ -22,7 +22,7 @@ DAEMON="/usr/sbin/sw-patch-agent"
 PIDFILE="/var/run/sw-patch-agent.pid"
 PATCH_INSTALLING_FILE="/var/run/patch_installing"
 
-start()
+start()
 {
     if [ -e $PIDFILE ]; then
         PIDDIR=/proc/$(cat $PIDFILE)

@@ -47,18 +47,15 @@ start()
     fi
 }
 
-stop()
+stop()
 {
-    if [ -f $PATCH_INSTALLING_FILE ]
-    then
+    if [ -f $PATCH_INSTALLING_FILE ]; then
         echo "Patches are installing. Waiting for install to complete."
-        while [ -f $PATCH_INSTALLING_FILE ]
-        do
+        while [ -f $PATCH_INSTALLING_FILE ]; do
             # Verify the agent is still running
             pid=$(cat $PATCH_INSTALLING_FILE)
             cat /proc/$pid/cmdline 2>/dev/null | grep -q $DAEMON
-            if [ $? -ne 0 ]
-            then
+            if [ $? -ne 0 ]; then
                 echo "Patch agent not running."
                 break
             fi
@@ -21,7 +21,7 @@ DESC="sw-patch-controller-daemon"
 DAEMON="/usr/sbin/sw-patch-controller-daemon"
 PIDFILE="/var/run/sw-patch-controller-daemon.pid"
 
-start()
+start()
 {
     if [ -e $PIDFILE ]; then
         PIDDIR=/proc/$(cat $PIDFILE)

@@ -46,7 +46,7 @@ start()
     fi
 }
 
-stop()
+stop()
 {
     echo -n "Stopping $DESC..."
     start-stop-daemon --stop --quiet --pidfile $PIDFILE
@@ -20,21 +20,17 @@ PATCHING_DIR=/opt/patching
 
 logfile=/var/log/patching.log
 
-function LOG()
-{
+function LOG {
     logger "$NAME: $*"
     echo "`date "+%FT%T.%3N"`: $NAME: $*" >> $logfile
 }
 
-function LOG_TO_FILE()
-{
+function LOG_TO_FILE {
     echo "`date "+%FT%T.%3N"`: $NAME: $*" >> $logfile
 }
 
-function create_groups()
-{
-    if [ -f $GROUPS_FILE ]
-    then
+function create_groups {
+    if [ -f $GROUPS_FILE ]; then
         return 0
     fi
 

@@ -45,11 +41,9 @@ function create_groups()
 EOF
 }
 
-function do_setup()
-{
+function do_setup {
     # Does the repo exist?
-    if [ ! -d $REPO_DIR ]
-    then
+    if [ ! -d $REPO_DIR ]; then
         LOG "Creating repo"
         mkdir -p $REPO_DIR
 

@@ -59,8 +53,7 @@ function do_setup()
         createrepo -g $GROUPS_FILE $REPO_DIR >> $logfile 2>&1
     fi
 
-    if [ ! -d $PATCHING_DIR ]
-    then
+    if [ ! -d $PATCHING_DIR ]; then
         LOG "Creating $PATCHING_DIR"
         mkdir -p $PATCHING_DIR
     fi

@@ -68,8 +61,7 @@ function do_setup()
     # If we can ping the active controller, sync the repos
     LOG_TO_FILE "ping -c 1 -w 1 controller"
     ping -c 1 -w 1 controller >> $logfile 2>&1 || ping6 -c 1 -w 1 controller >> $logfile 2>&1
-    if [ $? -ne 0 ]
-    then
+    if [ $? -ne 0 ]; then
         LOG "Cannot ping controller. Nothing to do"
         return 0
     fi
@@ -15,15 +15,12 @@ NAME=$(basename $0)
 
 logfile=/var/log/patching.log
 
-function LOG_TO_FILE()
-{
+function LOG_TO_FILE {
     echo "`date "+%FT%T.%3N"`: $NAME: $*" >> $logfile
 }
 
-function check_for_rr_patch()
-{
-    if [ -f /var/run/node_is_patched_rr ]
-    then
+function check_for_rr_patch {
+    if [ -f /var/run/node_is_patched_rr ]; then
         echo
         echo "Node has been patched and requires an immediate reboot."
         echo

@@ -32,14 +29,11 @@ function check_for_rr_patch()
     fi
 }
 
-function check_install_uuid()
-{
+function check_install_uuid {
     # Check whether our installed load matches the active controller
     CONTROLLER_UUID=`curl -sf http://controller/feed/rel-${SW_VERSION}/install_uuid`
-    if [ $? -ne 0 ]
-    then
-        if [ "$HOSTNAME" = "controller-1" ]
-        then
+    if [ $? -ne 0 ]; then
+        if [ "$HOSTNAME" = "controller-1" ]; then
            # If we're on controller-1, controller-0 may not have the install_uuid
            # matching this release, if we're in an upgrade. If the file doesn't exist,
            # bypass this check

@@ -51,8 +45,7 @@ function check_install_uuid()
             return 1
         fi
 
-        if [ "$INSTALL_UUID" != "$CONTROLLER_UUID" ]
-        then
+        if [ "$INSTALL_UUID" != "$CONTROLLER_UUID" ]; then
             LOG_TO_FILE "This node is running a different load than the active controller and must be reinstalled"
             echo "This node is running a different load than the active controller and must be reinstalled"
             return 1

@@ -69,8 +62,7 @@ if [ -f /etc/platform/installation_failed ] ; then
 fi
 
 # Clean up the RPM DB
-if [ ! -f /var/run/.rpmdb_cleaned ]
-then
+if [ ! -f /var/run/.rpmdb_cleaned ]; then
     LOG_TO_FILE "Cleaning RPM DB"
     rm -f /var/lib/rpm/__db*
     touch /var/run/.rpmdb_cleaned

@@ -82,28 +74,24 @@ fi
 DELAY_SEC=120
 START=`date +%s`
 FOUND=0
-while [ $(date +%s) -lt $(( ${START} + ${DELAY_SEC} )) ]
-do
+while [ $(date +%s) -lt $(( ${START} + ${DELAY_SEC} )) ]; do
     ping -c 1 controller > /dev/null 2>&1 || ping6 -c 1 controller > /dev/null 2>&1
-    if [ $? -eq 0 ]
-    then
+    if [ $? -eq 0 ]; then
         FOUND=1
         break
     fi
     sleep 1
 done
 
-if [ ${FOUND} -eq 0 ]
-then
-    # 'controller' is not available, just exit
-    LOG_TO_FILE "Unable to contact active controller (controller). Boot will continue."
-    exit 1
+if [ ${FOUND} -eq 0 ]; then
+    # 'controller' is not available, just exit
+    LOG_TO_FILE "Unable to contact active controller (controller). Boot will continue."
+    exit 1
 fi
 
 case "$1" in
     start)
-        if [ "${system_mode}" = "simplex" ]
-        then
+        if [ "${system_mode}" = "simplex" ]; then
             # On a simplex CPE, we need to launch the http server first,
             # before we can do the patch installation
             LOG_TO_FILE "***** Launching lighttpd *****"

@@ -117,8 +105,7 @@ case "$1" in
             /etc/init.d/lighttpd stop
         else
             check_install_uuid
-            if [ $? -ne 0 ]
-            then
+            if [ $? -ne 0 ]; then
                 # The INSTALL_UUID doesn't match the active controller, so exit
                 exit 1
             fi
@@ -35,11 +35,11 @@ LOCAL_PATCH_DATA_DIR = "export/patch_data"
 ORDER_FILE = "patch_order"
 ARCH_DEFAULT = "x86_64"
 
-METADATA_TAGS = [ 'ID', 'SW_VERSION', 'SUMMARY', 'DESCRIPTION',
-                  'INSTALL_INSTRUCTIONS', 'WARNINGS', 'STATUS',
-                  'UNREMOVABLE', 'REBOOT_REQUIRED' ]
-RMP_EXCLUDES = [ '-dev-', '-dbg-', '-doc-' ]
-BUILD_TYPES = [ 'std', 'rt' ]
+METADATA_TAGS = ['ID', 'SW_VERSION', 'SUMMARY', 'DESCRIPTION',
+                 'INSTALL_INSTRUCTIONS', 'WARNINGS', 'STATUS',
+                 'UNREMOVABLE', 'REBOOT_REQUIRED']
+RMP_EXCLUDES = ['-dev-', '-dbg-', '-doc-']
+BUILD_TYPES = ['std', 'rt']
 
 
 SAME = 0

@@ -74,7 +74,7 @@ capture_source_flag = False
 capture_rpms_flag = False
 
 capture_source_path = None
 
 logfile = "/var/log/patching.log"
 
 LOG = logging.getLogger(__name__)

@@ -96,21 +96,22 @@ def configure_logging(logtofile=True, level=logging.DEBUG):
     else:
         logging.basicConfig(level=level)
 
 
 def rev_lt(num1, num2):
-    n1w=num1.split('.')
-    n2w=num2.split('.')
+    n1w = num1.split('.')
+    n2w = num2.split('.')
     while True:
         try:
-            n1=int(n1w.pop(0))
-        except:
+            n1 = int(n1w.pop(0))
+        except:
             return True
         try:
-            n2=int(n2w.pop(0))
-        except:
+            n2 = int(n2w.pop(0))
+        except:
             return False
-        if n1<n2:
+        if n1 < n2:
             return True
-        if n1>n2:
+        if n1 > n2:
             return False
 
+

@@ -126,6 +127,7 @@ def add_text_tag_to_xml(parent, name, text):
     tag.text = text
     return tag
 
+
 def handle_exception(exc_type, exc_value, exc_traceback):
     """
     Exception handler to log any uncaught exceptions

@@ -134,6 +136,7 @@ def handle_exception(exc_type, exc_value, exc_traceback):
                   exc_info=(exc_type, exc_value, exc_traceback))
     sys.__excepthook__(exc_type, exc_value, exc_traceback)
 
+
 def write_xml_file(top, fname):
     # Generate the file, in a readable format if possible
     outfile = open(fname, 'w')

@@ -147,6 +150,7 @@ def write_xml_file(top, fname):
     else:
         outfile.write(minidom.parseString(rough_xml).toprettyxml(indent="  "))
 
+
 class PatchRecipeError(Exception):
     """Base class for patch recipe exceptions."""

@@ -156,26 +160,32 @@ class PatchRecipeError(Exception):
     def __str__(self):
         return self.message or ""
 
+
 class PatchRecipeXMLFail(PatchRecipeError):
     """Problem parsing XML of patch recipe."""
     pass
 
+
 class PatchBuildFail(PatchRecipeError):
     """Problem Compiling the patch."""
     pass
 
+
 class PatchPackagingFail(PatchRecipeError):
     """Problem assembling the patch."""
     pass
 
+
 class PatchPackagingMiss(PatchRecipeError):
     """Problem assembling the patch - might be correctable."""
     pass
 
+
 class PatchRequirementFail(PatchRecipeError):
     """Missing Requirement."""
     pass
 
+
 class PatchRecipeCmdFail(PatchRecipeError):
     """Shell command Failure."""
     pass

@@ -241,7 +251,7 @@ class PatchList:
             if patch == patch_id:
                 return self.patch_data[patch]
         return None
 
     def _validate_patch_order(self):
         fix_local_order = False
         remote_order = []

@@ -266,7 +276,7 @@ class PatchList:
                 break
         if fix_local_order:
             print "_validate_patch_order: fix patch order"
-            f = open(self._std_local_path(self.order_file),'w')
+            f = open(self._std_local_path(self.order_file), 'w')
             for patch_id in validated_order:
                 f.write("%s\n" % patch_id)
                 print "_validate_patch_order: %s" % patch_id

@@ -282,7 +292,7 @@ class PatchList:
         os.chdir(workdir)
         issue_cmd("mkdir -p %s" % self._std_remote_copy_path(""))
         os.chdir(self._std_remote_copy_path(""))
 
         if not os.path.isdir(self.patch_git):
             issue_cmd("git clone ssh://%s@vxgit.wrs.com:7999/cgcs/%s.git" % (os.environ['USER'], self.patch_git))
         os.chdir(self.patch_git)

@@ -327,7 +337,7 @@ class PatchList:
         for patch_id in self.patches_to_deliver:
             os.chdir(workdir)
             patch = "%s.patch" % patch_id
-            print "signing patch '%s'" % self._std_local_path(patch)
+            print "signing patch '%s'" % self._std_local_path(patch)
 
             try:
                 subprocess.check_call(["sign_patch_formal.sh", self._std_local_path(patch)])

@@ -377,7 +387,6 @@ class PatchList:
             xml_path = self._std_local_path(self._std_xml_patch_recipe_name(patch_id))
             self.add(xml_path, built=True, fix=False)
 
-
     def get_implicit_requires(self, patch_id, recipies):
         list = []
         for r in recipies:

@@ -513,7 +522,6 @@ class PatchList:
 
         prd.gen_xml(fname=self._std_local_path(self._std_xml_patch_recipe_name(prd.patch_id)))
 
-
     def build_patches(self):
         global capture_source_flag
         # While unbuild patches exist

@@ -527,7 +535,7 @@ class PatchList:
             if rc:
                 # This patch is ready to build, build it now
                 print "Ready to build patch %s." % patch_id
-                rc = prd.build_patch()
+                rc = prd.build_patch()
                 if rc:
                     # append new built patch to order file
                     issue_cmd("sed -i '/^%s$/d' %s" % (patch_id, self._std_local_path(self.order_file)))

@@ -544,8 +552,8 @@ class PatchList:
                     if capture_source_flag:
                         prd.capture_source()
 
-                    # It is important to break here.
-                    # We just edited the patches_to_build which an enclosing for loop is iterating over.
+                    # It is important to break here.
+                    # We just edited the patches_to_build which an enclosing for loop is iterating over.
                     # without the break, the result is skipping patches and/or building patches out of order.
                     break
             else:

@@ -635,7 +643,6 @@ class PackageData:
             raise PatchRecipeXMLFail(msg)
             sys.exit(2)
 
-
     def gen_xml(self, e_package):
         for personality in self.personalities:
             add_text_tag_to_xml(e_package, 'PERSONALITY', personality)

@@ -670,17 +677,17 @@ class PackageData:
                 file_path = "%s/%s" % (rpm_dir, file)
                 if os.path.isfile(file_path):
                     print "cleaning match %s\n" % file
-                    rpm_name_cmd = [ "rpm", "-qp", "--dbpath", temp_rpm_db_dir, "--queryformat", "%{NAME}", "%s" % file_path ]
+                    rpm_name_cmd = ["rpm", "-qp", "--dbpath", temp_rpm_db_dir, "--queryformat", "%{NAME}", "%s" % file_path]
                     rpm_name = issue_cmd_w_stdout(rpm_name_cmd)
                     if rpm_name == self.name:
-                        rpm_release_cmd = [ "rpm", "-qp", "--dbpath", temp_rpm_db_dir, "--queryformat", "%{RELEASE}", "%s" % file_path ]
+                        rpm_release_cmd = ["rpm", "-qp", "--dbpath", temp_rpm_db_dir, "--queryformat", "%{RELEASE}", "%s" % file_path]
                         rpm_release = issue_cmd_w_stdout(rpm_release_cmd)
                         print "cleaning release %s" % rpm_release
                         rm_cmd = "rm -f %s/%s-*-%s.%s.rpm" % (rpm_dir, self.name, rpm_release, arch)
                         issue_cmd(rm_cmd)
 
     def clean(self, prebuilt=False):
-        print "package clean"
+        print "package clean"
         self._clean_rpms(prebuilt=prebuilt)
 
     def _add_rpms(self, pf, arch=ARCH_DEFAULT, fatal=True, prebuilt=False):

@@ -738,7 +745,7 @@ class PackageData:
             #     break
 
             if not reject:
-                rpm_name_cmd = [ "rpm", "-qp", "--dbpath", temp_rpm_db_dir, "--queryformat", "%{NAME}", "%s/%s" % (rpm_dir, file) ]
+                rpm_name_cmd = ["rpm", "-qp", "--dbpath", temp_rpm_db_dir, "--queryformat", "%{NAME}", "%s/%s" % (rpm_dir, file)]
                 rpm_name = issue_cmd_w_stdout(rpm_name_cmd)
                 if rpm_name != self.name:
                     print "reject file '%s' due to rpm_name '%s'" % (file, rpm_name)

@@ -784,6 +791,7 @@ class PackageData:
             raise PatchPackagingFail(msg)
             sys.exit(2)
 
+
 class RecipeData:
     """
     Recipe data

@@ -791,7 +799,7 @@ class RecipeData:
     def __init__(self, e):
         self.name = None
         self.prebuilt = False
-        self.packages = collections.OrderedDict() # map package name to PackageData
+        self.packages = collections.OrderedDict()  # map package name to PackageData
         self._parse_recipe(e)
 
     def __str__(self):

@@ -863,7 +871,7 @@ class RecipeData:
             self.packages[package].gen_xml(e_package)
 
     def clean(self):
-        print "recipe clean"
+        print "recipe clean"
         if not self.prebuilt:
             for package in self.packages:
                 self.packages[package].clean(prebuilt=self.prebuilt)

@@ -890,7 +898,7 @@ class RecipeData:
 
         if os.path.isfile(path):
             rc = issue_cmd_rc("%s %s %s >> %s/%s.log" % (path, self.name, extra_arg, os.environ['DEST'], os.environ['PREFIX']))
 
     def build_patch(self, pf, fatal=True):
         for package in self.packages:
             self.packages[package].build_patch(pf, fatal=fatal, prebuilt=self.prebuilt)

@@ -903,6 +911,7 @@ class RecipeData:
         print "=========== is_prebuilt prebuilt=%s for %s =============" % (self.prebuilt, self.name)
         return self.prebuilt
 
+
 class PatchRecipeData:
     """
     Patch recipe data

@@ -951,7 +960,6 @@ class PatchRecipeData:
                 rc = rc2
         return rc
 
-
     def set_implicit_requires(self, patch_list):
         self.auto_requires = patch_list.get_implicit_requires(self.patch_id, self.recipies.keys())
 

@@ -1100,8 +1108,8 @@ class PatchRecipeData:
 
     def recursive_print(self, e, depth=0):
         for child in e:
-            print "%sTag: %s, attr: %s, text: %s" % (" "*depth, child.tag, child.attrib, child.text and child.text.strip() or "")
-            self.recursive_print(child.getchildren(), depth+1)
+            print "%sTag: %s, attr: %s, text: %s" % (" " * depth, child.tag, child.attrib, child.text and child.text.strip() or "")
+            self.recursive_print(child.getchildren(), depth + 1)
         # for child in e.iter('BUILD'):
         #     print "Tag: %s, attr: %s" % (child.tag, child.attrib)
 

@@ -1162,7 +1170,7 @@ class PatchRecipeData:
         write_xml_file(e_top, fname)
 
     def __str__(self):
-        return "[ patch_id: %s, context: %s, metadata: %s, requires: %s, recipies: %s ]" % (str(self.patch_id), str(self.build_context), str(self.metadata), str(self.requires), str(self.recipies,keys()))
+        return "[ patch_id: %s, context: %s, metadata: %s, requires: %s, recipies: %s ]" % (str(self.patch_id), str(self.build_context), str(self.metadata), str(self.requires), str(self.recipies, keys()))
 
     def myprint(self, indent=""):
         print "patch_id: %s" % str(self.patch_id)

@@ -1205,7 +1213,7 @@ class PatchRecipeData:
 
         if self.build_context is not None:
             # Before checkout, make sure there are no untracked temporary files
-            # left by a previous build that may prevent the checkout...
+            # left by a previous build that may prevent the checkout...
             # e.g. horizon's pbr-2015.1.0-py2.7.egg directory is a build artifact
             issue_cmd("for d in $(find . -type d -name .git | xargs --max-args=1 dirname); do (cd $d; echo $d; git clean -df; git reset --hard; git ls-files --others --exclude-standard | xargs --no-run-if-empty rm; if [ ! -f .subgits ]; then if [ -f .gitignore ]; then git ls-files --others --ignored --exclude-from=.gitignore | xargs --no-run-if-empty rm; fi; fi); done")
             issue_cmd("wrgit checkout %s" % self.build_context)

@@ -1229,7 +1237,6 @@ class PatchRecipeData:
 
         return True
 
-
    def _get_prev_patch_id(self, patch_id):
        patch_order_file = self.pl._std_local_path(self.pl.order_file)
        prev_patch_id = None

@@ -1240,7 +1247,7 @@ class PatchRecipeData:
                 return prev_patch_id
             prev_patch_id = this_patch_id
         return prev_patch_id
 
     def _get_rpm_db_path(self, patch_id):
         rpm_db = self.pl._std_local_path("%s.rpm_db" % patch_id)
         return rpm_db

@@ -1257,7 +1264,7 @@ class PatchRecipeData:
         issue_cmd("rpm -qp --dbpath %s --queryformat '%s %%{NAME} %%{RELEASE}\n' %s/*rpm >> %s 2> /dev/null" % (temp_rpm_db_dir, subdir, rpm_sub_dir, rpm_db))
 
     def _read_rpm_db(self, patch_id):
-        release_map={}
+        release_map = {}
         rpm_db_dir = "export/patch_data"
         rpm_db = self._get_rpm_db_path(patch_id)
         with open(rpm_db) as f:

@@ -1276,7 +1283,7 @@ class PatchRecipeData:
         delim = "_"
         words = self.patch_id.split(delim)
         l = len(words[-1])
-        words[-1] = '0'*l
+        words[-1] = '0' * l
         prev_patch_id = delim.join(words)
         prev_release_map = self._read_rpm_db(prev_patch_id)
         release_map = self._read_rpm_db(self.patch_id)

@@ -1290,7 +1297,7 @@ class PatchRecipeData:
         os.environ['DEST'] = "%s/export/patch_source/%s" % (os.environ['MY_PATCH_WORKSPACE'], self.patch_id)
         issue_cmd("mkdir -p %s" % os.environ['DEST'])
         for recipe in self.recipies.keys():
-            print "capture source of recipe %s" % recipe
+            print "capture source of recipe %s" % recipe
             self.recipies[recipe].capture_source()
 
     def build_patch(self, local_path="."):

@@ -1305,8 +1312,8 @@ class PatchRecipeData:
             recipe_str += recipe + " "
             if not self.recipies[recipe].is_prebuilt():
                 build_recipe_str += recipe + " "
-        print "recipe_str = %s" % recipe_str
-        print "build_recipe_str = %s" % build_recipe_str
+        print "recipe_str = %s" % recipe_str
+        print "build_recipe_str = %s" % build_recipe_str
         if recipe_str == "":
             msg = "No recipies for patch %s" % self.patch_id
             LOG.exception(msg)

@@ -1326,15 +1333,15 @@ class PatchRecipeData:
         if not pre_compiled_flag:
             # compile patch
             os.chdir(workdir)
-            print "pre clean"
+            print "pre clean"
             if build_recipe_str == "":
                 print " ... nothing to clean"
             else:
                 issue_cmd("build-pkgs --no-build-info --clean %s" % build_recipe_str)
             for recipe in self.recipies.keys():
-                print "pre clean recipe %s" % recipe
+                print "pre clean recipe %s" % recipe
                 self.recipies[recipe].clean()
-            print "Build"
+            print "Build"
             if build_recipe_str == "":
                 print " ... nothing to build"
             else:

@@ -1377,7 +1384,7 @@ class PatchRecipeData:
             if not pre_compiled_flag:
                 self.recipies[recipe].build_patch(pf, fatal=True)
             else:
-                try:
+                try:
                     self.recipies[recipe].build_patch(pf, fatal=False)
                 except PatchPackagingMiss:
                     print "Warning: attempting rebuild of recipe %s" % self.recipies[recipe].name

@@ -1385,20 +1392,21 @@ class PatchRecipeData:
                     issue_cmd("build-pkgs --no-build-info --careful %s" % self.recipies[recipe].name)
                     self.recipies[recipe].build_patch(pf, fatal=True)
 
-        local_path=self.pl._std_local_path("")
+        local_path = self.pl._std_local_path("")
         print "=== local_path = %s ===" % local_path
         pf.gen_patch(outdir=local_path)
 
         return True
 
+
 def _tag_build_context():
     os.chdir(srcdir)
     issue_cmd("for e in . `wrgit all-core-gits` ; do (cd $e ; git tag v%s) done" % self.patch_id)
 
 
 def read_build_info():
     try:
-        build_info_find_cmd = [ "find", "std/rpmbuild/RPMS/", "-name", "build-info-[0-9]*.x86_64.rpm" ]
+        build_info_find_cmd = ["find", "std/rpmbuild/RPMS/", "-name", "build-info-[0-9]*.x86_64.rpm"]
         build_info_path = issue_cmd_w_stdout(build_info_find_cmd)
         if build_info_path == "":
             issue_cmd("build-pkgs --no-descendants build-info")

@@ -1411,7 +1419,7 @@ def read_build_info():
                 continue
             if len(line) == 0:
                 continue
 
             name, var = line.partition("=")[::2]
             name = name.strip()
             var = var.strip()

@@ -1422,10 +1430,12 @@ def read_build_info():
         return False
     return True
 
+
 def patch_id_to_tag(patch_id):
     tag = "v%s" % patch_id
     return tag
 
+
 def validate_tag(tag):
     try:
         cmd = "git tag | grep %s" % tag

@@ -1437,6 +1447,7 @@ def validate_tag(tag):
         return False
     return True
 
+
 def issue_cmd_w_stdout(cmd):
     p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
     out = p.communicate()[0]

@@ -1447,7 +1458,7 @@ def issue_cmd_w_stdout(cmd):
         print msg
         raise PatchRecipeCmdFail(msg)
     return out
 
 
 def issue_cmd(cmd):
     print "CMD: %s" % cmd

@@ -1458,6 +1469,7 @@ def issue_cmd(cmd):
         print msg
         raise PatchRecipeCmdFail(msg)
 
+
 def issue_cmd_no_raise(cmd):
     print "CMD: %s" % cmd
     rc = subprocess.call(cmd, shell=True)

@@ -1466,11 +1478,13 @@ def issue_cmd_no_raise(cmd):
         LOG.exception(msg)
         print msg
 
+
 def issue_cmd_rc(cmd):
     print "CMD: %s" % cmd
     rc = subprocess.call(cmd, shell=True)
     return rc
 
+
 def set_capture_source_path():
     global capture_source_path
     my_repo = None

@@ -1491,6 +1505,7 @@ def set_capture_source_path():
     if rc == 0:
         capture_source_path = new_path
 
+
 def capture_rpms():
     for build_type in BUILD_TYPES:
         src_rpm_dir = "%s/%s/%s" % (workdir, build_type, RPM_DIR)

@@ -1499,12 +1514,14 @@ def capture_rpms():
         issue_cmd("mkdir -p %s" % dest_rpm_dir)
         issue_cmd("rsync -avu %s/*.rpm %s" % (src_rpm_dir, dest_rpm_dir))
 
+
 def modify_patch_usage():
     msg = "modify_patch [ --obsolete | --released | --development ] [ --sw_version <version> --id <patch_id> | --file <patch_path.patch> ]"
     LOG.exception(msg)
     print msg
     sys.exit(1)
 
+
 def modify_patch():
     global workdir
     global temp_rpm_db_dir

@@ -1523,11 +1540,10 @@ def modify_patch():
                                        'sw_version=',
                                        'id=',
                                        'file=',
-                                       ])
+                                   ])
     except getopt.GetoptError as e:
         print str(e)
         modify_patch_usage()
 
-
     patch_path = None
     cwd = os.getcwd()

@@ -1572,7 +1588,7 @@ def modify_patch():
         temp_rpm_db_dir = "%s/%s" % (workdir, ".rpmdb")
         if patch_path is not None:
             rc = PatchFile.modify_patch(patch_path, "status", new_status)
-            assert(rc == True)
+            assert(rc)
             print "Patch '%s' has been modified to status '%s'" % (patch_path, new_status)
         else:
             if sw_version is None or patch_id is None:

@@ -1588,7 +1604,7 @@ def modify_patch():
             print "patch_file_name = %s" % patch_file_name
             print "patch_path = %s" % patch_path
             rc = PatchFile.modify_patch(patch_path, "status", new_status)
-            assert(rc == True)
+            assert(rc)
             os.chdir(pl._std_patch_git_path(".."))
             issue_cmd("git add %s" % patch_path)
             issue_cmd("git commit -m \"Modify status of patch '%s' to '%s'\"" % (patch_id, new_status))

@@ -1652,7 +1668,7 @@ def modify_patch():
             human_release = "Titanium Cloud 4"
             windshare_folder = "Titanium-Cloud-4"
 
-        if sw_version == "18.03" || sw_version == "18.03"
+        if sw_version == "18.03" or sw_version == "18.03":
             local_dest = "/folk/cgts/rel-ops/%s/patches/" % sw_version
             deliver_dest = "/folk/prj-wrlinux/release/tis/tis-5/update/ti%s-%s/Titanium-Cloud-5/patches" % (ts, munged_patch_id)
             human_release = "Titanium Cloud 5"

@@ -1700,7 +1716,7 @@ def modify_patch():
         print "Failed to modify patch!"
     finally:
         shutil.rmtree(workdir)
 
 
 def query_patch_usage():
     msg = "query_patch [ --sw_version <version> --id <patch_id> | --file <patch_path.patch> ] [ --field <field_name> ]"

@@ -1711,6 +1727,7 @@ def query_patch_usage():
     print msg
     sys.exit(1)
 
+
 def query_patch():
     global workdir
     global temp_rpm_db_dir

@@ -1727,12 +1744,11 @@ def query_patch():
                                        'id=',
                                        'file=',
                                        'field=',
-                                       ])
+                                   ])
     except getopt.GetoptError as e:
         print str(e)
         query_patch_usage()
 
-
     patch_path = None
     cwd = os.getcwd()
     field = None

@@ -1758,7 +1774,7 @@ def query_patch():
         temp_rpm_db_dir = "%s/%s" % (workdir, ".rpmdb")
         if patch_path is not None:
             answer = PatchFile.query_patch(patch_path, field=field)
-            field_order=['id', 'sw_version', 'status', 'cert', 'reboot_required', 'unremovable', 'summary', 'description', 'install_instructions', 'warnings']
+            field_order = ['id', 'sw_version', 'status', 'cert', 'reboot_required', 'unremovable', 'summary', 'description', 'install_instructions', 'warnings']
             for k in field_order:
                 if k in answer.keys():
                     print "%s: '%s'" % (k, answer[k])

@@ -1794,6 +1810,7 @@ def make_patch_usage():
     print msg
     sys.exit(1)
 
+
 def make_patch():
     global workdir
     global temp_rpm_db_dir

@@ -1825,7 +1842,7 @@ def make_patch():
                                        'srcdir=',
                                        'branch=',
                                        'sw_version=',
-                                       ])
+                                   ])
     except getopt.GetoptError as e:
         print str(e)
         make_patch_usage()

@@ -1894,14 +1911,14 @@ def make_patch():
     # TODO if branch is not None or workdir is not None or srcdir is not None:
     # TODO     print "If --formal is specified, then srcdir, workdir and branch are automatci and must not be specified"
     # TODO     make_patch_usage()
 
     if pre_compiled_flag and formal_flag:
         print "invalid options: --formal and --pre-compiled can't be used together."
         make_patch_usage()
 
     if workdir is not None:
         if not os.path.isdir(workdir):
-            print "invalid directory: workdir = '%s'" % workdir
+            print "invalid directory: workdir = '%s'" % workdir
             make_patch_usage()
 
         temp_rpm_db_dir = "%s/%s" % (workdir, ".rpmdb")

@@ -1916,7 +1933,6 @@ def make_patch():
             print "invalid patch file path: '%s'" % patch
             make_patch_usage()
 
-
     if 'MY_REPO' in os.environ:
         MY_REPO = os.path.normpath(os.path.join(cwd, os.path.expanduser(os.environ['MY_REPO'])))
     else:

@@ -1952,7 +1968,7 @@ def make_patch():
     else:
         print "ERROR: environment variable 'MY_BUILD_CFG' is not defined"
         sys.exit(1)
 
     if 'MY_BUILD_DIR' in os.environ:
         MY_BUILD_DIR = os.path.normpath(os.path.join(cwd, os.path.expanduser(os.environ['MY_BUILD_DIR'])))
     else:

@@ -1989,6 +2005,5 @@ def make_patch():
 
         # sign formal patch
         pl.sign_official_patches()
-        # deliver to git repo
+        # deliver to git repo
         pl.deliver_official_patch()
-
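Aside: the rev_lt helper touched above compares dotted revision strings numerically, one segment at a time, and treats exhausting the first argument as "less than". A minimal modern-Python rendering of the same logic (a sketch for clarity; the module itself is Python 2 and keeps its bare except clauses) behaves like this:

    def rev_lt(num1, num2):
        # Mirror of the cleaned-up helper: compare dotted revisions
        # numerically, segment by segment.
        n1w = num1.split('.')
        n2w = num2.split('.')
        while True:
            try:
                n1 = int(n1w.pop(0))
            except (IndexError, ValueError):
                return True
            try:
                n2 = int(n2w.pop(0))
            except (IndexError, ValueError):
                return False
            if n1 < n2:
                return True
            if n1 > n2:
                return False

    assert rev_lt("1.2", "1.10")      # 2 < 10 numerically, not lexically
    assert not rev_lt("2.0", "1.9")
    assert rev_lt("1.2", "1.2.1")     # shorter version compares as lower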
@@ -4,4 +4,3 @@ Copyright (c) 2014-2017 Wind River Systems, Inc.
 SPDX-License-Identifier: Apache-2.0
 
 """
-
@@ -9,19 +9,19 @@ from oslo_config import cfg
 
 
 API_SERVICE_OPTS = [
-    cfg.StrOpt('api_bind_ip',
-               default='127.0.0.1',
-               help='IP for the Patching controller API server to bind to',
-               ),
-    cfg.IntOpt('api_port',
-               default=5487,
-               help='The port for the Patching controller API server',
-               ),
-    cfg.IntOpt('api_limit_max',
+    cfg.StrOpt('api_bind_ip',
+               default='127.0.0.1',
+               help='IP for the Patching controller API server to bind to',
+               ),
+    cfg.IntOpt('api_port',
+               default=5487,
+               help='The port for the Patching controller API server',
+               ),
+    cfg.IntOpt('api_limit_max',
                default=1000,
                help='the maximum number of items returned in a single '
                     'response from a collection resource'),
-]
+    ]
 
 CONF = cfg.CONF
 opt_group = cfg.OptGroup(name='api',
@@ -5,12 +5,10 @@ SPDX-License-Identifier: Apache-2.0
 
 """
 
-#from oslo.config import cfg
 import pecan
 
 from cgcs_patch.api import config
 
-#CONF = cfg.CONF
 
 def get_pecan_config():
     # Set up the pecan configuration

@@ -30,7 +28,7 @@ def setup_app(pecan_config=None):
         template_path=pecan_config.app.template_path,
         debug=False,
         force_canonical=getattr(pecan_config.app, 'force_canonical', True),
-        guess_content_type_from_ext=False, # Avoid mime-type lookup
+        guess_content_type_from_ext=False,  # Avoid mime-type lookup
     )
 
     return app
@@ -4,4 +4,3 @@ Copyright (c) 2014-2017 Wind River Systems, Inc.
 SPDX-License-Identifier: Apache-2.0
 
 """
-
@@ -16,6 +16,7 @@ from cgcs_patch.patch_controller import pc
 import logging
 from cgcs_patch.patch_functions import LOG
 
+
 class PatchAPIController(object):
 
     @expose('json')

@@ -107,7 +108,7 @@ class PatchAPIController(object):
             # currently 64K chunk size is selected
             dst = os.open(fn, os.O_WRONLY | os.O_CREAT)
             src = fileitem.file.fileno()
-            size = 64*1024
+            size = 64 * 1024
             n = size
             while n >= size:
                 s = os.read(src, size)

@@ -263,4 +264,3 @@ class RootController(object):
 
 patch = PatchAPIController()
 v1 = PatchAPIController()
-
@@ -6,16 +6,16 @@
 from oslo_config import cfg
 
 API_SERVICE_OPTS = [
-    cfg.StrOpt('auth_api_bind_ip',
-               default=None,
-               help='IP for the authenticated Patching API server to bind to'),
-    cfg.IntOpt('auth_api_port',
-               default=5491,
-               help='The port for the authenticated Patching API server'),
-    cfg.IntOpt('api_limit_max',
-               default=1000,
-               help='the maximum number of items returned in a single '
-                    'response from a collection resource')
+    cfg.StrOpt('auth_api_bind_ip',
+               default=None,
+               help='IP for the authenticated Patching API server to bind to'),
+    cfg.IntOpt('auth_api_port',
+               default=5491,
+               help='The port for the authenticated Patching API server'),
+    cfg.IntOpt('api_limit_max',
+               default=1000,
+               help='the maximum number of items returned in a single '
+                    'response from a collection resource')
 ]
 
 CONF = cfg.CONF
@@ -4,13 +4,15 @@ Copyright (c) 2014-2017 Wind River Systems, Inc.
 SPDX-License-Identifier: Apache-2.0
 
 """
+"""Access Control Lists (ACL's) control access the API server."""
 
 from cgcs_patch.authapi import auth_token
 
 OPT_GROUP_NAME = 'keystone_authtoken'
 
 
-"""Access Control Lists (ACL's) control access the API server."""
-
 
 def install(app, conf, public_routes):
     """Install ACL check on application.

@@ -21,7 +23,7 @@ def install(app, conf, public_routes):
     :return: The same WSGI application with ACL installed.
 
     """
 
     keystone_config = dict(conf.items(OPT_GROUP_NAME))
     return auth_token.AuthTokenMiddleware(app,
                                           conf=keystone_config,
@@ -17,9 +17,9 @@ import ConfigParser
 
 auth_opts = [
     cfg.StrOpt('auth_strategy',
-        default='keystone',
-        help='Method to use for auth: noauth or keystone.'),
-]
+               default='keystone',
+               help='Method to use for auth: noauth or keystone.'),
+    ]
 
 CONF = cfg.CONF
 CONF.register_opts(auth_opts)

@@ -58,7 +58,7 @@ def setup_app(pecan_config=None, extra_hooks=None):
         debug=False,
         force_canonical=getattr(pecan_config.app, 'force_canonical', True),
         hooks=app_hooks,
-        guess_content_type_from_ext=False, # Avoid mime-type lookup
+        guess_content_type_from_ext=False,  # Avoid mime-type lookup
     )
 
     if pecan_config.app.enable_acl:
@@ -30,6 +30,7 @@ from sysinv.openstack.common import policy
 _POLICY_PATH = None
 _POLICY_CACHE = {}
 
+
 def reset():
     global _POLICY_PATH
     global _POLICY_CACHE
@@ -146,7 +146,7 @@ class PatchService:
 
     def audit_socket(self):
         # Ensure multicast address is still allocated
-        cmd = "ip maddr show %s | awk 'BEGIN { ORS=\"\" }; {if ($2 == \"%s\") print $2}'" % \
+        cmd = "ip maddr show %s | awk 'BEGIN {ORS=\"\"}; {if ($2 == \"%s\") print $2}'" % \
             (cfg.get_mgmt_iface(), self.mcast_addr)
         try:
             result = subprocess.check_output(cmd, shell=True)

@@ -163,4 +163,3 @@ class PatchService:
                 LOG.info("Unable to setup sockets. Waiting to retry")
                 time.sleep(5)
             LOG.info("Multicast address reconfigured")
-
@@ -27,7 +27,7 @@ dev_certificate = b"""-----BEGIN CERTIFICATE-----
 Ss6CHAMK42aZ/+MWQlZEzNK49PtomGMjn5SuoK8u
 -----END CERTIFICATE-----"""
 
-formal_certificate=b"""-----BEGIN CERTIFICATE-----
+formal_certificate = b"""-----BEGIN CERTIFICATE-----
 MIIDezCCAmOgAwIBAgICEAMwDQYJKoZIhvcNAQELBQAwQjELMAkGA1UEBhMCQ0Ex
 EDAOBgNVBAgMB09udGFyaW8xITAfBgNVBAoMGFdpbmQgUml2ZXIgU3lzdGVtcywg
 SW5jLjAeFw0xNzA4MTgxNDM1MTJaFw0yNzA4MTYxNDM1MTJaMEIxCzAJBgNVBAYT
@@ -26,6 +26,7 @@ platform_conf_mtime = 0
 patching_conf_mtime = 0
 patching_conf = '/etc/patching/patching.conf'
 
+
 def read_config():
     global patching_conf_mtime
     global patching_conf

@@ -122,5 +123,3 @@ def get_mgmt_iface():
         logging.exception("Failed to read management_interface from config")
         return None
     return mgmt_if
-
-
@@ -5,6 +5,7 @@ SPDX-License-Identifier: Apache-2.0
 
 """
 
+
 class PatchError(Exception):
     """Base class for patching exceptions."""

@@ -42,4 +43,4 @@ class PatchValidationFailure(PatchError):
 
 class PatchMismatchFailure(PatchError):
     """Patch validation error."""
-    pass
+    pass
@@ -46,18 +46,18 @@ run_insvc_patch_scripts_cmd = "/usr/sbin/run-patch-scripts"
 pa = None
 
 # Smart commands
-smart_cmd = [ "/usr/bin/smart" ]
-smart_quiet = smart_cmd + [ "--quiet" ]
-smart_update = smart_quiet + [ "update" ]
-smart_newer = smart_quiet + [ "newer" ]
-smart_orphans = smart_quiet + [ "query", "--orphans", "--show-format", "$name\n" ]
-smart_query = smart_quiet + [ "query" ]
-smart_query_repos = smart_quiet + [ "query", "--channel=base", "--channel=updates" ]
-smart_install_cmd = smart_cmd + [ "install", "--yes", "--explain" ]
-smart_remove_cmd = smart_cmd + [ "remove", "--yes", "--explain" ]
-smart_query_installed = smart_quiet + [ "query", "--installed", "--show-format", "$name $version\n" ]
-smart_query_base = smart_quiet + [ "query", "--channel=base", "--show-format", "$name $version\n" ]
-smart_query_updates = smart_quiet + [ "query", "--channel=updates", "--show-format", "$name $version\n" ]
+smart_cmd = ["/usr/bin/smart"]
+smart_quiet = smart_cmd + ["--quiet"]
+smart_update = smart_quiet + ["update"]
+smart_newer = smart_quiet + ["newer"]
+smart_orphans = smart_quiet + ["query", "--orphans", "--show-format", "$name\n"]
+smart_query = smart_quiet + ["query"]
+smart_query_repos = smart_quiet + ["query", "--channel=base", "--channel=updates"]
+smart_install_cmd = smart_cmd + ["install", "--yes", "--explain"]
+smart_remove_cmd = smart_cmd + ["remove", "--yes", "--explain"]
+smart_query_installed = smart_quiet + ["query", "--installed", "--show-format", "$name $version\n"]
+smart_query_base = smart_quiet + ["query", "--channel=base", "--show-format", "$name $version\n"]
+smart_query_updates = smart_quiet + ["query", "--channel=updates", "--show-format", "$name $version\n"]
 
 
 def setflag(fname):

@@ -334,7 +334,7 @@ class PatchAgent(PatchService):
         # Get the current channel config
         try:
             output = subprocess.check_output(smart_cmd +
-                                             [ "channel", "--yaml" ],
+                                             ["channel", "--yaml"],
                                              stderr=subprocess.STDOUT)
             config = yaml.load(output)
         except subprocess.CalledProcessError as e:

@@ -345,18 +345,18 @@ class PatchAgent(PatchService):
             LOG.exception("Failed to query channels")
             return False
 
-        expected = [ { 'channel': 'rpmdb',
-                       'type': 'rpm-sys',
-                       'name': 'RPM Database',
-                       'baseurl': None },
-                     { 'channel': 'base',
-                       'type': 'rpm-md',
-                       'name': 'Base',
-                       'baseurl': "http://controller/feed/rel-%s" % SW_VERSION},
-                     { 'channel': 'updates',
-                       'type': 'rpm-md',
-                       'name': 'Patches',
-                       'baseurl': "http://controller/updates/rel-%s" % SW_VERSION} ]
+        expected = [{'channel': 'rpmdb',
+                     'type': 'rpm-sys',
+                     'name': 'RPM Database',
+                     'baseurl': None},
+                    {'channel': 'base',
+                     'type': 'rpm-md',
+                     'name': 'Base',
+                     'baseurl': "http://controller/feed/rel-%s" % SW_VERSION},
+                    {'channel': 'updates',
+                     'type': 'rpm-md',
+                     'name': 'Patches',
+                     'baseurl': "http://controller/updates/rel-%s" % SW_VERSION}]
 
         updated = False
 

@@ -367,7 +367,7 @@ class PatchAgent(PatchService):
             ch_baseurl = item['baseurl']
 
             add_channel = False
 
             if channel in config:
                 # Verify existing channel config
                 if (config[channel].get('type') != ch_type or

@@ -378,8 +378,8 @@ class PatchAgent(PatchService):
                     LOG.warning("Invalid smart config found for %s" % channel)
                     try:
                         output = subprocess.check_output(smart_cmd +
-                                                         [ "channel", "--yes",
-                                                           "--remove", channel ],
+                                                         ["channel", "--yes",
+                                                          "--remove", channel],
                                                          stderr=subprocess.STDOUT)
                     except subprocess.CalledProcessError as e:
                         LOG.exception("Failed to configure %s channel" % channel)

@@ -392,11 +392,11 @@ class PatchAgent(PatchService):
 
             if add_channel:
                 LOG.info("Adding channel %s" % channel)
-                cmd_args = [ "channel", "--yes", "--add", channel,
-                             "type=%s" % ch_type,
-                             "name=%s" % ch_name ]
+                cmd_args = ["channel", "--yes", "--add", channel,
+                            "type=%s" % ch_type,
+                            "name=%s" % ch_name]
                 if ch_baseurl is not None:
-                    cmd_args += [ "baseurl=%s" % ch_baseurl ]
+                    cmd_args += ["baseurl=%s" % ch_baseurl]
 
                 try:
                     output = subprocess.check_output(smart_cmd + cmd_args,

@@ -411,7 +411,7 @@ class PatchAgent(PatchService):
         # Validate the smart config
         try:
             output = subprocess.check_output(smart_cmd +
-                                             [ "config", "--yaml" ],
+                                             ["config", "--yaml"],
                                              stderr=subprocess.STDOUT)
             config = yaml.load(output)
         except subprocess.CalledProcessError as e:

@@ -429,8 +429,8 @@ class PatchAgent(PatchService):
             LOG.warning("Setting %s option" % nolinktos)
             try:
                 output = subprocess.check_output(smart_cmd +
-                                                 [ "config", "--set",
-                                                   "%s=true" % nolinktos ],
+                                                 ["config", "--set",
+                                                  "%s=true" % nolinktos],
                                                  stderr=subprocess.STDOUT)
             except subprocess.CalledProcessError as e:
                 LOG.exception("Failed to configure %s option" % nolinktos)

@@ -446,8 +446,8 @@ class PatchAgent(PatchService):
             LOG.warning("Setting %s option" % nosignature)
             try:
                 output = subprocess.check_output(smart_cmd +
-                                                 [ "config", "--set",
-                                                   "%s=false" % nosignature],
+                                                 ["config", "--set",
+                                                  "%s=false" % nosignature],
                                                  stderr=subprocess.STDOUT)
             except subprocess.CalledProcessError as e:
                 LOG.exception("Failed to configure %s option" % nosignature)

@@ -542,7 +542,7 @@ class PatchAgent(PatchService):
         highest_version = None
 
         try:
-            query = subprocess.check_output(smart_query_repos + [ "--show-format", '$version\n', pkgname ])
+            query = subprocess.check_output(smart_query_repos + ["--show-format", '$version\n', pkgname])
             # The last non-blank version is the highest
             for version in query.splitlines():
                 if version == '':

@@ -562,7 +562,7 @@ class PatchAgent(PatchService):
 
         # Get the installed version
         try:
-            query = subprocess.check_output(smart_query + [ "--installed", "--show-format", '$version\n', pkgname ])
+            query = subprocess.check_output(smart_query + ["--installed", "--show-format", '$version\n', pkgname])
             for version in query.splitlines():
                 if version == '':
                     continue

@@ -932,8 +932,8 @@ class PatchAgent(PatchService):
         remaining = 30
 
         while True:
-            inputs = [ self.sock_in, self.listener ] + connections
-            outputs = [ ]
+            inputs = [self.sock_in, self.listener] + connections
+            outputs = []
 
             rlist, wlist, xlist = select.select(inputs, outputs, inputs, remaining)
 
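Aside: the smart_* argv lists reformatted above are built by plain list concatenation, so every query shares the base invocation. A short sketch of that composition pattern, using the same values as the module:

    smart_cmd = ["/usr/bin/smart"]
    smart_quiet = smart_cmd + ["--quiet"]
    smart_query_installed = smart_quiet + ["query", "--installed",
                                           "--show-format", "$name $version\n"]

    # Yields a ready-to-run argv, e.g. for subprocess.check_output():
    # ['/usr/bin/smart', '--quiet', 'query', '--installed',
    #  '--show-format', '$name $version\n']
    print(smart_query_installed)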
@@ -84,58 +84,58 @@ def print_help():
     print "Subcomands:"
     print ""
     print textwrap.fill(" {0:<15} ".format("upload:") + help_upload,
-                        width=TERM_WIDTH, subsequent_indent=' '*20)
+                        width=TERM_WIDTH, subsequent_indent=' ' * 20)
     print ""
     print textwrap.fill(" {0:<15} ".format("upload-dir:") + help_upload_dir,
-                        width=TERM_WIDTH, subsequent_indent=' '*20)
+                        width=TERM_WIDTH, subsequent_indent=' ' * 20)
     print ""
     print textwrap.fill(" {0:<15} ".format("apply:") + help_apply,
-                        width=TERM_WIDTH, subsequent_indent=' '*20)
+                        width=TERM_WIDTH, subsequent_indent=' ' * 20)
     print textwrap.fill(help_patch_args,
-                        width=TERM_WIDTH, initial_indent=' '*20, subsequent_indent=' '*20)
+                        width=TERM_WIDTH, initial_indent=' ' * 20, subsequent_indent=' ' * 20)
     print ""
     print textwrap.fill(" {0:<15} ".format("remove:") + help_remove,
-                        width=TERM_WIDTH, subsequent_indent=' '*20)
+                        width=TERM_WIDTH, subsequent_indent=' ' * 20)
     print textwrap.fill(help_patch_args,
-                        width=TERM_WIDTH, initial_indent=' '*20, subsequent_indent=' '*20)
+                        width=TERM_WIDTH, initial_indent=' ' * 20, subsequent_indent=' ' * 20)
     print ""
     print textwrap.fill(" {0:<15} ".format("delete:") + help_delete,
-                        width=TERM_WIDTH, subsequent_indent=' '*20)
+                        width=TERM_WIDTH, subsequent_indent=' ' * 20)
     print textwrap.fill(help_patch_args,
-                        width=TERM_WIDTH, initial_indent=' '*20, subsequent_indent=' '*20)
+                        width=TERM_WIDTH, initial_indent=' ' * 20, subsequent_indent=' ' * 20)
     print ""
     print textwrap.fill(" {0:<15} ".format("query:") + help_query,
-                        width=TERM_WIDTH, subsequent_indent=' '*20)
+                        width=TERM_WIDTH, subsequent_indent=' ' * 20)
     print ""
     print textwrap.fill(" {0:<15} ".format("show:") + help_show,
-                        width=TERM_WIDTH, subsequent_indent=' '*20)
+                        width=TERM_WIDTH, subsequent_indent=' ' * 20)
     print ""
     print textwrap.fill(" {0:<15} ".format("what-requires:") + help_what_requires,
-                        width=TERM_WIDTH, subsequent_indent=' '*20)
+                        width=TERM_WIDTH, subsequent_indent=' ' * 20)
     print ""
     print textwrap.fill(" {0:<15} ".format("query-hosts:") + help_query_hosts,
-                        width=TERM_WIDTH, subsequent_indent=' '*20)
+                        width=TERM_WIDTH, subsequent_indent=' ' * 20)
     print ""
     print textwrap.fill(" {0:<15} ".format("host-install:") + help_host_install,
-                        width=TERM_WIDTH, subsequent_indent=' '*20)
+                        width=TERM_WIDTH, subsequent_indent=' ' * 20)
     print ""
     print textwrap.fill(" {0:<15} ".format("host-install-async:") + help_host_install_async,
-                        width=TERM_WIDTH, subsequent_indent=' '*20)
+                        width=TERM_WIDTH, subsequent_indent=' ' * 20)
     print ""
    print textwrap.fill(" {0:<15} ".format("install-local:") + help_install_local,
-                        width=TERM_WIDTH, subsequent_indent=' '*20)
+                        width=TERM_WIDTH, subsequent_indent=' ' * 20)
     print ""
     print textwrap.fill(" {0:<15} ".format("drop-host:") + help_drop_host,
-                        width=TERM_WIDTH, subsequent_indent=' '*20)
+                        width=TERM_WIDTH, subsequent_indent=' ' * 20)
     print ""
     print textwrap.fill(" {0:<15} ".format("query-dependencies:") + help_query_dependencies,
-                        width=TERM_WIDTH, subsequent_indent=' '*20)
+                        width=TERM_WIDTH, subsequent_indent=' ' * 20)
     print ""
     print textwrap.fill(" {0:<15} ".format("commit:") + help_commit,
-                        width=TERM_WIDTH, subsequent_indent=' '*20)
+                        width=TERM_WIDTH, subsequent_indent=' ' * 20)
     print ""
     print textwrap.fill(" {0:<15} ".format("--os-region-name:") + help_region_name,
-                        width=TERM_WIDTH, subsequent_indent=' '*20)
+                        width=TERM_WIDTH, subsequent_indent=' ' * 20)
     print ""
 
     exit(1)

@@ -284,73 +284,73 @@ def print_patch_show_result(req):
 
         if "sw_version" in pd[patch_id] and pd[patch_id]["sw_version"] != "":
             print textwrap.fill(" {0:<15} ".format("Release:") + pd[patch_id]["sw_version"],
-                                width=TERM_WIDTH, subsequent_indent=' '*20)
+                                width=TERM_WIDTH, subsequent_indent=' ' * 20)
 
         if "patchstate" in pd[patch_id] and pd[patch_id]["patchstate"] != "":
             print textwrap.fill(" {0:<15} ".format("Patch State:") + pd[patch_id]["patchstate"],
-                                width=TERM_WIDTH, subsequent_indent=' '*20)
+                                width=TERM_WIDTH, subsequent_indent=' ' * 20)
 
             if pd[patch_id]["patchstate"] == "n/a":
                 if "repostate" in pd[patch_id] and pd[patch_id]["repostate"] != "":
                     print textwrap.fill(" {0:<15} ".format("Repo State:") + pd[patch_id]["repostate"],
-                                        width=TERM_WIDTH, subsequent_indent=' '*20)
+                                        width=TERM_WIDTH, subsequent_indent=' ' * 20)
 
         if "status" in pd[patch_id] and pd[patch_id]["status"] != "":
             print textwrap.fill(" {0:<15} ".format("Status:") + pd[patch_id]["status"],
-                                width=TERM_WIDTH, subsequent_indent=' '*20)
+                                width=TERM_WIDTH, subsequent_indent=' ' * 20)
 
         if "unremovable" in pd[patch_id] and pd[patch_id]["unremovable"] != "":
             print textwrap.fill(" {0:<15} ".format("Unremovable:") + pd[patch_id]["unremovable"],
-                                width=TERM_WIDTH, subsequent_indent=' '*20)
+                                width=TERM_WIDTH, subsequent_indent=' ' * 20)
 
         if "reboot_required" in pd[patch_id] and pd[patch_id]["reboot_required"] != "":
             print textwrap.fill(" {0:<15} ".format("RR:") + pd[patch_id]["reboot_required"],
-                                width=TERM_WIDTH, subsequent_indent=' '*20)
+                                width=TERM_WIDTH, subsequent_indent=' ' * 20)
 
         if "summary" in pd[patch_id] and pd[patch_id]["summary"] != "":
             print textwrap.fill(" {0:<15} ".format("Summary:") + pd[patch_id]["summary"],
-                                width=TERM_WIDTH, subsequent_indent=' '*20)
+                                width=TERM_WIDTH, subsequent_indent=' ' * 20)
 
         if "description" in pd[patch_id] and pd[patch_id]["description"] != "":
             first_line = True
             for line in pd[patch_id]["description"].split('\n'):
                 if first_line:
                     print textwrap.fill(" {0:<15} ".format("Description:") + line,
-                                        width=TERM_WIDTH, subsequent_indent=' '*20)
+                                        width=TERM_WIDTH, subsequent_indent=' ' * 20)
                     first_line = False
                 else:
                     print textwrap.fill(line,
-                                        width=TERM_WIDTH, subsequent_indent=' '*20,
-                                        initial_indent=' '*20)
+                                        width=TERM_WIDTH, subsequent_indent=' ' * 20,
+                                        initial_indent=' ' * 20)
 
         if "install_instructions" in pd[patch_id] and pd[patch_id]["install_instructions"] != "":
             print " Install Instructions:"
             for line in pd[patch_id]["install_instructions"].split('\n'):
                 print textwrap.fill(line,
-                                    width=TERM_WIDTH, subsequent_indent=' '*20,
-                                    initial_indent=' '*20)
+                                    width=TERM_WIDTH, subsequent_indent=' ' * 20,
+                                    initial_indent=' ' * 20)
 
         if "warnings" in pd[patch_id] and pd[patch_id]["warnings"] != "":
             first_line = True
             for line in pd[patch_id]["warnings"].split('\n'):
                 if first_line:
                     print textwrap.fill(" {0:<15} ".format("Warnings:") + line,
-                                        width=TERM_WIDTH, subsequent_indent=' '*20)
+                                        width=TERM_WIDTH, subsequent_indent=' ' * 20)
                     first_line = False
                 else:
                     print textwrap.fill(line,
-                                        width=TERM_WIDTH, subsequent_indent=' '*20,
-                                        initial_indent=' '*20)
+                                        width=TERM_WIDTH, subsequent_indent=' ' * 20,
+                                        initial_indent=' ' * 20)
 
         if "requires" in pd[patch_id] and len(pd[patch_id]["requires"]) > 0:
             print " Requires:"
             for req_patch in sorted(pd[patch_id]["requires"]):
-                print ' '*20 + req_patch
+                print ' ' * 20 + req_patch
 
         if "contents" in data and patch_id in data["contents"]:
             print " Contents:"
             for pkg in sorted(data["contents"][patch_id]):
-                print ' '*20 + pkg
+                print ' ' * 20 + pkg
 
         print "\n"

@@ -597,7 +597,7 @@ def patch_commit_req(debug, args):
     print
     commit_warning = "WARNING: Committing a patch is an irreversible operation. " + \
                      "Committed patches cannot be removed."
-    print textwrap.fill(commit_warning, width=TERM_WIDTH, subsequent_indent=' '*9)
+    print textwrap.fill(commit_warning, width=TERM_WIDTH, subsequent_indent=' ' * 9)
     print
 
     user_input = raw_input("Would you like to continue? [y/N]: ")

@@ -699,19 +699,19 @@ def print_query_hosts_result(req):
 
         for agent in sorted(agents, key=lambda a: a["hostname"]):
             patch_current_field = "Yes" if agent["patch_current"] else "No"
-            if agent.get("interim_state") == True:
+            if agent.get("interim_state") is True:
                 patch_current_field = "Pending"
 
             if agent["patch_failed"]:
                 patch_current_field = "Failed"
 
             print "{0:<{width_hn}} {1:<{width_ip}} {2:^{width_pc}} {3:^{width_rr}} {4:^{width_rel}} {5:^{width_state}}".format(
-                agent["hostname"],
-                agent["ip"],
+                agent["hostname"],
+                agent["ip"],
                 patch_current_field,
                 "Yes" if agent["requires_reboot"] else "No",
-                agent["sw_version"],
-                agent["state"],
+                agent["sw_version"],
+                agent["state"],
                 width_hn=width_hn, width_ip=width_ip, width_pc=width_pc, width_rr=width_rr, width_rel=width_rel, width_state=width_state)
 
     elif req.status_code == 500:

@@ -862,7 +862,7 @@ def wait_for_install_complete(agent_ip):
                 break
 
             if state == constants.PATCH_AGENT_STATE_INSTALLING or \
-                    interim_state == True:
+                    interim_state is True:
                 # Still installing
                 sys.stdout.write(".")
                 sys.stdout.flush()

@@ -1206,7 +1206,7 @@ def check_for_os_region_name():
     try:
         subprocess.check_output("pgrep -f dcorch-api-proxy", shell=True)
     except subprocess.CalledProcessError:
-        print ("Command must be run from the active controller.")
+        print("Command must be run from the active controller.")
         exit(1)
 
     # get a token and fetch the internal endpoint in SystemController
@ -537,7 +537,6 @@ class PatchMessageDropHostReq(messages.PatchMessage):
|
||||
sock.sendto(message, (cfg.controller_mcast_group, cfg.controller_port))
|
||||
|
||||
|
||||
|
||||
class PatchController(PatchService):
|
||||
def __init__(self):
|
||||
PatchService.__init__(self)
|
||||
@ -1561,7 +1560,7 @@ class PatchController(PatchService):
|
||||
self.inc_patch_op_counter()
|
||||
|
||||
self.patch_data_lock.acquire()
|
||||
#self.patch_data.load_all()
|
||||
# self.patch_data.load_all()
|
||||
self.check_patch_states()
|
||||
self.patch_data_lock.release()
|
||||
|
||||
@ -1813,7 +1812,7 @@ class PatchController(PatchService):
|
||||
raise e
|
||||
|
||||
if pkgname not in keep[patch_sw_version]:
|
||||
keep[patch_sw_version][pkgname] = { arch: pkgver }
|
||||
keep[patch_sw_version][pkgname] = {arch: pkgver}
|
||||
continue
|
||||
elif arch not in keep[patch_sw_version][pkgname]:
|
||||
keep[patch_sw_version][pkgname][arch] = pkgver
|
||||
@ -1823,9 +1822,9 @@ class PatchController(PatchService):
|
||||
keep_pkgver = keep[patch_sw_version][pkgname][arch]
|
||||
if pkgver > keep_pkgver:
|
||||
if pkgname not in cleanup[patch_sw_version]:
|
||||
cleanup[patch_sw_version][pkgname] = { arch: [ keep_pkgver ] }
|
||||
cleanup[patch_sw_version][pkgname] = {arch: [keep_pkgver]}
|
||||
elif arch not in cleanup[patch_sw_version][pkgname]:
|
||||
cleanup[patch_sw_version][pkgname][arch] = [ keep_pkgver ]
|
||||
cleanup[patch_sw_version][pkgname][arch] = [keep_pkgver]
|
||||
else:
|
||||
cleanup[patch_sw_version][pkgname][arch].append(keep_pkgver)
|
||||
|
||||
@ -1845,9 +1844,9 @@ class PatchController(PatchService):
|
||||
else:
|
||||
# Put this pkg in the cleanup list
|
||||
if pkgname not in cleanup[patch_sw_version]:
|
||||
cleanup[patch_sw_version][pkgname] = { arch: [ pkgver ] }
|
||||
cleanup[patch_sw_version][pkgname] = {arch: [pkgver]}
|
||||
elif arch not in cleanup[patch_sw_version][pkgname]:
|
||||
cleanup[patch_sw_version][pkgname][arch] = [ pkgver ]
|
||||
cleanup[patch_sw_version][pkgname][arch] = [pkgver]
|
||||
else:
|
||||
cleanup[patch_sw_version][pkgname][arch].append(pkgver)
|
||||
|
||||
@ -1868,7 +1867,7 @@ class PatchController(PatchService):
|
||||
disk_space += statinfo.st_size
|
||||
|
||||
if dry_run:
|
||||
results["info"] = "This commit operation would free %0.2f MiB" % (disk_space/(1024.0*1024.0))
|
||||
results["info"] = "This commit operation would free %0.2f MiB" % (disk_space / (1024.0 * 1024.0))
|
||||
return results
|
||||
|
||||
# Do the commit
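The brace and arithmetic rewrites in these hunks drop the padding inside brackets and add spacing around operators, per pep8's whitespace rules. A small sketch with made-up values showing the shapes this commit converges on, including the MiB conversion used for the dry-run message:

    disk_space = 3 * 1024 * 1024  # hypothetical byte count

    # old style: keep = { arch: [ pkgver ] }   and   disk_space/(1024.0*1024.0)
    keep = {'x86_64': ['1.0-1']}
    mib_free = disk_space / (1024.0 * 1024.0)
    print("This commit operation would free %0.2f MiB" % mib_free)  # 3.00 MiB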
@ -410,7 +410,7 @@ class PatchData:
        else:
            outfile.write(minidom.parseString(rough_xml).toprettyxml(indent="  "))
        outfile.close()
        os.rename(new_filename, filename)
        os.rename(new_filename, filename)

    def parse_metadata(self,
                       filename,
@ -773,16 +773,16 @@ class PatchFile:
        for rpmfile in self.rpmlist.keys():
            shutil.copy(rpmfile, tmpdir)

        # add file signatures to RPMs
        try:
            subprocess.check_call(["sign-rpms", "-d", tmpdir])
        except subprocess.CalledProcessError as e:
            print "Failed to add file signatures to RPMs. Call to sign-rpms process returned non-zero exit status %i" % e.returncode
        # add file signatures to RPMs
        try:
            subprocess.check_call(["sign-rpms", "-d", tmpdir])
        except subprocess.CalledProcessError as e:
            print "Failed to add file signatures to RPMs. Call to sign-rpms process returned non-zero exit status %i" % e.returncode
            os.chdir(orig_wd)
            shutil.rmtree(tmpdir)
            raise SystemExit(e.returncode)
            shutil.rmtree(tmpdir)
            raise SystemExit(e.returncode)

        # generate tar file
        # generate tar file
        tar = tarfile.open("software.tar", "w")
        for rpmfile in self.rpmlist.keys():
            tar.add(os.path.basename(rpmfile))
@ -824,7 +824,8 @@ class PatchFile:
        # Note: if cert_type requests a formal signature, but the signing key
        # is not found, we'll instead sign with the 'dev' key and
        # need_resign_with_formal is set to True.
        need_resign_with_formal = sign_files(['metadata.tar', 'software.tar'],
        need_resign_with_formal = sign_files(
            ['metadata.tar', 'software.tar'],
            detached_signature_file,
            cert_type=cert_type)

@ -897,7 +898,7 @@ class PatchFile:
        if verify_signature:
            # If there should be a detached signature, verify it
            if os.path.exists(detached_signature_file):
                filenames=["metadata.tar", "software.tar"]
                filenames = ["metadata.tar", "software.tar"]
                sig_valid = verify_files(
                    filenames,
                    detached_signature_file,
@ -947,11 +948,11 @@ class PatchFile:
            try:
                PatchFile.read_patch(abs_patch, metadata_only=True, cert_type=[cert_type_str])
            except PatchValidationFailure as e:
                pass;
                pass
            else:
                # Successfully opened the file for reading, and we have discovered the cert_type
                r["cert"] = cert_type_str
                break;
                break

        if "cert" not in r:
            # If cert is unknown, then file is not yet open for reading.
@ -973,7 +974,7 @@ class PatchFile:
                          "warnings", "reboot_required"]:
                    r[f] = thispatch.query_line(patch_id, f)
            else:
                if field not in [ 'id', 'cert' ]:
                if field not in ['id', 'cert']:
                    r[field] = thispatch.query_line(patch_id, field)

        except PatchValidationFailure as e:
@ -1185,7 +1186,7 @@ def patch_build():
                                    'all-nodes='])
    except getopt.GetoptError:
        print "Usage: %s [ <args> ] ... <rpm list>" \
            % os.path.basename(sys.argv[0])
            % os.path.basename(sys.argv[0])
        print "Options:"
        print "\t--id <id>               Patch ID"
        print "\t--release <version>     Platform release version"

@ -14,15 +14,15 @@ from binascii import a2b_base64
from cgcs_patch.patch_verify import read_RSA_key, cert_type_formal_str, cert_type_dev_str

# To save memory, read and hash 1M of files at a time
default_blocksize=1*1024*1024
default_blocksize = 1 * 1024 * 1024

# When we sign patches, look for private keys in the following paths
#
# The (currently hardcoded) path on the signing server will be replaced
# by the capability to specify filename from calling function.
private_key_files={cert_type_formal_str: '/signing/keys/formal-private-key.pem',
                   cert_type_dev_str: os.path.expandvars('$MY_REPO/build-tools/signing/dev-private-key.pem')
                   }
private_key_files = {cert_type_formal_str: '/signing/keys/formal-private-key.pem',
                     cert_type_dev_str: os.path.expandvars('$MY_REPO/build-tools/signing/dev-private-key.pem')
                     }


def sign_files(filenames, signature_file, private_key=None, cert_type=None):
@ -39,21 +39,21 @@ def sign_files(filenames, signature_file, private_key=None, cert_type=None):
    """

    # Hash the data across all files
    blocksize=default_blocksize
    blocksize = default_blocksize
    data_hash = SHA256.new()
    for filename in filenames:
        with open(filename, 'rb') as infile:
            data=infile.read(blocksize)
            data = infile.read(blocksize)
            while len(data) > 0:
                data_hash.update(data)
                data=infile.read(blocksize)
                data = infile.read(blocksize)

    # Find a private key to use, if not already provided
    need_resign_with_formal = False
    if private_key is None:
        if cert_type is not None:
            # A Specific key is asked for
            assert (cert_type in private_key_files.keys()),"cert_type=%s is not a known cert type" % cert_type
            assert (cert_type in private_key_files.keys()), "cert_type=%s is not a known cert type" % cert_type
            dict_key = cert_type
            filename = private_key_files[dict_key]
            # print 'cert_type given: Checking to see if ' + filename + ' exists\n'
@ -75,7 +75,7 @@ def sign_files(filenames, signature_file, private_key=None, cert_type=None):
            # print 'Getting private key from ' + filename + '\n'
            private_key = read_RSA_key(open(filename, 'rb').read())

    assert (private_key is not None),"Could not find signing key"
    assert (private_key is not None), "Could not find signing key"

    # Encrypt the hash (sign the data) with the key we find
    signer = PKCS1_PSS.new(private_key)
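For reference, `SHA256` and `PKCS1_PSS` here come from PyCrypto's `Crypto.Hash` and `Crypto.Signature` modules. A self-contained sketch of the same sign/verify pairing, using a hypothetical throwaway key rather than the patching keys the module loads from disk:

    from Crypto.PublicKey import RSA
    from Crypto.Signature import PKCS1_PSS
    from Crypto.Hash import SHA256

    key = RSA.generate(2048)  # hypothetical key; the real code reads PEM files

    data_hash = SHA256.new(b"patch payload")
    signature = PKCS1_PSS.new(key).sign(data_hash)  # sign with the private key

    verifier = PKCS1_PSS.new(key.publickey())       # verify with the public half
    assert verifier.verify(SHA256.new(b"patch payload"), signature)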
@ -18,16 +18,17 @@ from binascii import a2b_base64
from cgcs_patch.certificates import dev_certificate, formal_certificate

# To save memory, read and hash 1M of files at a time
default_blocksize=1*1024*1024
default_blocksize = 1 * 1024 * 1024

dev_certificate_marker='/etc/pki/wrs/dev_certificate_enable.bin'
dev_certificate_marker = '/etc/pki/wrs/dev_certificate_enable.bin'
LOG = logging.getLogger('main_logger')

cert_type_dev_str='dev'
cert_type_formal_str='formal'
cert_type_dev=[cert_type_dev_str]
cert_type_formal=[cert_type_formal_str]
cert_type_all=[cert_type_dev_str, cert_type_formal_str]
cert_type_dev_str = 'dev'
cert_type_formal_str = 'formal'
cert_type_dev = [cert_type_dev_str]
cert_type_formal = [cert_type_formal_str]
cert_type_all = [cert_type_dev_str, cert_type_formal_str]


def verify_hash(data_hash, signature_bytes, certificate_list):
    """
@ -74,7 +75,7 @@ def get_public_certificates_by_type(cert_type=cert_type_all):
    """
    Builds a list of accepted certificates which can be used to validate
    further things. This list may contain multiple certificates depending on
    the configuration of the system and the value of cert_type.
    the configuration of the system and the value of cert_type.

    :param cert_type: A list of strings, certificate types to include in list
                      'formal' - include formal certificate if available
@ -137,7 +138,7 @@ def read_RSA_key(key_data):
    # the key object
    #
    # We need to strip the BEGIN and END lines from PEM first
    x509lines = key_data.replace(' ','').split()
    x509lines = key_data.replace(' ', '').split()
    x509text = ''.join(x509lines[1:-1])
    x509data = DerSequence()
    x509data.decode(a2b_base64(x509text))
@ -166,14 +167,14 @@ def verify_files(filenames, signature_file, cert_type=None):
    """

    # Hash the data across all files
    blocksize=default_blocksize
    blocksize = default_blocksize
    data_hash = SHA256.new()
    for filename in filenames:
        with open(filename, 'rb') as infile:
            data=infile.read(blocksize)
            data = infile.read(blocksize)
            while len(data) > 0:
                data_hash.update(data)
                data=infile.read(blocksize)
                data = infile.read(blocksize)

    # Get the signature
    with open(signature_file, 'rb') as sig_file:
@ -185,4 +186,3 @@ def verify_files(filenames, signature_file, cert_type=None):
    else:
        certificate_list = get_public_certificates_by_type(cert_type=cert_type)
    return verify_hash(data_hash, signature_bytes, certificate_list)
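Both `sign_files` and `verify_files` hash their inputs in 1 MiB blocks so arbitrarily large tarballs never have to fit in memory. The same loop, sketched with the standard library's `hashlib` (whose `update()` interface matches the `Crypto.Hash.SHA256` object the modules actually use):

    import hashlib

    def hash_files(filenames, blocksize=1 * 1024 * 1024):
        # Accumulate a single digest across all files, one block at a time
        data_hash = hashlib.sha256()
        for filename in filenames:
            with open(filename, 'rb') as infile:
                data = infile.read(blocksize)
                while len(data) > 0:
                    data_hash.update(data)
                    data = infile.read(blocksize)
        return data_hash.hexdigest()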
@ -10,7 +10,8 @@ import posixfile
import string
import time

directory="/localdisk/designer/jenkins/patch_ids"
directory = "/localdisk/designer/jenkins/patch_ids"


def get_unique_id(filename, digits=4):
    counter = 1
@ -29,8 +30,8 @@ def get_unique_id(filename, digits=4):
        print "creation of file '%s' failed" % path
        return -1

    file.seek(0) # rewind
    format = "%%0%dd" % digits
    file.seek(0)  # rewind
    format = "%%0%dd" % digits
    file.write(format % counter)

    # Note: close releases lock
@ -38,6 +39,7 @@ def get_unique_id(filename, digits=4):

    return counter


def get_patch_id(version, prefix="CGCS", digits=4):
    filename = "%s_%s_patchid" % (prefix, version)
    id = get_unique_id(filename)
@ -46,4 +48,3 @@ def get_patch_id(version, prefix="CGCS", digits=4):
    patch_id_format = "%%s_%%s_PATCH_%%0%dd" % digits
    patch_id = patch_id_format % (prefix, version, id)
    return patch_id
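The doubled percent signs in `get_patch_id` are a two-stage format: the first substitution bakes `digits` into a width specifier, the second fills in the actual values. A worked example with hypothetical inputs:

    digits = 4
    patch_id_format = "%%s_%%s_PATCH_%%0%dd" % digits  # yields "%s_%s_PATCH_%04d"
    print(patch_id_format % ("CGCS", "18.10", 7))      # prints "CGCS_18.10_PATCH_0007"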
@ -11,11 +11,12 @@ import getopt
import sys


opts = ['sw_version=', 'prefix=' ]
opts = ['sw_version=', 'prefix=']

server = 'yow-cgts2-lx.wrs.com'
port = 8888


def request_patch_id(sw_version="1.01", prefix="CGCS"):
    raw_parms = {}
    raw_parms['sw_version'] = sw_version
@ -27,6 +28,7 @@ def request_patch_id(sw_version="1.01", prefix="CGCS"):
    response = urllib2.urlopen(url, params).read()
    return response


def main():
    optlist, remainder = getopt.getopt(sys.argv[1:], '', opts)

@ -11,13 +11,13 @@ import web
import patch_id_allocator as pida


port = 8888

urls = (
    '/get_patch_id', 'get_patch_id',
)


class get_patch_id:
    def GET(self):
        data = web.input(sw_version=None, prefix="CGCS")
@ -28,16 +28,17 @@ class get_patch_id:
        data = web.input(sw_version=None, prefix="CGCS")
        output = pida.get_patch_id(data.sw_version, data.prefix)
        return output


class MyApplication(web.application):
    def run(self, port=8080, *middleware):
        func = self.wsgifunc(*middleware)
        return web.httpserver.runsimple(func, ('0.0.0.0', port))


def main():
    app = MyApplication(urls, globals())
    app.run(port=port)

if __name__ == "__main__":
    main()

@ -8,13 +8,13 @@

import setuptools

setuptools.setup(name='cgcs_patch',
setuptools.setup(
    name='cgcs_patch',
    version='1.0',
    description='CGCS Patch',
    packages=setuptools.find_packages(),
    package_data = {
    package_data={
        # Include templates
        '': ['templates/*'],
    }
)

@ -28,8 +28,6 @@ from cgcs_patch.constants import ENABLE_DEV_CERTIFICATE_PATCH_IDENTIFIER
LOG_FILE = '/var/log/patch-alarms.log'
PID_FILE = '/var/run/patch-alarm-manager.pid'

#logging.basicConfig(filename=LOG_FILE, level=logging.DEBUG)


###################
# METHODS
@ -97,10 +95,10 @@ class PatchAlarmDaemon():
        if 'pd' in data:
            for patch_id, metadata in data['pd'].iteritems():
                if 'patchstate' in metadata and \
                   (metadata['patchstate'] == 'Partial-Apply' or metadata['patchstate'] == 'Partial-Remove'):
                        (metadata['patchstate'] == 'Partial-Apply' or metadata['patchstate'] == 'Partial-Remove'):
                    raise_pip_alarm = True
                if 'status' in metadata and \
                   (metadata['status'] == 'OBS' or metadata['status'] == 'Obsolete'):
                        (metadata['status'] == 'OBS' or metadata['status'] == 'Obsolete'):
                    raise_obs_alarm = True
                # If there is a patch in the system (in any state) that is
                # named some variation of "enable-dev-certificate", raise
@ -113,16 +111,16 @@ class PatchAlarmDaemon():
                                              entity_instance_id)
        if raise_pip_alarm and pip_alarm is None:
            logging.info("Raising patch-in-progress alarm")
            fault = fm_api.Fault(alarm_id = fm_constants.FM_ALARM_ID_PATCH_IN_PROGRESS,
                                 alarm_type = fm_constants.FM_ALARM_TYPE_5,
                                 alarm_state = fm_constants.FM_ALARM_STATE_SET,
                                 entity_type_id = fm_constants.FM_ENTITY_TYPE_HOST,
                                 entity_instance_id = entity_instance_id,
                                 severity = fm_constants.FM_ALARM_SEVERITY_MINOR,
                                 reason_text = 'Patching operation in progress',
                                 probable_cause = fm_constants.ALARM_PROBABLE_CAUSE_65,
                                 proposed_repair_action = 'Complete reboots of affected hosts',
                                 service_affecting = False)
            fault = fm_api.Fault(alarm_id=fm_constants.FM_ALARM_ID_PATCH_IN_PROGRESS,
                                 alarm_type=fm_constants.FM_ALARM_TYPE_5,
                                 alarm_state=fm_constants.FM_ALARM_STATE_SET,
                                 entity_type_id=fm_constants.FM_ENTITY_TYPE_HOST,
                                 entity_instance_id=entity_instance_id,
                                 severity=fm_constants.FM_ALARM_SEVERITY_MINOR,
                                 reason_text='Patching operation in progress',
                                 probable_cause=fm_constants.ALARM_PROBABLE_CAUSE_65,
                                 proposed_repair_action='Complete reboots of affected hosts',
                                 service_affecting=False)

            self.fm_api.set_fault(fault)
        elif not raise_pip_alarm and pip_alarm is not None:
@ -134,16 +132,16 @@ class PatchAlarmDaemon():
                                              entity_instance_id)
        if raise_obs_alarm and obs_alarm is None:
            logging.info("Raising obsolete-patch-in-system alarm")
            fault = fm_api.Fault(alarm_id = fm_constants.FM_ALARM_ID_PATCH_OBS_IN_SYSTEM,
                                 alarm_type = fm_constants.FM_ALARM_TYPE_5,
                                 alarm_state = fm_constants.FM_ALARM_STATE_SET,
                                 entity_type_id = fm_constants.FM_ENTITY_TYPE_HOST,
                                 entity_instance_id = entity_instance_id,
                                 severity = fm_constants.FM_ALARM_SEVERITY_WARNING,
                                 reason_text = 'Obsolete patch in system',
                                 probable_cause = fm_constants.ALARM_PROBABLE_CAUSE_65,
                                 proposed_repair_action = 'Remove and delete obsolete patches',
                                 service_affecting = False)
            fault = fm_api.Fault(alarm_id=fm_constants.FM_ALARM_ID_PATCH_OBS_IN_SYSTEM,
                                 alarm_type=fm_constants.FM_ALARM_TYPE_5,
                                 alarm_state=fm_constants.FM_ALARM_STATE_SET,
                                 entity_type_id=fm_constants.FM_ENTITY_TYPE_HOST,
                                 entity_instance_id=entity_instance_id,
                                 severity=fm_constants.FM_ALARM_SEVERITY_WARNING,
                                 reason_text='Obsolete patch in system',
                                 probable_cause=fm_constants.ALARM_PROBABLE_CAUSE_65,
                                 proposed_repair_action='Remove and delete obsolete patches',
                                 service_affecting=False)

            self.fm_api.set_fault(fault)
        elif not raise_obs_alarm and obs_alarm is not None:
@ -155,21 +153,20 @@ class PatchAlarmDaemon():
                                              entity_instance_id)
        if raise_cert_alarm and cert_alarm is None:
            logging.info("Raising developer-certificate-enabled alarm")
            fault = fm_api.Fault(alarm_id = fm_constants.FM_ALARM_ID_NONSTANDARD_CERT_PATCH,
                                 alarm_type = fm_constants.FM_ALARM_TYPE_9,
                                 alarm_state = fm_constants.FM_ALARM_STATE_SET,
                                 entity_type_id = fm_constants.FM_ENTITY_TYPE_HOST,
                                 entity_instance_id = entity_instance_id,
                                 severity = fm_constants.FM_ALARM_SEVERITY_CRITICAL,
                                 reason_text = 'Developer patch certificate is enabled',
                                 probable_cause = fm_constants.ALARM_PROBABLE_CAUSE_65,
                                 proposed_repair_action = 'Reinstall system to disable certificate and remove untrusted patches',
                                 suppression = False,
                                 service_affecting = False)
            fault = fm_api.Fault(alarm_id=fm_constants.FM_ALARM_ID_NONSTANDARD_CERT_PATCH,
                                 alarm_type=fm_constants.FM_ALARM_TYPE_9,
                                 alarm_state=fm_constants.FM_ALARM_STATE_SET,
                                 entity_type_id=fm_constants.FM_ENTITY_TYPE_HOST,
                                 entity_instance_id=entity_instance_id,
                                 severity=fm_constants.FM_ALARM_SEVERITY_CRITICAL,
                                 reason_text='Developer patch certificate is enabled',
                                 probable_cause=fm_constants.ALARM_PROBABLE_CAUSE_65,
                                 proposed_repair_action='Reinstall system to disable certificate and remove untrusted patches',
                                 suppression=False,
                                 service_affecting=False)

            self.fm_api.set_fault(fault)


    def _get_handle_failed_hosts(self):
        url = "http://%s/patch/query_hosts" % self.api_addr

@ -202,16 +199,16 @@ class PatchAlarmDaemon():
            else:
                logging.info("Updating patch-host-install-failure alarm")

            fault = fm_api.Fault(alarm_id = fm_constants.FM_ALARM_ID_PATCH_HOST_INSTALL_FAILED,
                                 alarm_type = fm_constants.FM_ALARM_TYPE_5,
                                 alarm_state = fm_constants.FM_ALARM_STATE_SET,
                                 entity_type_id = fm_constants.FM_ENTITY_TYPE_HOST,
                                 entity_instance_id = entity_instance_id,
                                 severity = fm_constants.FM_ALARM_SEVERITY_MAJOR,
                                 reason_text = reason_text,
                                 probable_cause = fm_constants.ALARM_PROBABLE_CAUSE_65,
                                 proposed_repair_action = 'Undo patching operation',
                                 service_affecting = False)
            fault = fm_api.Fault(alarm_id=fm_constants.FM_ALARM_ID_PATCH_HOST_INSTALL_FAILED,
                                 alarm_type=fm_constants.FM_ALARM_TYPE_5,
                                 alarm_state=fm_constants.FM_ALARM_STATE_SET,
                                 entity_type_id=fm_constants.FM_ENTITY_TYPE_HOST,
                                 entity_instance_id=entity_instance_id,
                                 severity=fm_constants.FM_ALARM_SEVERITY_MAJOR,
                                 reason_text=reason_text,
                                 probable_cause=fm_constants.ALARM_PROBABLE_CAUSE_65,
                                 proposed_repair_action='Undo patching operation',
                                 service_affecting=False)
            self.fm_api.set_fault(fault)

        elif patch_failed_alarm is not None:
@ -220,4 +217,3 @@ class PatchAlarmDaemon():
                                              entity_instance_id)

        return False
@ -9,11 +9,11 @@ SPDX-License-Identifier: Apache-2.0

import setuptools

setuptools.setup(name='patch_alarm',
                 version='1.0.0',
                 description='CEPH alarm',
                 license='Apache-2.0',
                 packages=['patch_alarm'],
                 entry_points={
                 }
setuptools.setup(
    name='patch_alarm',
    version='1.0.0',
    description='CEPH alarm',
    license='Apache-2.0',
    packages=['patch_alarm'],
    entry_points={}
)

@ -23,36 +23,13 @@ commands =
           -o \( -name .tox -prune \) \
           -o -type f -name '*.yaml' \
           -print0 | xargs -0 yamllint"
  bash -c "ls cgcs-patch/bin/*.sh | xargs bashate -v {posargs}"
  bash -c "ls cgcs-patch/bin/*.sh | xargs bashate -v -iE006 {posargs}"

[pep8]
# Temporarily ignoring these warnings
# E101 indentation contains mixed spaces and tabs
# E116 unexpected indentation (comment)
# E121 continuation line under-indented for hanging indent
# E122 continuation line missing indentation or outdented
# E123 closing bracket does not match indentation of opening bracket
# E124 closing bracket does not match visual indentation
# E126 continuation line over-indented for hanging indent
# E127 continuation line over-indented for visual indent
# E128 continuation line under-indented for visual indent
# E129 visually indented line with same indent as next logical line
# E203 whitespace before ':'
# E211 whitespace before '('
# E225 missing whitespace around operator
# E226 missing whitespace around arithmetic operator
# E228 missing whitespace around modulo operator
# E231 missing whitespace after ':'
# E241 multiple spaces after ','
# E261 at least two spaces before inline comment
# E265 block comment should start with '# '
# E251 unexpected spaces around keyword / parameter equals
# E302 expected 2 blank lines, found 1
# E303 too many blank lines
# Ignoring these warnings
# E501 line too long
# E712 comparison to bool should be reworded

ignore = E101,E116,E121,E123,E122,E124,E126,E127,E128,E129,E203,E211,E225,E226,E228,E231,E241,E251,E261,E265,E302,E303,E501,E712
ignore = E501

[testenv:pep8]
usedevelop = False
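Most of the codes removed from that ignore list map to the mechanical fixes made throughout this change (the bashate call, meanwhile, now ignores only E006, long lines). An illustrative before/after in Python, with hypothetical names, not taken from the tree:

    # Previously-ignored styles:
    #   blocksize=1*1024*1024        (E225/E226: missing whitespace around operators)
    #   f(alarm_id = 1)              (E251: spaces around a keyword argument's '=')
    #   #comment                     (E265: block comment should start with '# ')
    #   if flag == True:             (E712: equality comparison to True)

    # Fixed equivalents:
    blocksize = 1 * 1024 * 1024

    flag = True
    if flag is True:
        pass
    # comment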

@ -9,11 +9,11 @@ SPDX-License-Identifier: Apache-2.0

import setuptools

setuptools.setup(name='tsconfig',
                 version='1.0.0',
                 description='tsconfig',
                 license='Apache-2.0',
                 packages=['tsconfig'],
                 entry_points={
                 }
setuptools.setup(
    name='tsconfig',
    version='1.0.0',
    description='tsconfig',
    license='Apache-2.0',
    packages=['tsconfig'],
    entry_points={}
)