Resolve bashate and pep8 warnings

This update addresses existing bashate and pep8 warnings in
cgcs-patch, patch-alarm, and tsconfig. The bulk of these updates
are style changes, such as conforming to whitespace and spacing
conventions.

Story: 2003371
Task: 24433

Change-Id: I44b26d24788907bac0730a952d70ed4bafb87d90
Signed-off-by: Don Penney <don.penney@windriver.com>
This commit is contained in:
Don Penney 2018-08-08 19:36:58 -05:00
parent e5afe88d43
commit ae0314279f
33 changed files with 358 additions and 394 deletions

View File

@ -13,14 +13,12 @@ SYSTEM_CHANGED_FLAG=/var/run/node_is_patched
logfile=/var/log/patching.log
function LOG()
{
function LOG {
logger "$NAME: $*"
echo "`date "+%FT%T.%3N"`: $NAME: $*" >> $logfile
}
if [ -f $SYSTEM_CHANGED_FLAG ]
then
if [ -f $SYSTEM_CHANGED_FLAG ]; then
LOG "Node has been patched. Failing goenabled check."
exit 1
fi

View File

@ -22,7 +22,7 @@ DAEMON="/usr/sbin/sw-patch-agent"
PIDFILE="/var/run/sw-patch-agent.pid"
PATCH_INSTALLING_FILE="/var/run/patch_installing"
start()
start()
{
if [ -e $PIDFILE ]; then
PIDDIR=/proc/$(cat $PIDFILE)
@ -47,18 +47,15 @@ start()
fi
}
stop()
stop()
{
if [ -f $PATCH_INSTALLING_FILE ]
then
if [ -f $PATCH_INSTALLING_FILE ]; then
echo "Patches are installing. Waiting for install to complete."
while [ -f $PATCH_INSTALLING_FILE ]
do
while [ -f $PATCH_INSTALLING_FILE ]; do
# Verify the agent is still running
pid=$(cat $PATCH_INSTALLING_FILE)
cat /proc/$pid/cmdline 2>/dev/null | grep -q $DAEMON
if [ $? -ne 0 ]
then
if [ $? -ne 0 ]; then
echo "Patch agent not running."
break
fi

View File

@ -21,7 +21,7 @@ DESC="sw-patch-controller-daemon"
DAEMON="/usr/sbin/sw-patch-controller-daemon"
PIDFILE="/var/run/sw-patch-controller-daemon.pid"
start()
start()
{
if [ -e $PIDFILE ]; then
PIDDIR=/proc/$(cat $PIDFILE)
@ -46,7 +46,7 @@ start()
fi
}
stop()
stop()
{
echo -n "Stopping $DESC..."
start-stop-daemon --stop --quiet --pidfile $PIDFILE

View File

@ -20,21 +20,17 @@ PATCHING_DIR=/opt/patching
logfile=/var/log/patching.log
function LOG()
{
function LOG {
logger "$NAME: $*"
echo "`date "+%FT%T.%3N"`: $NAME: $*" >> $logfile
}
function LOG_TO_FILE()
{
function LOG_TO_FILE {
echo "`date "+%FT%T.%3N"`: $NAME: $*" >> $logfile
}
function create_groups()
{
if [ -f $GROUPS_FILE ]
then
function create_groups {
if [ -f $GROUPS_FILE ]; then
return 0
fi
@ -45,11 +41,9 @@ function create_groups()
EOF
}
function do_setup()
{
function do_setup {
# Does the repo exist?
if [ ! -d $REPO_DIR ]
then
if [ ! -d $REPO_DIR ]; then
LOG "Creating repo"
mkdir -p $REPO_DIR
@ -59,8 +53,7 @@ function do_setup()
createrepo -g $GROUPS_FILE $REPO_DIR >> $logfile 2>&1
fi
if [ ! -d $PATCHING_DIR ]
then
if [ ! -d $PATCHING_DIR ]; then
LOG "Creating $PATCHING_DIR"
mkdir -p $PATCHING_DIR
fi
@ -68,8 +61,7 @@ function do_setup()
# If we can ping the active controller, sync the repos
LOG_TO_FILE "ping -c 1 -w 1 controller"
ping -c 1 -w 1 controller >> $logfile 2>&1 || ping6 -c 1 -w 1 controller >> $logfile 2>&1
if [ $? -ne 0 ]
then
if [ $? -ne 0 ]; then
LOG "Cannot ping controller. Nothing to do"
return 0
fi

View File

@ -15,15 +15,12 @@ NAME=$(basename $0)
logfile=/var/log/patching.log
function LOG_TO_FILE()
{
function LOG_TO_FILE {
echo "`date "+%FT%T.%3N"`: $NAME: $*" >> $logfile
}
function check_for_rr_patch()
{
if [ -f /var/run/node_is_patched_rr ]
then
function check_for_rr_patch {
if [ -f /var/run/node_is_patched_rr ]; then
echo
echo "Node has been patched and requires an immediate reboot."
echo
@ -32,14 +29,11 @@ function check_for_rr_patch()
fi
}
function check_install_uuid()
{
function check_install_uuid {
# Check whether our installed load matches the active controller
CONTROLLER_UUID=`curl -sf http://controller/feed/rel-${SW_VERSION}/install_uuid`
if [ $? -ne 0 ]
then
if [ "$HOSTNAME" = "controller-1" ]
then
if [ $? -ne 0 ]; then
if [ "$HOSTNAME" = "controller-1" ]; then
# If we're on controller-1, controller-0 may not have the install_uuid
# matching this release, if we're in an upgrade. If the file doesn't exist,
# bypass this check
@ -51,8 +45,7 @@ function check_install_uuid()
return 1
fi
if [ "$INSTALL_UUID" != "$CONTROLLER_UUID" ]
then
if [ "$INSTALL_UUID" != "$CONTROLLER_UUID" ]; then
LOG_TO_FILE "This node is running a different load than the active controller and must be reinstalled"
echo "This node is running a different load than the active controller and must be reinstalled"
return 1
@ -69,8 +62,7 @@ if [ -f /etc/platform/installation_failed ] ; then
fi
# Clean up the RPM DB
if [ ! -f /var/run/.rpmdb_cleaned ]
then
if [ ! -f /var/run/.rpmdb_cleaned ]; then
LOG_TO_FILE "Cleaning RPM DB"
rm -f /var/lib/rpm/__db*
touch /var/run/.rpmdb_cleaned
@ -82,28 +74,24 @@ fi
DELAY_SEC=120
START=`date +%s`
FOUND=0
while [ $(date +%s) -lt $(( ${START} + ${DELAY_SEC} )) ]
do
while [ $(date +%s) -lt $(( ${START} + ${DELAY_SEC} )) ]; do
ping -c 1 controller > /dev/null 2>&1 || ping6 -c 1 controller > /dev/null 2>&1
if [ $? -eq 0 ]
then
if [ $? -eq 0 ]; then
FOUND=1
break
fi
sleep 1
done
if [ ${FOUND} -eq 0 ]
then
# 'controller' is not available, just exit
LOG_TO_FILE "Unable to contact active controller (controller). Boot will continue."
exit 1
if [ ${FOUND} -eq 0 ]; then
# 'controller' is not available, just exit
LOG_TO_FILE "Unable to contact active controller (controller). Boot will continue."
exit 1
fi
case "$1" in
start)
if [ "${system_mode}" = "simplex" ]
then
if [ "${system_mode}" = "simplex" ]; then
# On a simplex CPE, we need to launch the http server first,
# before we can do the patch installation
LOG_TO_FILE "***** Launching lighttpd *****"
@ -117,8 +105,7 @@ case "$1" in
/etc/init.d/lighttpd stop
else
check_install_uuid
if [ $? -ne 0 ]
then
if [ $? -ne 0 ]; then
# The INSTALL_UUID doesn't match the active controller, so exit
exit 1
fi

View File

@ -35,11 +35,11 @@ LOCAL_PATCH_DATA_DIR = "export/patch_data"
ORDER_FILE = "patch_order"
ARCH_DEFAULT = "x86_64"
METADATA_TAGS = [ 'ID', 'SW_VERSION', 'SUMMARY', 'DESCRIPTION',
'INSTALL_INSTRUCTIONS', 'WARNINGS', 'STATUS',
'UNREMOVABLE', 'REBOOT_REQUIRED' ]
RMP_EXCLUDES = [ '-dev-', '-dbg-', '-doc-' ]
BUILD_TYPES = [ 'std', 'rt' ]
METADATA_TAGS = ['ID', 'SW_VERSION', 'SUMMARY', 'DESCRIPTION',
'INSTALL_INSTRUCTIONS', 'WARNINGS', 'STATUS',
'UNREMOVABLE', 'REBOOT_REQUIRED']
RMP_EXCLUDES = ['-dev-', '-dbg-', '-doc-']
BUILD_TYPES = ['std', 'rt']
SAME = 0
@ -74,7 +74,7 @@ capture_source_flag = False
capture_rpms_flag = False
capture_source_path = None
logfile = "/var/log/patching.log"
LOG = logging.getLogger(__name__)
@ -96,21 +96,22 @@ def configure_logging(logtofile=True, level=logging.DEBUG):
else:
logging.basicConfig(level=level)
def rev_lt(num1, num2):
n1w=num1.split('.')
n2w=num2.split('.')
n1w = num1.split('.')
n2w = num2.split('.')
while True:
try:
n1=int(n1w.pop(0))
except:
n1 = int(n1w.pop(0))
except:
return True
try:
n2=int(n2w.pop(0))
except:
n2 = int(n2w.pop(0))
except:
return False
if n1<n2:
if n1 < n2:
return True
if n1>n2:
if n1 > n2:
return False
@ -126,6 +127,7 @@ def add_text_tag_to_xml(parent, name, text):
tag.text = text
return tag
def handle_exception(exc_type, exc_value, exc_traceback):
"""
Exception handler to log any uncaught exceptions
@ -134,6 +136,7 @@ def handle_exception(exc_type, exc_value, exc_traceback):
exc_info=(exc_type, exc_value, exc_traceback))
sys.__excepthook__(exc_type, exc_value, exc_traceback)
def write_xml_file(top, fname):
# Generate the file, in a readable format if possible
outfile = open(fname, 'w')
@ -147,6 +150,7 @@ def write_xml_file(top, fname):
else:
outfile.write(minidom.parseString(rough_xml).toprettyxml(indent=" "))
class PatchRecipeError(Exception):
"""Base class for patch recipe exceptions."""
@ -156,26 +160,32 @@ class PatchRecipeError(Exception):
def __str__(self):
return self.message or ""
class PatchRecipeXMLFail(PatchRecipeError):
"""Problem parsing XML of patch recipe."""
pass
class PatchBuildFail(PatchRecipeError):
"""Problem Compiling the patch."""
pass
class PatchPackagingFail(PatchRecipeError):
"""Problem assembling the patch."""
pass
class PatchPackagingMiss(PatchRecipeError):
"""Problem assembling the patch - might be correctable."""
pass
class PatchRequirementFail(PatchRecipeError):
"""Missing Requirement."""
pass
class PatchRecipeCmdFail(PatchRecipeError):
"""Shell command Failure."""
pass
@ -241,7 +251,7 @@ class PatchList:
if patch == patch_id:
return self.patch_data[patch]
return None
def _validate_patch_order(self):
fix_local_order = False
remote_order = []
@ -266,7 +276,7 @@ class PatchList:
break
if fix_local_order:
print "_validate_patch_order: fix patch order"
f = open(self._std_local_path(self.order_file),'w')
f = open(self._std_local_path(self.order_file), 'w')
for patch_id in validated_order:
f.write("%s\n" % patch_id)
print "_validate_patch_order: %s" % patch_id
@ -282,7 +292,7 @@ class PatchList:
os.chdir(workdir)
issue_cmd("mkdir -p %s" % self._std_remote_copy_path(""))
os.chdir(self._std_remote_copy_path(""))
if not os.path.isdir(self.patch_git):
issue_cmd("git clone ssh://%s@vxgit.wrs.com:7999/cgcs/%s.git" % (os.environ['USER'], self.patch_git))
os.chdir(self.patch_git)
@ -327,7 +337,7 @@ class PatchList:
for patch_id in self.patches_to_deliver:
os.chdir(workdir)
patch = "%s.patch" % patch_id
print "signing patch '%s'" % self._std_local_path(patch)
print "signing patch '%s'" % self._std_local_path(patch)
try:
subprocess.check_call(["sign_patch_formal.sh", self._std_local_path(patch)])
@ -377,7 +387,6 @@ class PatchList:
xml_path = self._std_local_path(self._std_xml_patch_recipe_name(patch_id))
self.add(xml_path, built=True, fix=False)
def get_implicit_requires(self, patch_id, recipies):
list = []
for r in recipies:
@ -513,7 +522,6 @@ class PatchList:
prd.gen_xml(fname=self._std_local_path(self._std_xml_patch_recipe_name(prd.patch_id)))
def build_patches(self):
global capture_source_flag
# While unbuild patches exist
@ -527,7 +535,7 @@ class PatchList:
if rc:
# This patch is ready to build, build it now
print "Ready to build patch %s." % patch_id
rc = prd.build_patch()
rc = prd.build_patch()
if rc:
# append new built patch to order file
issue_cmd("sed -i '/^%s$/d' %s" % (patch_id, self._std_local_path(self.order_file)))
@ -544,8 +552,8 @@ class PatchList:
if capture_source_flag:
prd.capture_source()
# It is important to break here.
# We just edited the patches_to_build which an enclosing for loop is iterating over.
# It is important to break here.
# We just edited the patches_to_build which an enclosing for loop is iterating over.
# without the break, the result is skipping patches and/or building patches out of order.
break
else:
@ -635,7 +643,6 @@ class PackageData:
raise PatchRecipeXMLFail(msg)
sys.exit(2)
def gen_xml(self, e_package):
for personality in self.personalities:
add_text_tag_to_xml(e_package, 'PERSONALITY', personality)
@ -670,17 +677,17 @@ class PackageData:
file_path = "%s/%s" % (rpm_dir, file)
if os.path.isfile(file_path):
print "cleaning match %s\n" % file
rpm_name_cmd = [ "rpm", "-qp", "--dbpath", temp_rpm_db_dir, "--queryformat", "%{NAME}", "%s" % file_path ]
rpm_name_cmd = ["rpm", "-qp", "--dbpath", temp_rpm_db_dir, "--queryformat", "%{NAME}", "%s" % file_path]
rpm_name = issue_cmd_w_stdout(rpm_name_cmd)
if rpm_name == self.name:
rpm_release_cmd = [ "rpm", "-qp", "--dbpath", temp_rpm_db_dir, "--queryformat", "%{RELEASE}", "%s" % file_path ]
rpm_release_cmd = ["rpm", "-qp", "--dbpath", temp_rpm_db_dir, "--queryformat", "%{RELEASE}", "%s" % file_path]
rpm_release = issue_cmd_w_stdout(rpm_release_cmd)
print "cleaning release %s" % rpm_release
rm_cmd = "rm -f %s/%s-*-%s.%s.rpm" % (rpm_dir, self.name, rpm_release, arch)
issue_cmd(rm_cmd)
def clean(self, prebuilt=False):
print "package clean"
print "package clean"
self._clean_rpms(prebuilt=prebuilt)
def _add_rpms(self, pf, arch=ARCH_DEFAULT, fatal=True, prebuilt=False):
@ -738,7 +745,7 @@ class PackageData:
# break
if not reject:
rpm_name_cmd = [ "rpm", "-qp", "--dbpath", temp_rpm_db_dir, "--queryformat", "%{NAME}", "%s/%s" % (rpm_dir, file) ]
rpm_name_cmd = ["rpm", "-qp", "--dbpath", temp_rpm_db_dir, "--queryformat", "%{NAME}", "%s/%s" % (rpm_dir, file)]
rpm_name = issue_cmd_w_stdout(rpm_name_cmd)
if rpm_name != self.name:
print "reject file '%s' due to rpm_name '%s'" % (file, rpm_name)
@ -784,6 +791,7 @@ class PackageData:
raise PatchPackagingFail(msg)
sys.exit(2)
class RecipeData:
"""
Recipe data
@ -791,7 +799,7 @@ class RecipeData:
def __init__(self, e):
self.name = None
self.prebuilt = False
self.packages = collections.OrderedDict() # map package name to PackageData
self.packages = collections.OrderedDict() # map package name to PackageData
self._parse_recipe(e)
def __str__(self):
@ -863,7 +871,7 @@ class RecipeData:
self.packages[package].gen_xml(e_package)
def clean(self):
print "recipe clean"
print "recipe clean"
if not self.prebuilt:
for package in self.packages:
self.packages[package].clean(prebuilt=self.prebuilt)
@ -890,7 +898,7 @@ class RecipeData:
if os.path.isfile(path):
rc = issue_cmd_rc("%s %s %s >> %s/%s.log" % (path, self.name, extra_arg, os.environ['DEST'], os.environ['PREFIX']))
def build_patch(self, pf, fatal=True):
for package in self.packages:
self.packages[package].build_patch(pf, fatal=fatal, prebuilt=self.prebuilt)
@ -903,6 +911,7 @@ class RecipeData:
print "=========== is_prebuilt prebuilt=%s for %s =============" % (self.prebuilt, self.name)
return self.prebuilt
class PatchRecipeData:
"""
Patch recipe data
@ -951,7 +960,6 @@ class PatchRecipeData:
rc = rc2
return rc
def set_implicit_requires(self, patch_list):
self.auto_requires = patch_list.get_implicit_requires(self.patch_id, self.recipies.keys())
@ -1100,8 +1108,8 @@ class PatchRecipeData:
def recursive_print(self, e, depth=0):
for child in e:
print "%sTag: %s, attr: %s, text: %s" % (" "*depth, child.tag, child.attrib, child.text and child.text.strip() or "")
self.recursive_print(child.getchildren(), depth+1)
print "%sTag: %s, attr: %s, text: %s" % (" " * depth, child.tag, child.attrib, child.text and child.text.strip() or "")
self.recursive_print(child.getchildren(), depth + 1)
# for child in e.iter('BUILD'):
# print "Tag: %s, attr: %s" % (child.tag, child.attrib)
@ -1162,7 +1170,7 @@ class PatchRecipeData:
write_xml_file(e_top, fname)
def __str__(self):
return "[ patch_id: %s, context: %s, metadata: %s, requires: %s, recipies: %s ]" % (str(self.patch_id), str(self.build_context), str(self.metadata), str(self.requires), str(self.recipies,keys()))
return "[ patch_id: %s, context: %s, metadata: %s, requires: %s, recipies: %s ]" % (str(self.patch_id), str(self.build_context), str(self.metadata), str(self.requires), str(self.recipies, keys()))
def myprint(self, indent=""):
print "patch_id: %s" % str(self.patch_id)
@ -1205,7 +1213,7 @@ class PatchRecipeData:
if self.build_context is not None:
# Before checkout, make sure there are no untracked temporary files
# left by a previous build that may prevent the checkout...
# left by a previous build that may prevent the checkout...
# e.g. horizon's pbr-2015.1.0-py2.7.egg directory is a build artifact
issue_cmd("for d in $(find . -type d -name .git | xargs --max-args=1 dirname); do (cd $d; echo $d; git clean -df; git reset --hard; git ls-files --others --exclude-standard | xargs --no-run-if-empty rm; if [ ! -f .subgits ]; then if [ -f .gitignore ]; then git ls-files --others --ignored --exclude-from=.gitignore | xargs --no-run-if-empty rm; fi; fi); done")
issue_cmd("wrgit checkout %s" % self.build_context)
@ -1229,7 +1237,6 @@ class PatchRecipeData:
return True
def _get_prev_patch_id(self, patch_id):
patch_order_file = self.pl._std_local_path(self.pl.order_file)
prev_patch_id = None
@ -1240,7 +1247,7 @@ class PatchRecipeData:
return prev_patch_id
prev_patch_id = this_patch_id
return prev_patch_id
def _get_rpm_db_path(self, patch_id):
rpm_db = self.pl._std_local_path("%s.rpm_db" % patch_id)
return rpm_db
@ -1257,7 +1264,7 @@ class PatchRecipeData:
issue_cmd("rpm -qp --dbpath %s --queryformat '%s %%{NAME} %%{RELEASE}\n' %s/*rpm >> %s 2> /dev/null" % (temp_rpm_db_dir, subdir, rpm_sub_dir, rpm_db))
def _read_rpm_db(self, patch_id):
release_map={}
release_map = {}
rpm_db_dir = "export/patch_data"
rpm_db = self._get_rpm_db_path(patch_id)
with open(rpm_db) as f:
@ -1276,7 +1283,7 @@ class PatchRecipeData:
delim = "_"
words = self.patch_id.split(delim)
l = len(words[-1])
words[-1] = '0'*l
words[-1] = '0' * l
prev_patch_id = delim.join(words)
prev_release_map = self._read_rpm_db(prev_patch_id)
release_map = self._read_rpm_db(self.patch_id)
@ -1290,7 +1297,7 @@ class PatchRecipeData:
os.environ['DEST'] = "%s/export/patch_source/%s" % (os.environ['MY_PATCH_WORKSPACE'], self.patch_id)
issue_cmd("mkdir -p %s" % os.environ['DEST'])
for recipe in self.recipies.keys():
print "capture source of recipe %s" % recipe
print "capture source of recipe %s" % recipe
self.recipies[recipe].capture_source()
def build_patch(self, local_path="."):
@ -1305,8 +1312,8 @@ class PatchRecipeData:
recipe_str += recipe + " "
if not self.recipies[recipe].is_prebuilt():
build_recipe_str += recipe + " "
print "recipe_str = %s" % recipe_str
print "build_recipe_str = %s" % build_recipe_str
print "recipe_str = %s" % recipe_str
print "build_recipe_str = %s" % build_recipe_str
if recipe_str == "":
msg = "No recipies for patch %s" % self.patch_id
LOG.exception(msg)
@ -1326,15 +1333,15 @@ class PatchRecipeData:
if not pre_compiled_flag:
# compile patch
os.chdir(workdir)
print "pre clean"
print "pre clean"
if build_recipe_str == "":
print " ... nothing to clean"
else:
issue_cmd("build-pkgs --no-build-info --clean %s" % build_recipe_str)
for recipe in self.recipies.keys():
print "pre clean recipe %s" % recipe
print "pre clean recipe %s" % recipe
self.recipies[recipe].clean()
print "Build"
print "Build"
if build_recipe_str == "":
print " ... nothing to build"
else:
@ -1377,7 +1384,7 @@ class PatchRecipeData:
if not pre_compiled_flag:
self.recipies[recipe].build_patch(pf, fatal=True)
else:
try:
try:
self.recipies[recipe].build_patch(pf, fatal=False)
except PatchPackagingMiss:
print "Warning: attempting rebuild of recipe %s" % self.recipies[recipe].name
@ -1385,20 +1392,21 @@ class PatchRecipeData:
issue_cmd("build-pkgs --no-build-info --careful %s" % self.recipies[recipe].name)
self.recipies[recipe].build_patch(pf, fatal=True)
local_path=self.pl._std_local_path("")
local_path = self.pl._std_local_path("")
print "=== local_path = %s ===" % local_path
pf.gen_patch(outdir=local_path)
return True
def _tag_build_context():
os.chdir(srcdir)
issue_cmd("for e in . `wrgit all-core-gits` ; do (cd $e ; git tag v%s) done" % self.patch_id)
def read_build_info():
try:
build_info_find_cmd = [ "find", "std/rpmbuild/RPMS/", "-name", "build-info-[0-9]*.x86_64.rpm" ]
build_info_find_cmd = ["find", "std/rpmbuild/RPMS/", "-name", "build-info-[0-9]*.x86_64.rpm"]
build_info_path = issue_cmd_w_stdout(build_info_find_cmd)
if build_info_path == "":
issue_cmd("build-pkgs --no-descendants build-info")
@ -1411,7 +1419,7 @@ def read_build_info():
continue
if len(line) == 0:
continue
name, var = line.partition("=")[::2]
name = name.strip()
var = var.strip()
@ -1422,10 +1430,12 @@ def read_build_info():
return False
return True
def patch_id_to_tag(patch_id):
tag = "v%s" % patch_id
return tag
def validate_tag(tag):
try:
cmd = "git tag | grep %s" % tag
@ -1437,6 +1447,7 @@ def validate_tag(tag):
return False
return True
def issue_cmd_w_stdout(cmd):
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out = p.communicate()[0]
@ -1447,7 +1458,7 @@ def issue_cmd_w_stdout(cmd):
print msg
raise PatchRecipeCmdFail(msg)
return out
def issue_cmd(cmd):
print "CMD: %s" % cmd
@ -1458,6 +1469,7 @@ def issue_cmd(cmd):
print msg
raise PatchRecipeCmdFail(msg)
def issue_cmd_no_raise(cmd):
print "CMD: %s" % cmd
rc = subprocess.call(cmd, shell=True)
@ -1466,11 +1478,13 @@ def issue_cmd_no_raise(cmd):
LOG.exception(msg)
print msg
def issue_cmd_rc(cmd):
print "CMD: %s" % cmd
rc = subprocess.call(cmd, shell=True)
return rc
def set_capture_source_path():
global capture_source_path
my_repo = None
@ -1491,6 +1505,7 @@ def set_capture_source_path():
if rc == 0:
capture_source_path = new_path
def capture_rpms():
for build_type in BUILD_TYPES:
src_rpm_dir = "%s/%s/%s" % (workdir, build_type, RPM_DIR)
@ -1499,12 +1514,14 @@ def capture_rpms():
issue_cmd("mkdir -p %s" % dest_rpm_dir)
issue_cmd("rsync -avu %s/*.rpm %s" % (src_rpm_dir, dest_rpm_dir))
def modify_patch_usage():
msg = "modify_patch [ --obsolete | --released | --development ] [ --sw_version <version> --id <patch_id> | --file <patch_path.patch> ]"
LOG.exception(msg)
print msg
sys.exit(1)
def modify_patch():
global workdir
global temp_rpm_db_dir
@ -1523,11 +1540,10 @@ def modify_patch():
'sw_version=',
'id=',
'file=',
])
])
except getopt.GetoptError as e:
print str(e)
modify_patch_usage()
patch_path = None
cwd = os.getcwd()
@ -1572,7 +1588,7 @@ def modify_patch():
temp_rpm_db_dir = "%s/%s" % (workdir, ".rpmdb")
if patch_path is not None:
rc = PatchFile.modify_patch(patch_path, "status", new_status)
assert(rc == True)
assert(rc)
print "Patch '%s' has been modified to status '%s'" % (patch_path, new_status)
else:
if sw_version is None or patch_id is None:
@ -1588,7 +1604,7 @@ def modify_patch():
print "patch_file_name = %s" % patch_file_name
print "patch_path = %s" % patch_path
rc = PatchFile.modify_patch(patch_path, "status", new_status)
assert(rc == True)
assert(rc)
os.chdir(pl._std_patch_git_path(".."))
issue_cmd("git add %s" % patch_path)
issue_cmd("git commit -m \"Modify status of patch '%s' to '%s'\"" % (patch_id, new_status))
@ -1652,7 +1668,7 @@ def modify_patch():
human_release = "Titanium Cloud 4"
windshare_folder = "Titanium-Cloud-4"
if sw_version == "18.03" || sw_version == "18.03"
if sw_version == "18.03" or sw_version == "18.03":
local_dest = "/folk/cgts/rel-ops/%s/patches/" % sw_version
deliver_dest = "/folk/prj-wrlinux/release/tis/tis-5/update/ti%s-%s/Titanium-Cloud-5/patches" % (ts, munged_patch_id)
human_release = "Titanium Cloud 5"
@ -1700,7 +1716,7 @@ def modify_patch():
print "Failed to modify patch!"
finally:
shutil.rmtree(workdir)
def query_patch_usage():
msg = "query_patch [ --sw_version <version> --id <patch_id> | --file <patch_path.patch> ] [ --field <field_name> ]"
@ -1711,6 +1727,7 @@ def query_patch_usage():
print msg
sys.exit(1)
def query_patch():
global workdir
global temp_rpm_db_dir
@ -1727,12 +1744,11 @@ def query_patch():
'id=',
'file=',
'field=',
])
])
except getopt.GetoptError as e:
print str(e)
query_patch_usage()
patch_path = None
cwd = os.getcwd()
field = None
@ -1758,7 +1774,7 @@ def query_patch():
temp_rpm_db_dir = "%s/%s" % (workdir, ".rpmdb")
if patch_path is not None:
answer = PatchFile.query_patch(patch_path, field=field)
field_order=['id', 'sw_version', 'status', 'cert', 'reboot_required', 'unremovable', 'summary', 'description', 'install_instructions', 'warnings']
field_order = ['id', 'sw_version', 'status', 'cert', 'reboot_required', 'unremovable', 'summary', 'description', 'install_instructions', 'warnings']
for k in field_order:
if k in answer.keys():
print "%s: '%s'" % (k, answer[k])
@ -1794,6 +1810,7 @@ def make_patch_usage():
print msg
sys.exit(1)
def make_patch():
global workdir
global temp_rpm_db_dir
@ -1825,7 +1842,7 @@ def make_patch():
'srcdir=',
'branch=',
'sw_version=',
])
])
except getopt.GetoptError as e:
print str(e)
make_patch_usage()
@ -1894,14 +1911,14 @@ def make_patch():
# TODO if branch is not None or workdir is not None or srcdir is not None:
# TODO print "If --formal is specified, then srcdir, workdir and branch are automatci and must not be specified"
# TODO make_patch_usage()
if pre_compiled_flag and formal_flag:
print "invalid options: --formal and --pre-compiled can't be used together."
make_patch_usage()
if workdir is not None:
if not os.path.isdir(workdir):
print "invalid directory: workdir = '%s'" % workdir
print "invalid directory: workdir = '%s'" % workdir
make_patch_usage()
temp_rpm_db_dir = "%s/%s" % (workdir, ".rpmdb")
@ -1916,7 +1933,6 @@ def make_patch():
print "invalid patch file path: '%s'" % patch
make_patch_usage()
if 'MY_REPO' in os.environ:
MY_REPO = os.path.normpath(os.path.join(cwd, os.path.expanduser(os.environ['MY_REPO'])))
else:
@ -1952,7 +1968,7 @@ def make_patch():
else:
print "ERROR: environment variable 'MY_BUILD_CFG' is not defined"
sys.exit(1)
if 'MY_BUILD_DIR' in os.environ:
MY_BUILD_DIR = os.path.normpath(os.path.join(cwd, os.path.expanduser(os.environ['MY_BUILD_DIR'])))
else:
@ -1989,6 +2005,5 @@ def make_patch():
# sign formal patch
pl.sign_official_patches()
# deliver to git repo
# deliver to git repo
pl.deliver_official_patch()

View File

@ -4,4 +4,3 @@ Copyright (c) 2014-2017 Wind River Systems, Inc.
SPDX-License-Identifier: Apache-2.0
"""

View File

@ -9,19 +9,19 @@ from oslo_config import cfg
API_SERVICE_OPTS = [
cfg.StrOpt('api_bind_ip',
default='127.0.0.1',
help='IP for the Patching controller API server to bind to',
),
cfg.IntOpt('api_port',
default=5487,
help='The port for the Patching controller API server',
),
cfg.IntOpt('api_limit_max',
cfg.StrOpt('api_bind_ip',
default='127.0.0.1',
help='IP for the Patching controller API server to bind to',
),
cfg.IntOpt('api_port',
default=5487,
help='The port for the Patching controller API server',
),
cfg.IntOpt('api_limit_max',
default=1000,
help='the maximum number of items returned in a single '
'response from a collection resource'),
]
]
CONF = cfg.CONF
opt_group = cfg.OptGroup(name='api',

View File

@ -5,12 +5,10 @@ SPDX-License-Identifier: Apache-2.0
"""
#from oslo.config import cfg
import pecan
from cgcs_patch.api import config
#CONF = cfg.CONF
def get_pecan_config():
# Set up the pecan configuration
@ -30,7 +28,7 @@ def setup_app(pecan_config=None):
template_path=pecan_config.app.template_path,
debug=False,
force_canonical=getattr(pecan_config.app, 'force_canonical', True),
guess_content_type_from_ext=False, # Avoid mime-type lookup
guess_content_type_from_ext=False, # Avoid mime-type lookup
)
return app

View File

@ -4,4 +4,3 @@ Copyright (c) 2014-2017 Wind River Systems, Inc.
SPDX-License-Identifier: Apache-2.0
"""

View File

@ -16,6 +16,7 @@ from cgcs_patch.patch_controller import pc
import logging
from cgcs_patch.patch_functions import LOG
class PatchAPIController(object):
@expose('json')
@ -107,7 +108,7 @@ class PatchAPIController(object):
# currently 64K chunk size is selected
dst = os.open(fn, os.O_WRONLY | os.O_CREAT)
src = fileitem.file.fileno()
size = 64*1024
size = 64 * 1024
n = size
while n >= size:
s = os.read(src, size)
@ -263,4 +264,3 @@ class RootController(object):
patch = PatchAPIController()
v1 = PatchAPIController()

View File

@ -6,16 +6,16 @@
from oslo_config import cfg
API_SERVICE_OPTS = [
cfg.StrOpt('auth_api_bind_ip',
default=None,
help='IP for the authenticated Patching API server to bind to'),
cfg.IntOpt('auth_api_port',
default=5491,
help='The port for the authenticated Patching API server'),
cfg.IntOpt('api_limit_max',
default=1000,
help='the maximum number of items returned in a single '
'response from a collection resource')
cfg.StrOpt('auth_api_bind_ip',
default=None,
help='IP for the authenticated Patching API server to bind to'),
cfg.IntOpt('auth_api_port',
default=5491,
help='The port for the authenticated Patching API server'),
cfg.IntOpt('api_limit_max',
default=1000,
help='the maximum number of items returned in a single '
'response from a collection resource')
]
CONF = cfg.CONF

View File

@ -4,13 +4,15 @@ Copyright (c) 2014-2017 Wind River Systems, Inc.
SPDX-License-Identifier: Apache-2.0
"""
"""Access Control Lists (ACL's) control access the API server."""
from cgcs_patch.authapi import auth_token
OPT_GROUP_NAME = 'keystone_authtoken'
"""Access Control Lists (ACL's) control access the API server."""
def install(app, conf, public_routes):
"""Install ACL check on application.
@ -21,7 +23,7 @@ def install(app, conf, public_routes):
:return: The same WSGI application with ACL installed.
"""
keystone_config = dict(conf.items(OPT_GROUP_NAME))
return auth_token.AuthTokenMiddleware(app,
conf=keystone_config,

View File

@ -17,9 +17,9 @@ import ConfigParser
auth_opts = [
cfg.StrOpt('auth_strategy',
default='keystone',
help='Method to use for auth: noauth or keystone.'),
]
default='keystone',
help='Method to use for auth: noauth or keystone.'),
]
CONF = cfg.CONF
CONF.register_opts(auth_opts)
@ -58,7 +58,7 @@ def setup_app(pecan_config=None, extra_hooks=None):
debug=False,
force_canonical=getattr(pecan_config.app, 'force_canonical', True),
hooks=app_hooks,
guess_content_type_from_ext=False, # Avoid mime-type lookup
guess_content_type_from_ext=False, # Avoid mime-type lookup
)
if pecan_config.app.enable_acl:

View File

@ -30,6 +30,7 @@ from sysinv.openstack.common import policy
_POLICY_PATH = None
_POLICY_CACHE = {}
def reset():
global _POLICY_PATH
global _POLICY_CACHE

View File

@ -146,7 +146,7 @@ class PatchService:
def audit_socket(self):
# Ensure multicast address is still allocated
cmd = "ip maddr show %s | awk 'BEGIN { ORS=\"\" }; {if ($2 == \"%s\") print $2}'" % \
cmd = "ip maddr show %s | awk 'BEGIN {ORS=\"\"}; {if ($2 == \"%s\") print $2}'" % \
(cfg.get_mgmt_iface(), self.mcast_addr)
try:
result = subprocess.check_output(cmd, shell=True)
@ -163,4 +163,3 @@ class PatchService:
LOG.info("Unable to setup sockets. Waiting to retry")
time.sleep(5)
LOG.info("Multicast address reconfigured")

View File

@ -27,7 +27,7 @@ dev_certificate = b"""-----BEGIN CERTIFICATE-----
Ss6CHAMK42aZ/+MWQlZEzNK49PtomGMjn5SuoK8u
-----END CERTIFICATE-----"""
formal_certificate=b"""-----BEGIN CERTIFICATE-----
formal_certificate = b"""-----BEGIN CERTIFICATE-----
MIIDezCCAmOgAwIBAgICEAMwDQYJKoZIhvcNAQELBQAwQjELMAkGA1UEBhMCQ0Ex
EDAOBgNVBAgMB09udGFyaW8xITAfBgNVBAoMGFdpbmQgUml2ZXIgU3lzdGVtcywg
SW5jLjAeFw0xNzA4MTgxNDM1MTJaFw0yNzA4MTYxNDM1MTJaMEIxCzAJBgNVBAYT

View File

@ -26,6 +26,7 @@ platform_conf_mtime = 0
patching_conf_mtime = 0
patching_conf = '/etc/patching/patching.conf'
def read_config():
global patching_conf_mtime
global patching_conf
@ -122,5 +123,3 @@ def get_mgmt_iface():
logging.exception("Failed to read management_interface from config")
return None
return mgmt_if

View File

@ -5,6 +5,7 @@ SPDX-License-Identifier: Apache-2.0
"""
class PatchError(Exception):
"""Base class for patching exceptions."""
@ -42,4 +43,4 @@ class PatchValidationFailure(PatchError):
class PatchMismatchFailure(PatchError):
"""Patch validation error."""
pass
pass

View File

@ -46,18 +46,18 @@ run_insvc_patch_scripts_cmd = "/usr/sbin/run-patch-scripts"
pa = None
# Smart commands
smart_cmd = [ "/usr/bin/smart" ]
smart_quiet = smart_cmd + [ "--quiet" ]
smart_update = smart_quiet + [ "update" ]
smart_newer = smart_quiet + [ "newer" ]
smart_orphans = smart_quiet + [ "query", "--orphans", "--show-format", "$name\n" ]
smart_query = smart_quiet + [ "query" ]
smart_query_repos = smart_quiet + [ "query", "--channel=base", "--channel=updates" ]
smart_install_cmd = smart_cmd + [ "install", "--yes", "--explain" ]
smart_remove_cmd = smart_cmd + [ "remove", "--yes", "--explain" ]
smart_query_installed = smart_quiet + [ "query", "--installed", "--show-format", "$name $version\n" ]
smart_query_base = smart_quiet + [ "query", "--channel=base", "--show-format", "$name $version\n" ]
smart_query_updates = smart_quiet + [ "query", "--channel=updates", "--show-format", "$name $version\n" ]
smart_cmd = ["/usr/bin/smart"]
smart_quiet = smart_cmd + ["--quiet"]
smart_update = smart_quiet + ["update"]
smart_newer = smart_quiet + ["newer"]
smart_orphans = smart_quiet + ["query", "--orphans", "--show-format", "$name\n"]
smart_query = smart_quiet + ["query"]
smart_query_repos = smart_quiet + ["query", "--channel=base", "--channel=updates"]
smart_install_cmd = smart_cmd + ["install", "--yes", "--explain"]
smart_remove_cmd = smart_cmd + ["remove", "--yes", "--explain"]
smart_query_installed = smart_quiet + ["query", "--installed", "--show-format", "$name $version\n"]
smart_query_base = smart_quiet + ["query", "--channel=base", "--show-format", "$name $version\n"]
smart_query_updates = smart_quiet + ["query", "--channel=updates", "--show-format", "$name $version\n"]
def setflag(fname):
@ -334,7 +334,7 @@ class PatchAgent(PatchService):
# Get the current channel config
try:
output = subprocess.check_output(smart_cmd +
[ "channel", "--yaml" ],
["channel", "--yaml"],
stderr=subprocess.STDOUT)
config = yaml.load(output)
except subprocess.CalledProcessError as e:
@ -345,18 +345,18 @@ class PatchAgent(PatchService):
LOG.exception("Failed to query channels")
return False
expected = [ { 'channel': 'rpmdb',
'type': 'rpm-sys',
'name': 'RPM Database',
'baseurl': None },
{ 'channel': 'base',
'type': 'rpm-md',
'name': 'Base',
'baseurl': "http://controller/feed/rel-%s" % SW_VERSION},
{ 'channel': 'updates',