diff --git a/build-tools/stx/patch/EXAMPLES/patch-recipe-sample-binary.xml b/build-tools/stx/patch/EXAMPLES/patch-recipe-sample-binary.xml
new file mode 100644
index 00000000..cc1a9449
--- /dev/null
+++ b/build-tools/stx/patch/EXAMPLES/patch-recipe-sample-binary.xml
@@ -0,0 +1,33 @@
+
+
+ binary_sample_patch
+ 1.0.0
+ 0001
+ starlingx
+ sample patch test
+ Reboot required patch
+ Sample instructions
+ Sample warning
+
+ Y
+ N
+ DEV
+
+
+
+
+
+ scripts/pre-install.sh
+ scripts/post-install.sh
+
+
+
+
+
+
+ curl
+
+
\ No newline at end of file
diff --git a/build-tools/stx/patch/EXAMPLES/patch-recipe-sample-insvc.xml b/build-tools/stx/patch/EXAMPLES/patch-recipe-sample-insvc.xml
new file mode 100644
index 00000000..2a37435f
--- /dev/null
+++ b/build-tools/stx/patch/EXAMPLES/patch-recipe-sample-insvc.xml
@@ -0,0 +1,31 @@
+
+
+ sample_patch
+ 1.0.0
+ 0001
+ starlingx
+ sample patch test
+ In Service patch
+ Sample instructions
+ Sample warning
+
+ N
+ N
+ DEV
+
+
+
+
+
+
+
+
+ sysvinv
+ linux
+ linux-rt
+
+
+
+ curl
+
+
\ No newline at end of file
diff --git a/build-tools/stx/patch/EXAMPLES/patch-recipe-sample-large.xml b/build-tools/stx/patch/EXAMPLES/patch-recipe-sample-large.xml
new file mode 100644
index 00000000..9e14bc97
--- /dev/null
+++ b/build-tools/stx/patch/EXAMPLES/patch-recipe-sample-large.xml
@@ -0,0 +1,446 @@
+
+
+ large_sample_patch
+ 1.0.0
+ 0001
+ starlingx
+ Large sample patch test
+ Reboot required patch
+ Sample instructions
+ Sample warning
+
+ Y
+ N
+ DEV
+
+
+
+
+
+ scripts/pre-install.sh
+ scripts/post-install.sh
+
+
+
+ stx-platform-helm
+ nfv-client
+ nfv-common
+ nfv-plugins
+ nfv-tools
+ nfv-vim
+ nova-api-proxy
+ distributedcloud-dccommon
+ distributedcloud-dcdbsync
+ distributedcloud-dcmanager
+ distributedcloud-dcorch
+ sm-api
+ sm-client
+ sm-tools
+ shadow-utils-config
+ sm
+ sm-common-libs
+ sm-common
+ sm-eru
+ sm-db
+ stx-ocf-scripts
+ puppet-dcdbsync
+ puppet-dcmanager
+ puppet-dcorch
+ puppet-fm
+ puppet-manifests
+ puppet-mtce
+ puppet-nfv
+ puppet-patching
+ puppet-smapi
+ puppet-sshd
+ puppet-sysinv
+ stx-ptp-notification-helm
+ bnxt-en
+ i40e
+ i40e-cvl-2.54
+ i40e-cvl-4.10
+ iavf
+ iavf-cvl-2.54
+ iavf-cvl-4.10
+ ice
+ ice-cvl-2.54
+ ice-cvl-4.10
+ octeon-ep
+ igb-uio
+ initramfs-trigger
+ iqvlinux
+ kmod-opae-fpga-driver
+ libbnxt-re
+ linux-compiler-gcc-10-x86
+ linux-headers-5.10.0-6-amd64
+ linux-headers-5.10.0-6-common
+ linux-image-5.10.0-6-amd64-unsigned
+ linux-kbuild-5.10
+ linux-libc-dev
+ linux-perf-5.10
+ mlnx-ofed-kernel-modules
+ mlnx-ofed-kernel-utils
+ qat1.7.l
+ ibacm
+ ibverbs-providers
+ ibverbs-utils
+ infiniband-diags
+ libibmad5
+ libibnetdisc5
+ libibumad3
+ libibverbs1
+ librdmacm1
+ python3-pyverbs
+ rdma-core
+ rdmacm-utils
+ srptools
+ mstflint
+ stx-cert-manager-helm
+ stx-istio-helm
+ starlingx-dashboard
+ stx-sts-silicom-helm
+ stx-oran-o2-helm
+ python3-fm-api
+ fm-common-doc
+ fm-common
+ fm-doc
+ fm-mgr
+ fm-rest-api
+ python3-fmclient
+ stx-openstack-helm-fluxcd
+ stx-nginx-ingress-controller-helm
+ barbican-api
+ barbican-common
+ barbican-doc
+ barbican-keystone-listener
+ barbican-worker
+ python3-barbican
+ keystone-doc
+ keystone
+ python3-keystone
+ openstack-resource-agents
+ python-aodhclient-doc
+ python3-aodhclient
+ python3-barbicanclient
+ python3-cinderclient
+ python-glanceclient-doc
+ python3-glanceclient
+ python-gnocchiclient-doc
+ python3-gnocchiclient
+ python3-heatclient
+ horizon-doc
+ openstack-dashboard-apache
+ openstack-dashboard
+ python3-django-horizon
+ python-ironicclient-doc
+ python3-ironicclient
+ python-keystoneclient-doc
+ python3-keystoneclient
+ python3-neutronclient
+ python-novaclient-doc
+ python3-novaclient
+ python-openstackclient-doc
+ python3-openstackclient
+ python-openstacksdk-doc
+ python3-openstacksdk
+ python-osc-lib-doc
+ python3-osc-lib
+ python3-oslo.messaging
+ python-pankoclient-doc
+ python3-pankoclient
+ python3-wsme
+ rabbitmq-server
+ stx-portieris-helm
+ collectd-extensions
+ kube-cpusets
+ kube-memory
+ monitor-tools
+ stx-kubevirt-app-helm
+ armada
+ base-passwd
+ bash-builtins
+ bash-dbgsym
+ bash-doc
+ bash-static
+ bash
+ centos-debian-compat
+ ceph-base
+ ceph-common
+ ceph-fuse
+ ceph-mds
+ ceph-mgr
+ ceph-mon
+ ceph-osd
+ ceph
+ libcephfs2
+ librados2
+ libradosstriper1
+ librbd1
+ librgw2
+ python3-ceph-argparse
+ python3-ceph
+ python3-cephfs
+ python3-rados
+ python3-rbd
+ python3-rgw
+ radosgw
+ resource-agents
+ containerd
+ crictl
+ isc-dhcp-client
+ isc-dhcp-common
+ dnsmasq-base
+ dnsmasq-utils
+ dnsmasq
+ docker-registry
+ drbd-utils
+ efitools
+ etcd-client
+ etcd-server
+ etcd
+ facter
+ libfacter3.14.12
+ grub-efi-amd64
+ grub-common
+ grub-pc-bin
+ grub-pc
+ grub2-common
+ grub2
+ grubby
+ haproxy
+ haproxy-config
+ helm
+ ifupdown-extra
+ kdump-tools
+ iputils-arping
+ iputils-ping
+ iputils-tracepath
+ isolcpus-device-plugin
+ k8s-pod-recovery
+ k8s-cni-cache-cleanup
+ kubectl-cert-manager
+ dmeventd
+ dmsetup
+ libdevmapper1.02.1
+ libdevmapper-event1.02.1
+ liblvm2cmd2.03
+ lvm2
+ python3-keyrings.alt
+ kubernetes-1.21.8-client
+ kubernetes-1.21.8-kubeadm
+ kubernetes-1.21.8-node
+ kubernetes-1.22.5-client
+ kubernetes-1.22.5-kubeadm
+ kubernetes-1.22.5-node
+ kubernetes-1.23.1-client
+ kubernetes-1.23.1-kubeadm
+ kubernetes-1.23.1-node
+ kubernetes-1.24.4-client
+ kubernetes-1.24.4-kubeadm
+ kubernetes-1.24.4-node
+ kubernetes-1.25.3-client
+ kubernetes-1.25.3-kubeadm
+ kubernetes-1.25.3-node
+ kubernetes-1.26.1-client
+ kubernetes-1.26.1-kubeadm
+ kubernetes-1.26.1-node
+ kubernetes-unversioned
+ ldapscripts
+ libfdt
+ lighttpd-mod-geoip
+ lighttpd-modules-mysql
+ lighttpd
+ linuxptp
+ lldpd
+ lsb-base
+ net-tools
+ ldap-utils
+ libldap-2.4-2
+ libldap-common
+ slapd
+ slapd-ppolicy-check-password
+ libparted-fs-resize0
+ libparted-i18n
+ libparted2
+ parted-doc
+ parted
+ pf-bb-config
+ bond-cni
+ containernetworking-plugins
+ puppet
+ puppet-boolean
+ puppet-module-ceph
+ puppet-dnsmasq
+ puppet-drbd
+ puppet-hash2stuff
+ puppet-module-cristifalcas-etcd
+ puppet-module-keystone
+ puppet-module-horizon
+ puppet-ldap
+ puppet-lvm
+ puppet-network
+ puppet-module-openstacklib
+ puppet-module-oslo
+ puppet-puppi
+ puppet-module-puppetlabs-rabbitmq
+ puppet-module-nanliu-staging
+ puppet-module-puppetlabs-firewall
+ puppet-module-puppetlabs-haproxy
+ puppet-module-puppetlabs-mysql
+ puppet-module-puppetlabs-postgresql
+ puppet-module-puppetlabs-stdlib
+ python3-docker
+ python3-keyring
+ python3-nsenter
+ python3-pkg-resources
+ python3-setuptools
+ python3.9
+ libopenscap25
+ openscap-common
+ openscap-scanner
+ openscap-utils
+ python3-openscap
+ openvswitch-common
+ openvswitch-switch-dpdk
+ openvswitch-switch
+ runc
+ shim-helpers-amd64-signed-template
+ shim-unsigned
+ synce4l
+ libnss-myhostname
+ libpam-systemd
+ libsystemd0
+ libudev1
+ systemd-sysv
+ systemd
+ udev
+ systemd-presets
+ trident-installer
+ watchdog
+ stx-dell-storage-helm
+ playbookconfig
+ distributedcloud-client-dcmanagerclient
+ stx-sriov-fec-operator-helm
+ cgcs-patch-agent
+ cgcs-patch-controller
+ cgcs-patch
+ python3-cgcs-patch
+ enable-dev-patch
+ python3-patch-alarm
+ software
+ cert-alarm
+ cert-mon
+ cgts-client
+ config-gate-worker
+ config-gate
+ controllerconfig
+ sysinv
+ sysinv-agent
+ storageconfig
+ tsconfig
+ workerconfig-standalone
+ stx-audit-helm
+ stx-security-profiles-operator-helm
+ stx-metrics-server-helm
+ stx-node-feature-discovery-helm
+ python3-oidcauthtools
+ stx-oidc-auth-helm
+ mtce-hostw
+ mtce-hwmon
+ mtce-lmon
+ mtce-pmon
+ mtce
+ mtce-compute
+ mtce-control
+ mtce-storage
+ platform-upgrades
+ platform-kickstarts
+ pxe-network-installer
+ build-info
+ ceph-manager
+ collector
+ engtools
+ k8s-coredump
+ logmgmt
+ namespace-utils
+ nfscheck
+ platform-util-controller
+ platform-util-noncontroller
+ platform-util
+ python3-platform-util
+ python3-barbicanclient
+ python3-cephclient
+ stx-extensions
+ update-motd
+ worker-utils
+ audit-config
+ base-files-config
+ containerd-config
+ debian-release-config
+ dhcp-config
+ dnsmasq-config
+ docker-config
+ filesystem-scripts
+ initscripts-config
+ io-scheduler
+ iptables-config
+ iscsi-initiator-utils-config
+ lvm2-config
+ logrotate-config
+ lighttpd-config
+ memcached-custom
+ multipath-config
+ multus-config
+ nfs-utils-config
+ ntp-config
+ openldap-config
+ openssh-config
+ openvswitch-config
+ pam-config
+ python-siteconfig
+ rabbitmq-server-config
+ rsync-config
+ sudo-config
+ syslog-ng-config
+ systemd-config
+ tuned-config
+ stx-harbor-helm
+ stx-vault-helm
+ registry-token-server
+ stx-snmp-helm
+ cgcs-dpdk
+ cgcs-dpdk-apps
+ libvirt-clients
+ libvirt-daemon
+ libvirt-daemon-driver-lxc
+ libvirt-daemon-driver-qemu
+ libvirt-daemon-driver-storage-gluster
+ libvirt-login-shell
+ libvirt0
+ nvidia-vgpu
+ python3-networking-avs
+ python3-vswitchclient
+ python3-vswitchclient-bash-completion
+ qemu-block-extra
+ qemu-guest-agent
+ qemu-system-common
+ qemu-system-data
+ qemu-system-x86
+ qemu-utils
+ vswitch
+ vswitch-init-vswitch
+ wrcp-playbook
+ wrcp-deployment-manager
+ wrcp-deployment-manager-plugins
+ puppet-avs
+ wrcp-theme
+ dm-monitor
+ linux-rt
+
+
+
+
+
\ No newline at end of file
diff --git a/build-tools/stx/patch/EXAMPLES/patch-recipe-sample.xml b/build-tools/stx/patch/EXAMPLES/patch-recipe-sample.xml
new file mode 100644
index 00000000..756e1dbf
--- /dev/null
+++ b/build-tools/stx/patch/EXAMPLES/patch-recipe-sample.xml
@@ -0,0 +1,36 @@
+
+
+ sample_patch
+ 1.0.0
+ 0001
+ starlingx
+ sample patch test
+ Reboot required patch
+ Sample instructions
+ Sample warning
+
+ Y
+ N
+ DEV
+
+
+
+
+
+ scripts/pre-install.sh
+ scripts/post-install.sh
+
+
+
+ sysvinv
+ linux
+ linux-rt
+
+
+
+ curl
+
+
\ No newline at end of file
diff --git a/build-tools/stx/patch/README.md b/build-tools/stx/patch/README.md
new file mode 100644
index 00000000..f1d93bef
--- /dev/null
+++ b/build-tools/stx/patch/README.md
@@ -0,0 +1,89 @@
+# Patch Builder Utility
+
+This utility will build patches based on .deb packages.
+
+### Prerequisites
+
+- Setup a build environment and build all packages/image
+- Make code changes to your packages and build them
+
+### Patch recipe schema
+
+The patch builder requires the following tags in the input xml (or patch recipe)
+
+```xml
+
+
+ sample_patch
+
+ 1.0.0
+
+ 0001
+
+ starlingx
+
+ sample patch test
+
+ Sample description
+
+ Sample instructions
+
+ Sample warning
+
+ Y
+
+ N
+
+ DEV
+
+
+
+
+
+
+ scripts/pre-install.sh
+ scripts/post-install.sh
+
+
+
+ sysvinv
+ linux
+ linux-rt
+
+
+
+
+ curl
+
+
+```
+
+
+### How to build a patch
+
+- Enter the builder container
+```bash
+$ stx shell
+$ cd $MY_REPO/build-tools/stx/patch
+```
+
+- Install py requirements
+```bash
+$ pip install -r requirements.txt
+```
+
+- Update the patch-recipe file. For examples please refer to the `EXAMPLES` folder.
+
+- Update any pre/post script. For examples, refer to the `scripts` folder.
+
+- Build your patch:
+
+```bash
+$ ./patch_builder.py --recipe EXAMPLES\patch-recipe-sample.xml
+```
diff --git a/build-tools/stx/patch/config/patch-recipe-schema.xsd b/build-tools/stx/patch/config/patch-recipe-schema.xsd
new file mode 100644
index 00000000..9855f7f7
--- /dev/null
+++ b/build-tools/stx/patch/config/patch-recipe-schema.xsd
@@ -0,0 +1,44 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/build-tools/stx/patch/fetch_debs.py b/build-tools/stx/patch/fetch_debs.py
new file mode 100644
index 00000000..e935ab6f
--- /dev/null
+++ b/build-tools/stx/patch/fetch_debs.py
@@ -0,0 +1,186 @@
+#
+# Copyright (c) 2023 Wind River Systems, Inc.
+#
+# SPDX-License-Identifier: Apache-2.0
+#
+'''
+Fetch deb and subdebs from the build system
+'''
+import os
+import sys
+import logging
+import shutil
+
+sys.path.append('..')
+import debsentry
+import repo_manage
+import utils
+
+BUILD_ROOT = os.environ.get('MY_BUILD_PKG_DIR')
+
+DEB_CONFIG_DIR = 'stx-tools/debian-mirror-tools/config/'
+PKG_LIST_DIR = os.path.join(os.environ.get('MY_REPO_ROOT_DIR'), DEB_CONFIG_DIR, 'debian/distro')
+
+logger = logging.getLogger('fetch_debs')
+utils.set_logger(logger)
+
+
class FetchDebs(object):
    """Fetch .deb packages from the build system's local apt repositories.

    Two kinds of downloads are supported:
      * STX-built packages (``need_dl_stx_pkgs``): each is expanded into all
        of its subdebs via the std and rt debsentry caches.
      * External binary packages (``need_dl_binary_pkgs``): versions are
        resolved from the base-bullseye.lst mirror list.

    Downloaded files land under ``<output_dir>/downloads/binary``.
    """

    def __init__(self):
        # Names of STX source packages whose subdebs should be downloaded
        self.need_dl_stx_pkgs = []
        # Names of third-party binary packages to download
        self.need_dl_binary_pkgs = []
        self.output_dir = os.path.join(BUILD_ROOT, 'dl_debs')
        self.apt_src_file = os.path.join(BUILD_ROOT, 'aptsrc')

        self.setup_apt_source()
        self.debs_fetcher = repo_manage.AptFetch(logger, self.apt_src_file, self.output_dir)

    def get_debs_clue(self, btype):
        """Return the debsentry cache path for build type ``btype``.

        :param btype: 'rt' for the real-time build; any other value is
                      treated as 'std'.
        """
        if btype != 'rt':
            btype = 'std'
        return os.path.join(BUILD_ROOT, 'caches', btype + '_debsentry.pkl')

    def get_all_debs(self):
        """Return the set of '<name>_<version>' subdeb entries for every
        package in ``need_dl_stx_pkgs``, merged from the std and rt caches.
        """
        all_debs = set()
        debs_clue_std = self.get_debs_clue('std')
        debs_clue_rt = self.get_debs_clue('rt')
        for pkg in self.need_dl_stx_pkgs:
            subdebs_std = debsentry.get_subdebs(debs_clue_std, pkg, logger)
            subdebs_rt = debsentry.get_subdebs(debs_clue_rt, pkg, logger)
            if not subdebs_std and not subdebs_rt:
                logger.warning('Failed to get subdebs for %s with local debsentry cache', pkg)
                continue
            if subdebs_std:
                all_debs.update(subdebs_std)
            if subdebs_rt:
                all_debs.update(subdebs_rt)

        return all_debs

    def setup_apt_source(self):
        """Recreate the output dir and write an apt source file pointing at
        the deb-local-build and deb-local-binary repositories.

        Exits the process if the apt source file cannot be written.
        """
        # Clean up the output dir so stale debs never leak into a patch
        if os.path.exists(self.output_dir):
            shutil.rmtree(self.output_dir)

        os.makedirs(self.output_dir, exist_ok=True)

        try:
            with open(self.apt_src_file, 'w') as f:
                repo_url = os.environ.get('REPOMGR_DEPLOY_URL')
                for repo in ('deb-local-build', 'deb-local-binary'):
                    f.write(' '.join(['deb [trusted=yes]', repo_url + repo, 'bullseye', 'main\n']))
            logger.debug(f'Created apt source file {self.apt_src_file} to download debs')
        except Exception as e:
            logger.error(str(e))
            logger.error('Failed to create the apt source file')
            sys.exit(1)

    def fetch_stx_packages(self):
        '''
        Download all debs and subdebs from the build system
        Save the files to $BUILD_ROOT/dl_debs
        '''
        dl_debs = self.get_all_debs()
        if not dl_debs:
            logger.warning('No STX packages were found')
            return

        # Keep the first version seen for each distinct deb name
        dl_debs_dict = {}
        for deb in dl_debs:
            # Entries look like '<name>_<version>'; split on the first '_'
            # only, so versions containing '_' are not truncated.
            name, version = deb.split('_', 1)
            if name not in dl_debs_dict:
                dl_debs_dict[name] = version
        logger.debug('##dldebs:%s', dl_debs_dict)

        # Filter list based on stx-std.lst - Deprecated on master, replaced
        # by debian_iso_image.inc on each repo
        stx_pkg_list_file = self.get_debian_pkg_iso_list()

        debs_to_remove = [deb for deb in dl_debs_dict if deb not in stx_pkg_list_file]
        for deb in debs_to_remove:
            dl_debs_dict.pop(deb)

        logger.debug(f'Package list after filtering:{dl_debs_dict}')

        logger.info(f'Total debs need to be downloaded: {len(dl_debs_dict)}')
        dl_debs_with_ver = [f'{k} {v}' for k, v in dl_debs_dict.items()]
        fetch_ret = self.download(dl_debs_with_ver)
        dl_bin_debs_dir = os.path.join(self.output_dir, 'downloads/binary')
        if len(fetch_ret['deb-failed']) == 0:
            logger.info(f'Successfully downloaded STX debs to {dl_bin_debs_dir}')
        else:
            logger.error(f'Failed to download STX debs to {dl_bin_debs_dir}')

    def get_debian_pkg_iso_list(self):
        """Walk $MY_REPO and collect package names from every
        debian_iso_image.inc file, skipping blank lines and '#' comments.
        """
        pkgs = []
        cgcs_root_dir = os.environ.get('MY_REPO')
        package_file_name = 'debian_iso_image.inc'
        logger.debug('Searching for %s under %s', package_file_name, cgcs_root_dir)
        for root, dirs, files in os.walk(cgcs_root_dir):
            if package_file_name in files:
                with open(os.path.join(root, package_file_name), 'r') as f:
                    pkgs.extend(line.strip() for line in f
                                if line.strip() and not line.startswith('#'))
        return pkgs

    def fetch_external_binaries(self):
        '''
        Download all binaries from the build system
        '''
        # Get debs from base-bullseye.lst
        # https://opendev.org/starlingx/tools/src/branch/master/debian-mirror-tools/config/debian/common/base-bullseye.lst
        if not self.need_dl_binary_pkgs:
            logger.debug("No binary packages to download")
            return

        all_debs = set()
        package_list = os.path.join(os.environ.get('MY_REPO_ROOT_DIR'),
                                    'stx-tools/debian-mirror-tools/config/debian/common/base-bullseye.lst')
        # Find pkgs in the list file; keep 'name version' of the first match.
        # NOTE(review): this is a substring match, so e.g. 'curl' would also
        # match a 'libcurl...' line — confirm this looseness is intended.
        logger.debug(f'Packages to find {self.need_dl_binary_pkgs}')
        for pkg in self.need_dl_binary_pkgs:
            logger.debug(f'checking {pkg}')
            with open(package_list, 'r') as f:
                for line in f:
                    if pkg in line:
                        pkg_entry = ' '.join(line.split()[:2])
                        all_debs.add(pkg_entry)
                        break

        logger.debug('Binary packages to download:%s', all_debs)
        fetch_ret = self.download(all_debs)
        dl_bin_debs_dir = os.path.join(self.output_dir, 'downloads/binary')
        if len(fetch_ret['deb-failed']) == 0:
            logger.info(f'Successfully downloaded external debs to {dl_bin_debs_dir} \n')
        else:
            logger.error(f'Failed to download external debs to {dl_bin_debs_dir} \n')

    def download(self, all_debs):
        """Fetch the given 'name version' entries with repo_manage.

        Exits the process on any fetch exception.
        :param all_debs: iterable of 'name version' strings
        :return: result dict from AptFetch; callers inspect its
                 'deb-failed' entry for failures.
        """
        try:
            logger.debug('Downloading debs...')
            fetch_ret = self.debs_fetcher.fetch_pkg_list(all_debs)
        except Exception as e:
            logger.error(str(e))
            logger.error('Exception occurred while fetching debs with repo_manage')
            sys.exit(1)
        return fetch_ret
+
+
if __name__ == '__main__':
    # Manual smoke test: download a few packages through the class.
    fetch_debs = FetchDebs()
    # set the packages you want to download
    # BUG FIX: the class consumes need_dl_stx_pkgs (it resolves both std and
    # rt subdebs from it); the old need_dl_std_pkgs/need_dl_rt_pkgs
    # attributes were never read, so nothing was fetched from STX.
    fetch_debs.need_dl_stx_pkgs = ['sysinv']
    fetch_debs.need_dl_binary_pkgs = ['tzdata', 'curl', 'apache2']

    fetch_debs.fetch_stx_packages()
    # Also exercise the binary download path, since binary pkgs are set above
    fetch_debs.fetch_external_binaries()
diff --git a/build-tools/stx/patch/metadata.py b/build-tools/stx/patch/metadata.py
new file mode 100644
index 00000000..867572f0
--- /dev/null
+++ b/build-tools/stx/patch/metadata.py
@@ -0,0 +1,219 @@
+#
+# Copyright (c) 2023 Wind River Systems, Inc.
+#
+# SPDX-License-Identifier: Apache-2.0
+#
+'''
+Class that holds the patch metadata information
+'''
+
+import json
+import logging
+import os
+import sys
+sys.path.append('..')
+import utils
+import xml.etree.ElementTree as ET
+from lxml import etree
+from xml.dom import minidom
+
+logger = logging.getLogger('metadata_parser')
+utils.set_logger(logger)
+
+INPUT_XML_SCHEMA = 'config/patch-recipe-schema.xsd'
+
+# Metadata components
+PATCH_ROOT_TAG = 'patch'
+PATCH_ID = 'patch_id'
+SW_VERSION = 'sw_version'
+PATCH_VERSION = 'patch_version'
+COMPONENT = 'component'
+STATUS = 'status'
+SUMMARY = 'summary'
+DESCRIPTION = 'description'
+INSTALL_INSTRUCTIONS = 'install_instructions'
+WARNINGS = 'warnings'
+REBOOT_REQUIRED = 'reboot_required'
+PRE_INSTALL = 'pre_install'
+POST_INSTALL = 'post_install'
+UNREMOVABLE = 'unremovable'
+REQUIRES = 'requires'
+REQUIRES_PATCH_ID = 'req_patch_id'
+PACKAGES = 'packages'
+STX_PACKAGES = 'stx_packages'
+BINARY_PACKAGES = 'binary_packages'
+SEMANTICS = 'semantics'
+
+
class PatchMetadata(object):
    """Parses a patch recipe XML, validates it against the XSD schema, and
    renders the patch's metadata.xml.
    """

    def __init__(self, patch_recipe_file):
        # Path to the input patch recipe XML file
        self.patch_recipe_file = patch_recipe_file
        self.stx_packages = []
        self.binary_packages = []
        self.requires = []

    def __str__(self):
        return json.dumps(self.__dict__)

    def __repr__(self):
        return self.__str__()

    def __add_text_tag_to_xml(self, parent, name, text):
        """
        Utility function for adding a text tag to an XML object
        :param parent: Parent element
        :param name: Element name
        :param text: Text value
        :return: The created element
        """
        tag = ET.SubElement(parent, name)
        tag.text = text
        return tag

    def __xml_to_dict(self, element):
        """
        Recursively convert an XML element tree into a dict.

        Leaf elements become their stripped text (or "" when empty);
        repeated sibling tags collapse into a list; XML comment nodes are
        skipped.
        :param element: xml element
        """
        if len(element) == 0:
            return element.text.strip() if element.text else ""
        result = {}
        for child in element:
            child_data = self.__xml_to_dict(child)
            # Verify if child.tag is comment
            if child.tag == etree.Comment:
                continue
            if child.tag in result:
                if isinstance(result[child.tag], list):
                    result[child.tag].append(child_data)
                else:
                    result[child.tag] = [result[child.tag], child_data]
            else:
                result[child.tag] = child_data
        return result

    def generate_patch_metadata(self, file_path):
        """Write the patch metadata XML to ``file_path``.

        Pre/post install script entries are reduced to their basenames,
        since the scripts sit at the top level of the patch archive.
        :param file_path: output path for metadata.xml
        """
        # strip path from pre_install and post_install scripts
        self.pre_install = self.pre_install.split('/')[-1]
        self.post_install = self.post_install.split('/')[-1]

        top_tag = ET.Element(PATCH_ROOT_TAG)
        self.__add_text_tag_to_xml(top_tag, PATCH_ID, self.patch_id)
        self.__add_text_tag_to_xml(top_tag, SW_VERSION, self.sw_version)
        self.__add_text_tag_to_xml(top_tag, PATCH_VERSION, self.patch_version)
        self.__add_text_tag_to_xml(top_tag, COMPONENT, self.component)
        self.__add_text_tag_to_xml(top_tag, SUMMARY, self.summary)
        self.__add_text_tag_to_xml(top_tag, DESCRIPTION, self.description)
        self.__add_text_tag_to_xml(top_tag, INSTALL_INSTRUCTIONS, self.install_instructions)
        self.__add_text_tag_to_xml(top_tag, WARNINGS, self.warnings)
        self.__add_text_tag_to_xml(top_tag, STATUS, self.status)

        # Booleans are serialized as 'Y'/'N' in the metadata format
        self.__add_text_tag_to_xml(top_tag, UNREMOVABLE, 'Y' if self.unremovable else 'N')
        self.__add_text_tag_to_xml(top_tag, REBOOT_REQUIRED, 'Y' if self.reboot_required else 'N')

        self.__add_text_tag_to_xml(top_tag, SEMANTICS, self.semantics)

        requires_atg = ET.SubElement(top_tag, REQUIRES)
        for req_patch in sorted(self.requires):
            self.__add_text_tag_to_xml(requires_atg, REQUIRES_PATCH_ID, req_patch)

        self.__add_text_tag_to_xml(top_tag, PRE_INSTALL, self.pre_install)
        self.__add_text_tag_to_xml(top_tag, POST_INSTALL, self.post_install)

        packages_tag = ET.SubElement(top_tag, PACKAGES)
        for package in sorted(self.debs):
            self.__add_text_tag_to_xml(packages_tag, "deb", package)

        # Save pretty-printed xml; 'with' guarantees the file is closed
        # (the original leaked the handle)
        tree = ET.tostring(top_tag)
        with open(file_path, "w") as outfile:
            outfile.write(minidom.parseString(tree).toprettyxml(indent="  "))

    def __tag_to_list(self, tag_content):
        """Wrap a scalar parsed value in a list; pass lists through as-is."""
        if not isinstance(tag_content, list):
            return [tag_content]
        return tag_content

    def parse_metadata(self, patch_recipe):
        """Populate this object's attributes from the parsed recipe dict.

        :param patch_recipe: dict produced by __xml_to_dict
        :raises Exception: if status is neither 'DEV' nor 'REL'
        """
        self.patch_id = patch_recipe[PATCH_ID]
        self.sw_version = patch_recipe[SW_VERSION]
        self.patch_version = patch_recipe[PATCH_VERSION]
        self.component = patch_recipe[COMPONENT]
        self.summary = patch_recipe[SUMMARY]
        self.description = patch_recipe[DESCRIPTION]
        if 'package' in patch_recipe[STX_PACKAGES]:
            self.stx_packages = self.__tag_to_list(patch_recipe[STX_PACKAGES]['package'])
        if 'package' in patch_recipe[BINARY_PACKAGES]:
            self.binary_packages = self.__tag_to_list(patch_recipe[BINARY_PACKAGES]['package'])
        self.install_instructions = patch_recipe[INSTALL_INSTRUCTIONS]
        self.warnings = patch_recipe[WARNINGS]
        self.reboot_required = patch_recipe[REBOOT_REQUIRED]
        self.pre_install = self.check_script_path(patch_recipe[PRE_INSTALL])
        self.post_install = self.check_script_path(patch_recipe[POST_INSTALL])
        self.unremovable = patch_recipe[UNREMOVABLE]
        self.status = patch_recipe[STATUS]
        if 'id' in patch_recipe[REQUIRES]:
            self.requires = self.__tag_to_list(patch_recipe[REQUIRES]['id'])
        self.semantics = patch_recipe[SEMANTICS]
        self.debs = []

        if self.status not in ('DEV', 'REL'):
            # Include the offending value (the original message was truncated)
            raise Exception(f'Supported status are DEV and REL, selected: {self.status}')

        logger.debug("Metadata parsed: %s", self)

    def parse_input_xml_data(self):
        """Parse the recipe file, validate it against the XSD schema, and
        feed the result to parse_metadata. Exits the process on failure.
        """
        try:
            xml_tree = etree.parse(self.patch_recipe_file)
        except Exception as e:
            logger.error(f"Error while parsing the input xml {e}")
            sys.exit(1)

        root = xml_tree.getroot()
        xml_schema = etree.XMLSchema(etree.parse(INPUT_XML_SCHEMA))

        # Validate the XML against the schema
        is_valid = xml_schema.validate(root)
        xml_dict = {}
        if is_valid:
            logger.info("XML is valid against the schema.")
            xml_dict = self.__xml_to_dict(root)
        else:
            logger.error("XML is not valid against the schema. Validation errors:")
            for error in xml_schema.error_log:
                logger.error(f"Line {error.line}: {error.message}")
            sys.exit(1)

        # Was a stray print(); keep the dump for debugging at debug level
        logger.debug('Parsed recipe: %s', xml_dict)
        self.parse_metadata(xml_dict)

    def check_script_path(self, script_path):
        """Resolve a pre/post install script path and verify it exists.

        :param script_path: relative or absolute path ('' means no script)
        :return: absolute path, or '' when no script was provided
        :raises FileNotFoundError: if the script does not exist
        """
        if not script_path:
            # No scripts provided
            return ''

        if not os.path.isabs(script_path):
            script_path = os.path.join(os.getcwd(), script_path)

        if not os.path.isfile(script_path):
            err_msg = f"Install script {script_path} not found"
            logger.error(err_msg)
            raise FileNotFoundError(err_msg)

        return script_path
+
+
+if __name__ == "__main__":
+ patch_recipe_file = "EXAMPLES/patch-recipe-sample.xml"
+ patch_metadata = PatchMetadata(patch_recipe_file)
+ patch_metadata.parse_input_xml_data()
diff --git a/build-tools/stx/patch/patch_builder.py b/build-tools/stx/patch/patch_builder.py
new file mode 100755
index 00000000..34095246
--- /dev/null
+++ b/build-tools/stx/patch/patch_builder.py
@@ -0,0 +1,191 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2023 Wind River Systems, Inc.
+#
+# SPDX-License-Identifier: Apache-2.0
+#
+'''
+Builds a Debian patch
+'''
+
+import hashlib
+import logging
+import os
+import shutil
+import subprocess
+import sys
+import tarfile
+import tempfile
+
+import click
+import fetch_debs
+import metadata
+from signing.patch_signing import sign_files
+
+sys.path.append('..')
+import utils
+
+logger = logging.getLogger('patch_builder')
+utils.set_logger(logger)
+
+# Patch signature files
+detached_signature_file = "signature.v2"
+mdsum_signature_file = "signature"
+
+# Patch output directory
+BUILD_ROOT = os.environ.get('MY_BUILD_PKG_DIR')
+PATCH_OUTPUT = os.path.join(BUILD_ROOT, "patch_output")
+
+
class PatchBuilder(object):
    """Builds a .patch file from a patch recipe XML.

    The patch is a gzipped tarball containing software.tar (the debs),
    metadata.tar, optional pre/post install scripts, and signature files.
    """

    def __init__(self, patch_recipe_file):
        self.metadata = metadata.PatchMetadata(patch_recipe_file)
        self.metadata.parse_input_xml_data()
        self.fetch_debs = fetch_debs.FetchDebs()
        # Wire the recipe's package lists into the downloader
        self.fetch_debs.need_dl_stx_pkgs = self.metadata.stx_packages
        self.fetch_debs.need_dl_binary_pkgs = self.metadata.binary_packages

    def get_md5(self, path):
        '''
        Utility function for generating the md5sum of a file
        :param path: Path to file
        :return: md5 digest as an integer
        '''
        md5 = hashlib.md5()
        block_size = 8192
        with open(path, 'rb') as f:
            for chunk in iter(lambda: f.read(block_size), b''):
                md5.update(chunk)
        return int(md5.hexdigest(), 16)

    def build_patch(self):
        '''
        Fetch the debs named in the recipe, then assemble software.tar,
        metadata.tar and the signature files into <patch_id>.patch.
        :return: False if no debs could be fetched, otherwise None
        '''
        logger.info(f"Generating patch {self.metadata.patch_id}")
        # Fetch debs from metadata and
        # Create software.tar, metadata.tar and signatures
        # Create a temporary working directory
        logger.debug("Fetching debs...")
        self.fetch_debs.fetch_stx_packages()
        self.fetch_debs.fetch_external_binaries()
        # verify if dir is not empty
        dl_dir = os.path.join(self.fetch_debs.output_dir, "downloads", "binary")
        if not os.listdir(dl_dir):
            logger.error("No debs fetched")
            return False
        logger.info("################ PATCH BUILD ################")
        logger.info("Download completed, building our patch")
        tmpdir = tempfile.mkdtemp(prefix="patch_")
        os.chdir(tmpdir)
        # 'with' closes the tar even if an add fails (original leaked it)
        with tarfile.open("software.tar", "w") as tar:
            # copy all files from dl_dir into the tar
            for file in os.listdir(dl_dir):
                logger.info(f"Saving file {file}")
                tar.add(os.path.join(dl_dir, file), arcname=file)
                # append deb name into metadata
                self.metadata.debs.append(file)

        pre_install = self.metadata.pre_install
        post_install = self.metadata.post_install
        # pre/post install scripts
        if pre_install:
            logger.debug(f"Copying pre-install script: {pre_install}")
            self.copy_script(pre_install)

        if post_install:
            logger.debug(f"Copying post-install script: {post_install}")
            self.copy_script(post_install)

        if not pre_install and not post_install and self.metadata.reboot_required == 'N':
            logger.warning("In service patch without restart scripts provided")

        # Generate metadata.xml
        logger.debug("Generating metadata file")
        self.metadata.generate_patch_metadata("metadata.xml")
        with tarfile.open("metadata.tar", "w") as tar:
            tar.add("metadata.xml")
        os.remove("metadata.xml")

        # Pack .patch file
        self.__sign_and_pack(f'{self.metadata.patch_id}.patch')

    def copy_script(self, install_script):
        """Copy a pre/post install script into the current working dir.

        :param install_script: path to the script
        :raises FileNotFoundError: if the script does not exist
        """
        if not os.path.isfile(install_script):
            err_msg = f"Install script {install_script} not found"
            logger.error(err_msg)
            raise FileNotFoundError(err_msg)

        shutil.copy(install_script, ".")

    def __sign_and_pack(self, patch_file):
        """
        Generates the patch signatures and pack the .patch file
        :param patch_file .patch file full path
        """
        filelist = ["metadata.tar", "software.tar"]

        if self.metadata.pre_install:
            filelist.append(self.metadata.pre_install)

        if self.metadata.post_install:
            filelist.append(self.metadata.post_install)

        # Generate the local signature file: XOR of the files' md5 digests
        logger.debug(f"Generating signature for patch files {filelist}")
        sig = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF
        for f in filelist:
            sig ^= self.get_md5(f)

        with open(mdsum_signature_file, "w") as sigfile:
            sigfile.write("%x" % sig)

        # this comes from patch_functions write_patch
        # Generate the detached signature
        #
        # Note: if cert_type requests a formal signature, but the signing key
        #       is not found, we'll instead sign with the "dev" key and
        #       need_resign_with_formal is set to True.
        need_resign_with_formal = sign_files(
            filelist,
            detached_signature_file,
            cert_type=None)

        logger.info(f"Formal signing status: {need_resign_with_formal}")

        # Save all regular files in the workdir into the .patch tarball
        files = [f for f in os.listdir('.') if os.path.isfile(f)]

        if not os.path.exists(PATCH_OUTPUT):
            os.makedirs(PATCH_OUTPUT)
        patch_full_path = os.path.join(PATCH_OUTPUT, patch_file)
        with tarfile.open(patch_full_path, "w:gz") as tar:
            for file in files:
                logger.info(f"Saving file {file}")
                tar.add(file)
        logger.info(f"Patch file created {patch_full_path}")

    def __sign_official_patches(self, patch_file):
        """
        Sign formal patch
        Called internally once a patch is created and formal flag is set to true
        :param patch_file full path to the patch file
        """
        logger.info("Signing patch %s", patch_file)
        try:
            subprocess.check_call(["sign_patch_formal.sh", patch_file])
        except subprocess.CalledProcessError as e:
            logger.exception("Failed to sign official patch. Call to sign_patch_formal.sh process returned non-zero exit status %i", e.returncode)
        except FileNotFoundError:
            logger.exception("sign_patch_formal.sh not found, make sure $STX_BUILD_HOME/repo/cgcs-root/build-tools is in the $PATH")
+
+
@click.command()
@click.option('--recipe', help='Patch recipe input XML file, examples are available under EXAMPLES directory',
              required=True)
def build(recipe):
    """CLI entry point: build a .patch file from the given recipe XML."""
    patch_builder = PatchBuilder(recipe)
    patch_builder.build_patch()


if __name__ == '__main__':
    build()
\ No newline at end of file
diff --git a/build-tools/stx/patch/requirements.txt b/build-tools/stx/patch/requirements.txt
new file mode 100644
index 00000000..9a73a986
--- /dev/null
+++ b/build-tools/stx/patch/requirements.txt
@@ -0,0 +1,3 @@
+click
+lxml
+pycryptodomex
\ No newline at end of file
diff --git a/build-tools/stx/patch/scripts/post-install.sh b/build-tools/stx/patch/scripts/post-install.sh
new file mode 100644
index 00000000..072dad49
--- /dev/null
+++ b/build-tools/stx/patch/scripts/post-install.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+#
+# Copyright (c) 2023 Wind River Systems, Inc.
+#
+# SPDX-License-Identifier: Apache-2.0
+#
+# Sample post-install hook, packaged into the patch and executed on the
+# target after the patch content is installed.
+#
+# The patching subsystem provides a patch-functions bash source file
+# with useful function and variable definitions.
+#
+. /etc/patching/patch-functions
+
+#
+# Declare an overall script return code
+# (PATCH_STATUS_OK is provided by patch-functions)
+#
+declare -i GLOBAL_RC=$PATCH_STATUS_OK
+
+echo "Post-install hook script"
+
+#
+# Exit the script with the overall return code
+#
+exit $GLOBAL_RC
diff --git a/build-tools/stx/patch/scripts/pre-install.sh b/build-tools/stx/patch/scripts/pre-install.sh
new file mode 100644
index 00000000..31996587
--- /dev/null
+++ b/build-tools/stx/patch/scripts/pre-install.sh
@@ -0,0 +1,24 @@
#!/bin/bash
#
#
# Copyright (c) 2023 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

# Pull in the helper definitions (functions and variables such as
# PATCH_STATUS_OK) that the patching subsystem provides.
source /etc/patching/patch-functions

# Overall script return code, reported back to the patching subsystem.
declare -i GLOBAL_RC=$PATCH_STATUS_OK

echo "Pre hook script"

# Exit with the accumulated overall status.
exit $GLOBAL_RC
diff --git a/build-tools/stx/patch/signing/certificates.py b/build-tools/stx/patch/signing/certificates.py
new file mode 100644
index 00000000..871f5712
--- /dev/null
+++ b/build-tools/stx/patch/signing/certificates.py
@@ -0,0 +1,51 @@
+#
+# Copyright (c) 2023 Wind River Systems, Inc.
+#
+# SPDX-License-Identifier: Apache-2.0
+#
+
# Development (dev) signing certificate, PEM-encoded X.509.
# NOTE: the continuation lines are indented inside the bytes literal;
# read_RSA_key() in patch_verify strips whitespace before decoding, so the
# indentation is tolerated there — do not reformat without checking other
# consumers of this constant.
dev_certificate = b"""-----BEGIN CERTIFICATE-----
 MIIDejCCAmKgAwIBAgICEAQwDQYJKoZIhvcNAQELBQAwQjELMAkGA1UEBhMCQ0Ex
 EDAOBgNVBAgMB09udGFyaW8xITAfBgNVBAoMGFdpbmQgUml2ZXIgU3lzdGVtcywg
 SW5jLjAeFw0xNzA4MTgxNDM3MjlaFw0yNzA4MTYxNDM3MjlaMEExCzAJBgNVBAYT
 AkNBMRAwDgYDVQQIDAdPbnRhcmlvMSAwHgYDVQQKDBdXaW5kIFJpdmVyIFN5c3Rl
 bXMsIEluYzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALcs0/Te6x69
 lxQOxudrF+uSC5F9r5bKUnZNWUKHyXKlN4SzZgWGs+fb/DqXIm7piuoQ6GH7GEQd
 BEN1j/bwp30LZlv0Ur+8jhCvEdqsIP3vUXfv7pv0bomVs0Q8ZRI/FYZhjxYlyFKr
 gZFV9WPP8S9SwfClHjaYRUudvwvjHHnnnkZ9blVFbXU0Xe83A8fWd0HNqAU1TlmK
 4CeSi4FI4aRKiXJnOvgv2UoJMI57rBIVKYRUH8uuFpPofOwjOM/Rd6r3Ir+4/CX6
 +/NALOBIEN6M05ZzoiyiH8NHELknQBqzNs0cXObJWpaSinAOcBnPCc7DNRwgQzjR
 SdcE9FG1+LcCAwEAAaN7MHkwCQYDVR0TBAIwADAsBglghkgBhvhCAQ0EHxYdT3Bl
 blNTTCBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYEFDRbal2KxU0hQyv4
 MVnWrW96+aWoMB8GA1UdIwQYMBaAFJaLO1x8+jti7V6pLGbUyqpy0M36MA0GCSqG
 SIb3DQEBCwUAA4IBAQBmcPFZzEoPtuMPCFvJ/0cmngp8yvCGxWz3JEDkdGYSCVGs
 TG5e9DeltaHOk6yLvZSRY1so30GQnyB9q8v4DwEGVslKg8u9w/WEU81wl6Q2FZ5s
 XRP6TASQ0Lbg9e4b3bnTITJJ8jT/zF29NaohgC2fg0UwVuldZLfa7FihJB4//OC1
 UdNEcmdqTVRqN2oco1n3ZUWKXvG2AvGsoiqu+lsWX1MXacoFvJexSACLrUvOoXMW
 i38Ofp7XMCAm3rM0cXv7Uc9WCrgnTWbEvDgjGfRAmcM9moWGoWX6E46Xkojpkfle
 Ss6CHAMK42aZ/+MWQlZEzNK49PtomGMjn5SuoK8u
 -----END CERTIFICATE-----"""

# Formal (official release) signing certificate, PEM-encoded X.509.
# Same indentation caveat as dev_certificate above.
formal_certificate = b"""-----BEGIN CERTIFICATE-----
 MIIDezCCAmOgAwIBAgICEAMwDQYJKoZIhvcNAQELBQAwQjELMAkGA1UEBhMCQ0Ex
 EDAOBgNVBAgMB09udGFyaW8xITAfBgNVBAoMGFdpbmQgUml2ZXIgU3lzdGVtcywg
 SW5jLjAeFw0xNzA4MTgxNDM1MTJaFw0yNzA4MTYxNDM1MTJaMEIxCzAJBgNVBAYT
 AkNBMRAwDgYDVQQIDAdPbnRhcmlvMSEwHwYDVQQKDBhXaW5kIFJpdmVyIFN5c3Rl
 bXMsIEluYy4wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC+0fS8ybg8
 M37lW+lcR9LmQAR2zUJdbnl2L0fj3W/7W+PMm3mJWeQDTf19wf+qHHrgEkjxGp10
 BSXWZYdPyCdOjAay/Ew1s/waFeAQZpf4vv/9D1Y/4sVkqct9ibo5NVgvVsjqKVnX
 IVhyzHlhBSUqYhZlS/SOx8JcLQWSUMJoP2XR4Tv28xIXi0Fuyp8QBwUmSwmvfPy4
 0yxzfON/b8kHld5aTY353KLXh/5YWsn1zRlOYfS1OuJk4LGjm6HvmZtxPNUZk4vI
 NA24rH4FKkuxyM3x8aPi3LE4G6GSrJDuNi28xzOj864rlFoyLODy/mov1YMR/g4k
 d3mG6UbRckPxAgMBAAGjezB5MAkGA1UdEwQCMAAwLAYJYIZIAYb4QgENBB8WHU9w
 ZW5TU0wgR2VuZXJhdGVkIENlcnRpZmljYXRlMB0GA1UdDgQWBBTjyMN/AX07rEmB
 6sz6pnyt/m+eSzAfBgNVHSMEGDAWgBSWiztcfPo7Yu1eqSxm1MqqctDN+jANBgkq
 hkiG9w0BAQsFAAOCAQEASpyCu/adGTvNjyy/tV+sL/kaVEKLA7q36HUrzQkTjMPX
 y8L8PVZoeWprkz7cvYTyHmVTPLBvFkGEFVn8LWi9fTTp/UrHnxw6fvb+V78mOypi
 4A1aU9+dh3L6arpd4jZ4hDiLhEClesGCYVTVBdsrh3zSOc51nT4hosyBVpRd/VgQ
 jhGJBBMEXASZceady4ajK5jnR3wF8oW/he4NYF97qh8WWKVsIYbwgLS0rT58q7qq
 vpjPxMOahUdACkyPyt/XJICTlkanVD7KgG3oLWpc+3FWPHGr+F7mspPLZqUcEFDV
 bGF+oDJ7p/tqHsNvPlRDVGqh0QdiAkKeS/SJC9jmAw==
 -----END CERTIFICATE-----
 """
+
diff --git a/build-tools/stx/patch/signing/patch_signing.py b/build-tools/stx/patch/signing/patch_signing.py
new file mode 100644
index 00000000..1ede6c78
--- /dev/null
+++ b/build-tools/stx/patch/signing/patch_signing.py
@@ -0,0 +1,99 @@
+#
+# Copyright (c) 2023 Wind River Systems, Inc.
+#
+# SPDX-License-Identifier: Apache-2.0
+#
+
+import logging
+import os
+import signing.patch_verify as patch_verify
+import utils
+
+from Cryptodome.Signature import PKCS1_PSS
+from Cryptodome.Hash import SHA256
+
+
+logger = logging.getLogger('signing')
+utils.set_logger(logger)
+
+# To save memory, read and hash 1M of files at a time
+default_blocksize = 1 * 1024 * 1024
+
+# When we sign patches, look for private keys in the following paths
+#
+# The (currently hardcoded) path on the signing server will be replaced
+# by the capability to specify filename from calling function.
+private_key_files = {
+ patch_verify.cert_type_formal_str: '/signing/keys/formal-private-key.pem',
+ patch_verify.cert_type_dev_str: os.path.expandvars(
+ '$MY_REPO/build-tools/signing/dev-private-key.pem')
+}
+
+
def sign_files(filenames, signature_file, private_key=None, cert_type=None):
    """
    Utility function for signing data in files.
    :param filenames: A list of files containing the data to be signed
    :param signature_file: The name of the file to which the signature will be
                           stored
    :param private_key: If specified, sign with this private key. Otherwise,
                        the files in private_key_files will be searched for
                        and used, if found.
    :param cert_type: If specified, and private_key is not specified, sign
                      with a key of the specified type. e.g. 'dev' or 'formal'
    :return: True if the formal key was requested but only the dev key was
             available, meaning the caller must re-sign formally later
    """

    # Hash the data across all files
    blocksize = default_blocksize
    data_hash = SHA256.new()
    for filename in filenames:
        with open(filename, 'rb') as infile:
            data = infile.read(blocksize)
            while len(data) > 0:
                data_hash.update(data)
                data = infile.read(blocksize)

    # Find a private key to use, if not already provided
    need_resign_with_formal = False
    if private_key is None:
        if cert_type is not None:
            # A specific key type is asked for
            assert (cert_type in list(private_key_files)
                    ), "cert_type=%s is not a known cert type" % cert_type
            dict_key = cert_type
            filename = private_key_files[dict_key]
            # Fix: message previously logged the literal placeholder
            # "(unknown)" instead of the key file path.
            logger.info('Cert type "%s": Checking to see if %s exists', cert_type, filename)
            if not os.path.exists(filename) and dict_key == patch_verify.cert_type_formal_str:
                # The formal key is asked for, but is not locally available,
                # substitute the dev key, and we will try to resign with the
                # formal key later.
                dict_key = patch_verify.cert_type_dev_str
                filename = private_key_files[dict_key]
                need_resign_with_formal = True
                # logger.warn is a deprecated alias; use warning
                logger.warning('Formal key not found, using development keys')

            if os.path.exists(filename):
                # Use a context manager so the key file handle is closed
                with open(filename, 'rb') as key_file:
                    private_key = patch_verify.read_RSA_key(key_file.read())
        else:
            # Search for any available key.  Note: no break, so a later
            # entry in private_key_files overrides an earlier one — this
            # preserves the original search behaviour.
            for dict_key in private_key_files.keys():
                filename = private_key_files[dict_key]
                if os.path.exists(filename):
                    with open(filename, 'rb') as key_file:
                        private_key = patch_verify.read_RSA_key(key_file.read())

    assert (private_key is not None), "Could not find signing key"

    # Encrypt the hash (sign the data) with the key we find
    signer = PKCS1_PSS.new(private_key)
    signature = signer.sign(data_hash)

    # Save it
    with open(signature_file, 'wb') as outfile:
        outfile.write(signature)

    return need_resign_with_formal
+
diff --git a/build-tools/stx/patch/signing/patch_verify.py b/build-tools/stx/patch/signing/patch_verify.py
new file mode 100644
index 00000000..bf7eb475
--- /dev/null
+++ b/build-tools/stx/patch/signing/patch_verify.py
@@ -0,0 +1,191 @@
+#
+# Copyright (c) 2023 Wind River Systems, Inc.
+#
+# SPDX-License-Identifier: Apache-2.0
+#
+
+import os
+import logging
+
+from Cryptodome.Signature import PKCS1_v1_5
+from Cryptodome.Signature import PKCS1_PSS
+from Cryptodome.Hash import SHA256
+from Cryptodome.PublicKey import RSA
+from Cryptodome.Util.asn1 import DerSequence
+from binascii import a2b_base64
+
+from signing.certificates import dev_certificate
+from signing.certificates import formal_certificate
+
+# To save memory, read and hash 1M of files at a time
+default_blocksize = 1 * 1024 * 1024
+
+dev_certificate_marker = '/etc/pki/wrs/dev_certificate_enable.bin'
+DEV_CERT_CONTENTS = b'Titanium patching'
+LOG = logging.getLogger('main_logger')
+
+cert_type_dev_str = 'dev'
+cert_type_formal_str = 'formal'
+cert_type_dev = [cert_type_dev_str]
+cert_type_formal = [cert_type_formal_str]
+cert_type_all = [cert_type_dev_str, cert_type_formal_str]
+
+
def verify_hash(data_hash, signature_bytes, certificate_list):
    """
    Check that a hash's signature validates against an approved certificate.
    :param data_hash: A hash of the data to be validated
    :param signature_bytes: A pre-generated signature (typically, the hash
                            encrypted with a private key)
    :param certificate_list: A list of approved certificates or public keys
                             which the signature is validated against
    :return: True if the signature was validated against a certificate
    """
    for cert in certificate_list:
        pub_key = read_RSA_key(cert)
        pub_key.exportKey()

        # PSS is the recommended signature scheme, but some tools (like
        # OpenSSL) use the older v1_5 scheme, so both are attempted in
        # order.  Patches are signed with PSS; ISOs use v1_5 so customers
        # can check detached signatures with OpenSSL.
        for scheme in (PKCS1_PSS, PKCS1_v1_5):
            verifier = scheme.new(pub_key)
            try:
                if verifier.verify(data_hash, signature_bytes):  # pylint: disable=not-callable
                    return True
            except ValueError:
                # This scheme rejected the signature; try the next one.
                continue

    return False
+
+
def get_public_certificates_by_type(cert_type=None):
    """
    Build a list of accepted certificates for validation, filtered by type.

    :param cert_type: A list of strings, certificate types to include
                      'formal' - include formal certificate if available
                      'dev' - include developer certificate if available
                      None (default) selects every known type
    :return: A list of certificates in PEM format (formal first, then dev)
    """
    selected = cert_type_all if cert_type is None else cert_type

    # Known certificates in the order the original list was assembled.
    known = (
        (cert_type_formal_str, formal_certificate),
        (cert_type_dev_str, dev_certificate),
    )
    return [cert for type_name, cert in known if type_name in selected]
+
+
def get_public_certificates():
    """
    Build the list of accepted certificates used for validation.

    The formal certificate is always trusted.  The developer certificate is
    appended only when the marker file exists and holds a valid signature:
    the file must contain DEV_CERT_CONTENTS signed with the formal private
    key, which this function verifies before trusting the dev certificate.
    :return: A list of certificates in PEM format
    """
    cert_list = [formal_certificate]

    # No marker file: only the formal certificate is trusted.
    if not os.path.exists(dev_certificate_marker):
        return cert_list

    with open(dev_certificate_marker, 'rb') as infile:
        signature = infile.read()

    expected_hash = SHA256.new(DEV_CERT_CONTENTS)
    if verify_hash(expected_hash, signature, cert_list):
        cert_list.append(dev_certificate)
    else:
        LOG.error("Invalid data found in " + dev_certificate_marker)

    return cert_list
+
+
def read_RSA_key(key_data):
    """
    Utility function for reading an RSA key half from encoded data
    :param key_data: PEM data (str or bytes) containing a raw key or an
                     X.509 certificate
    :return: An RSA key object
    :raises ValueError: if the data is neither a readable key nor a
                        parseable X.509 certificate
    """
    try:
        # Handle data that is just a raw key
        key = RSA.importKey(key_data)
    except ValueError:
        # The RSA.importKey function cannot read X.509 certificates directly
        # (depending on the version of the Crypto library). Instead, we
        # may need to extract the key from the certificate before building
        # the key object
        #
        # Fix: callers pass bytes (the b"""...""" certificates in this
        # package, files read in 'rb' mode), and bytes.replace() with str
        # arguments raises TypeError — normalize to str first so the PEM
        # cleanup below works for both input types.
        if isinstance(key_data, bytes):
            key_data = key_data.decode('utf-8')

        # We need to strip the BEGIN and END lines from PEM first
        x509lines = key_data.replace(' ', '').split()
        x509text = ''.join(x509lines[1:-1])
        x509data = DerSequence()
        x509data.decode(a2b_base64(x509text))

        # X.509 contains a few parts. The first part (index 0) is the
        # certificate itself, (TBS or "to be signed" cert) and the 7th field
        # of that cert is subjectPublicKeyInfo, which can be imported.
        # RFC3280
        tbsCert = DerSequence()
        tbsCert.decode(x509data[0])

        # Initialize RSA key from the subjectPublicKeyInfo field
        key = RSA.importKey(tbsCert[6])
    return key
+
+
def verify_files(filenames, signature_file, cert_type=None):
    """
    Verify data files against a detached signature.
    :param filenames: A list of files containing the data which was signed
    :param signature_file: The name of the file containing the signature
    :param cert_type: Only use specified certificate type to verify
                      (dev/formal); None selects the system-determined set
    :return: True if the signature was verified, False otherwise
    """
    # Hash the contents of every file into a single digest, reading in
    # fixed-size chunks to bound memory use.
    data_hash = SHA256.new()
    for name in filenames:
        with open(name, 'rb') as infile:
            for chunk in iter(lambda: infile.read(default_blocksize), b''):
                data_hash.update(chunk)

    # Load the detached signature.
    with open(signature_file, 'rb') as sig_file:
        signature_bytes = sig_file.read()

    # Select the trusted certificates and validate the signature.
    if cert_type is None:
        certificate_list = get_public_certificates()
    else:
        certificate_list = get_public_certificates_by_type(cert_type=cert_type)
    return verify_hash(data_hash, signature_bytes, certificate_list)
+