stx: discover buildable packages
Remove hard-coded lists of build types and layers. Discover all
buildable layers and build types by scanning the source code base
for config files. Discover the build order of layers and build types
from config files. Allow options to reduce the build set to a specific
subset of layers or build-types.

Story: 2008862
Task: 43154
Depends-On: https://review.opendev.org/c/starlingx/root/+/832145
Depends-On: https://review.opendev.org/c/starlingx/tools/+/832704
Signed-off-by: Scott Little <scott.little@windriver.com>
Signed-off-by: hbai <haiqing.bai@windriver.com>
Change-Id: I3e6ab41ed79877a926f3adc25c4058436bbccc17
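For orientation (an illustration, not part of the commit): the discovery mechanism relies on small per-repo config files rather than hard-coded lists. Under a hypothetical workspace, with made-up repo names, layer names, and priorities, the files being scanned look like:

    cgcs-root/stx/utilities/debian_build_layer.cfg                  # names the layer this repo feeds, e.g. "flock"
    cgcs-root/stx/utilities/debian_pkg_dirs                         # one buildable package dir per line (the "std" build type)
    cgcs-root/stx/utilities/debian_pkg_dirs_rt                      # package dirs for the "rt" build type
    stx-tools/debian-mirror-tools/config/debian/flock/priority      # build order of the layer, e.g. "2"
    stx-tools/debian-mirror-tools/config/debian/flock/std/priority  # build order of the build type within the layer

The new discovery.py, git_utils.py, and repo_utils.py below turn this layout into sorted lists of layers, build types, and package directories.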
build-tools/stx/.gitignore (new file)
@@ -0,0 +1 @@
+/__pycache__
build-tools/stx/build-pkgs
@@ -17,6 +17,7 @@
 import argparse
 import debrepack
 import debsentry
+import discovery
 import dsc_depend
 import dsccache
 import logging
@@ -31,6 +32,7 @@ import time
 import utils
 import yaml


 BUILDER_URL = os.environ.get('BUILDER_URL')
 REPOMGR_URL = os.environ.get('REPOMGR_URL')
 BUILD_ROOT = os.environ.get('MY_BUILD_PKG_DIR')
@@ -38,8 +40,11 @@ STX_ROOT = os.environ.get('MY_REPO_ROOT_DIR')
 PKGBUILDER_ROOT = "/localdisk/pkgbuilder"
 USER = os.environ.get('MYUNAME')
 PROJECT = os.environ.get('PROJECT')
+
+# TODO - Do we want a separate repo for each layer? each build type?
 REPO_BUILD = 'deb-local-build'
 REPO_SOURCE = 'deb-local-source'
+
 # Lists all stx source layers which contain 'debian_pkg_dirs'
 STX_SOURCE_REPOS = [
     'SDO-rv-service',
@@ -78,16 +83,23 @@ STX_SOURCE_REPOS = [
     'utilities',
     'vault-armada-app',
 ]
-STX_LAYERS = ['distro', 'flock']
+
+STX_DEFAULT_DISTRO = discovery.STX_DEFAULT_DISTRO
+STX_DEFAULT_BUILD_TYPE = discovery.STX_DEFAULT_BUILD_TYPE
+STX_DEFAULT_BUILD_TYPE_LIST = discovery.STX_DEFAULT_BUILD_TYPE_LIST
+
+ALL_DISTROS = discovery.get_all_distros()
+ALL_LAYERS = discovery.get_all_layers(distro=STX_DEFAULT_DISTRO)
+ALL_BUILD_TYPES = discovery.get_all_build_types(distro=STX_DEFAULT_DISTRO)

 logger = logging.getLogger('debcontroller')
 utils.set_logger(logger)


-def get_pkgname_with_dsc(dscs, dsc_path):
-    for package, dsc in dscs.items():
+def get_pkg_dir_from_dsc(dscs, dsc_path):
+    for pkg_dir, dsc in dscs.items():
         if dsc.strip() in dsc_path:
-            return package
+            return pkg_dir
     return None
@@ -166,63 +178,12 @@ def show_task_log(log_file, wait_time, success_str, exit_str):
     return status


-def bc_safe_fetch(dst_file, entry_handler=None):
-    entries = []
-    try:
-        with open(dst_file, 'r') as flist:
-            lines = list(line for line in (p.strip() for p in flist) if line)
-    except IOError as e:
-        logger.error(str(e))
-    except Exception as e:
-        logger.error(str(e))
-    else:
-        for entry in lines:
-            entry = entry.strip()
-            if entry.startswith('#'):
-                continue
-            if entry_handler:
-                entries.append(entry_handler(entry))
-            else:
-                entries.append(entry)
-    return entries
-
-
-def pkgdirs_entry_handler(entry):
-    if entry:
-        return os.path.basename(entry)
-    return []
-
-
-def get_pkgs_of_layer(layer):
-    """
-    Scan all STX source layers to get all buildable packages of layer
-    debian_build_layer.cfg defines whether the STX source layer belongs
-    to 'distro' or 'flock' or other layer
-    Params: None
-    Return:
-        List of all STX buildable packages of layer
-    """
-    pkgs = []
-    stx_src_root = os.path.join(os.environ.get('MY_REPO_ROOT_DIR'),
-                                'cgcs-root/stx')
-    for root, dirs, files in os.walk(stx_src_root):
-        if dirs:
-            pass
-        for r in files:
-            if r == 'debian_build_layer.cfg':
-                layers = []
-                layer_file = os.path.join(root, r)
-                layers.extend(bc_safe_fetch(layer_file, None))
-                if layer in layers:
-                    # The current STX src layer belongs to 'layer'
-                    pkgs_f = os.path.join(root, 'debian_pkg_dirs')
-                    msg = ' '.join(['Pkgdirs', pkgs_f, 'for layer', layer])
-                    logger.debug(msg)
-                    pkgs.extend(bc_safe_fetch(pkgs_f, pkgdirs_entry_handler))
-
-    return pkgs
-
-
 def get_all_packages():
     """
     Scan all STX source layers to get all buildable packages
@@ -243,35 +204,7 @@ def get_all_packages():
     return list(set(pkgs))


-def fetch_debian_folder(package):
-    for layer in STX_SOURCE_REPOS:
-        pkg_dir_file = os.path.join(STX_ROOT, 'cgcs-root/stx', layer,
-                                    'debian_pkg_dirs')
-        if not os.path.exists(pkg_dir_file):
-            logger.warning('debian_pkg_dirs does not exist for layer %s, please check', layer)
-            continue
-
-        logger.debug(' '.join(['Fetching debian meta in', pkg_dir_file]))
-        try:
-            with open(pkg_dir_file, 'r') as fdir:
-                debs = list(line for line in (d.strip() for d in fdir) if line)
-        except IOError as e:
-            logger.error(str(e))
-        except Exception as e:
-            logger.error(str(e))
-        else:
-            for deb in debs:
-                deb = deb.strip()
-                if deb.startswith('#'):
-                    continue
-                if os.path.basename(deb) == package:
-                    msg = ' '.join(['Meta of', package, 'in', deb])
-                    logger.debug(msg)
-                    return os.path.join(STX_ROOT, 'cgcs-root/stx', layer, deb)
-    return None
-
-
-def get_package_jobs(package):
+def get_package_jobs(pkg_dir, distro=STX_DEFAULT_DISTRO):
     '''
     Returns the number of parallel jobs of the package
     If the serial build is not enabled by the meta file,
@@ -279,7 +212,7 @@ def get_package_jobs(package):
     environment variable MAX_CPUS.
     '''
     jobs = os.environ.get('MAX_CPUS', 1)
-    pkg_dir = fetch_debian_folder(package)
+    package = discovery.package_dir_to_package_name(pkg_dir, distro=distro)
     if pkg_dir:
         pkg_meta_yaml = os.path.join(pkg_dir, 'debian/meta_data.yaml')
         try:
@@ -306,19 +239,19 @@ class BuildController():
    'Status: fail': build fail
    'Status: give-back': try again later
    """
-    def __init__(self):
+    def __init__(self, distro=STX_DEFAULT_DISTRO):
         self.attrs = {
             'mode': 'private',
-            'type': 'std',
+            'distro': distro,
             'avoid': True,
             'parallel': False,
             'exit_on_fail': False,
             'run_tests': False
         }
         self.kits = {
-            'dsc_cache': None,
+            'dsc_cache': {},
             'repo_mgr': None,
-            'dsc_maker': None
+            'dsc_maker': {},
         }
         self.lists = {
             'success': [],
@@ -342,13 +275,18 @@ class BuildController():
     def build_avoid(self, avoid):
         self.attrs['avoid'] = avoid

-    def start(self):
-        if not self.kits['dsc_cache']:
-            pkl_file = os.path.join(BUILD_ROOT, self.attrs['type'], 'dsc.pkl')
-            self.kits['dsc_cache'] = dsccache.DscCache(logger, pkl_file)
-            if not self.kits['dsc_cache']:
-                logger.warning(' '.join(['Failed to create dsc cache',
-                                         pkl_file]))
+    def start(self, build_types=ALL_BUILD_TYPES):
+        build_types_to_init = ALL_BUILD_TYPES
+        if build_types is not None:
+            build_types_to_init = build_types
+
+        for build_type in build_types_to_init:
+            if not build_type in self.kits['dsc_cache']:
+                pkl_file = os.path.join(BUILD_ROOT, build_type, 'dsc.pkl')
+                self.kits['dsc_cache'][build_type] = dsccache.DscCache(logger, pkl_file)
+                if not self.kits['dsc_cache'][build_type]:
+                    logger.warning(' '.join(['Failed to create dsc cache',
+                                             pkl_file]))

         if not self.kits['repo_mgr']:
             logger.critical("Failed to create repo manager")
@@ -356,46 +294,54 @@ class BuildController():
             self.kits['repo_mgr'].upload_pkg(REPO_BUILD, None)
             self.kits['repo_mgr'].upload_pkg(REPO_SOURCE, None)

-        build_dir = os.path.join(BUILD_ROOT, self.attrs['type'])
-        os.makedirs(build_dir, exist_ok=True)
-
         recipes_dir = os.path.join(BUILD_ROOT, 'recipes')
         os.makedirs(recipes_dir, exist_ok=True)

-        if not self.kits['dsc_maker']:
-            try:
-                self.kits['dsc_maker'] = debrepack.Parser(build_dir,
-                                                          recipes_dir, 'debug')
-            except Exception as e:
-                logger.error(str(e))
-                logger.error("Failed to create dsc maker")
-                return False
-            else:
-                logger.info("Successfully created dsc maker")
+        for build_type in build_types_to_init:
+            build_dir = os.path.join(BUILD_ROOT, build_type)
+            os.makedirs(build_dir, exist_ok=True)
+
+            if not build_type in self.kits['dsc_maker']:
+                try:
+                    self.kits['dsc_maker'][build_type] = debrepack.Parser(build_dir,
+                                                                          recipes_dir, 'debug')
+                except Exception as e:
+                    logger.error(str(e))
+                    logger.error("Failed to create dsc maker")
+                    return False
+                else:
+                    logger.info("Successfully created dsc maker")

         # load the persistent chroot on shared volume
         logger.info("Loading chroot")
         req_chroots_action('loadchroot', None)
         logger.info("Successfully loaded chroot")
         return True

     def stop(self):
         return self.show_build_stats()

-    def clean(self):
+    def clean(self, build_types=ALL_BUILD_TYPES):
         """
         Clean the build env: clean all build artifacts under
         <path to>/std or <path to>/rt and empty the local build repo
         """

+        if build_types is None:
+            build_types = ALL_BUILD_TYPES
+
         # clean build artifacts
-        build_dir = os.path.join(BUILD_ROOT, self.attrs['type'])
-        if os.path.exists(build_dir):
-            logger.debug(' '.join(['Cleaning the build directory', build_dir]))
-            try:
-                shutil.rmtree(build_dir)
-            except Exception as e:
-                logger.error(str(e))
-                logger.error("Failed to clean the build directory")
-            else:
-                logger.info("Finished cleaning the build directory")
+        for build_type in build_types:
+            build_dir = os.path.join(BUILD_ROOT, build_type)
+            if os.path.exists(build_dir):
+                logger.debug(' '.join(['Cleaning the build directory', build_dir]))
+                try:
+                    shutil.rmtree(build_dir)
+                except Exception as e:
+                    logger.error(str(e))
+                    logger.error("Failed to clean the build directory")
+                else:
+                    logger.info("Finished cleaning the build directory")

         # clean build repo
         if self.kits['repo_mgr']:
@@ -484,14 +430,14 @@ class BuildController():
             debsentry.set_subdebs(debs_clue, package, sdebs, logger)
         return True

-    def upload_with_dsc(self, deb, dsc, repo_name):
+    def upload_with_dsc(self, pkg_name, dsc, repo_name):
         if not os.path.exists(dsc):
             logger.error(' '.join(['Dsc file', dsc, 'does not exist']))
             return False

         dsc_pkg = os.path.basename(dsc).split('_')[0]
-        if deb != dsc_pkg:
-            logger.warning(''.join(['Package name passed in is ', deb,
+        if pkg_name != dsc_pkg:
+            logger.warning(''.join(['Package name passed in is ', pkg_name,
                                     ', from dsc is ', dsc_pkg, ', did not match.']))
             logger.info(' '.join(['Existing source for', dsc_pkg,
                         'will be deleted from repository', repo_name, 'before new source is uploaded']))
@@ -512,20 +458,23 @@ class BuildController():
         logger.info("Successfully uploaded source %s to repository %s", dsc, repo_name)
         return True

-    def req_add_task(self, package, dsc_path):
+    def req_add_task(self, pkg_dir, dsc_path, build_type=STX_DEFAULT_BUILD_TYPE):
         status = 'fail'
         dsc = os.path.basename(dsc_path)

+        pkg_name = discovery.package_dir_to_package_name(pkg_dir, self.attrs['distro'])
+
         req_params = {}
         req_params['mode'] = self.attrs['mode']
-        req_params['type'] = self.attrs['type']
+        req_params['type'] = build_type
         req_params['project'] = PROJECT
         req_params['user'] = USER
-        req_params['name'] = package
+        req_params['name'] = pkg_name
         req_params['dsc'] = dsc
-        req_params['jobs'] = get_package_jobs(package)
+        req_params['jobs'] = get_package_jobs(pkg_dir, self.attrs['distro'])
         req_params['run_tests'] = self.attrs['run_tests']

         try:
             resp = requests.get(BUILDER_URL + 'addtask', data=req_params)
             resp.raise_for_status()
@@ -534,13 +483,13 @@ class BuildController():
         else:
             logger.debug(resp.text)
             if 'success' in resp.text:
-                log = os.path.join(BUILD_ROOT, self.attrs['type'], package,
+                log = os.path.join(BUILD_ROOT, build_type, pkg_name,
                                    dsc.replace('.dsc', '_amd64.build'))
                 ret = show_task_log(log, 3, 'Status: successful',
                                     'Finished at')
                 if 'success' in ret:
-                    self.upload_with_deb(package, os.path.join(BUILD_ROOT,
-                                         self.attrs['type'], package))
+                    self.upload_with_deb(pkg_name, os.path.join(BUILD_ROOT,
+                                         build_type, pkg_name))
                     self.req_kill_task('sbuild')
                     status = 'success'
         return status
@@ -572,136 +521,224 @@ class BuildController():
         else:
             logger.debug(resp.text)

-    def create_dsc(self, package, pkg_meta):
+    def create_dsc(self, pkg_name, pkg_dir, build_type=STX_DEFAULT_BUILD_TYPE):
         """
         Call dsc maker (debrepack) to generate the new dsc for package
         Params:
-            package: package name
-            pkg_meta: path to the package's debian folder
+            pkg_name: package name
+            pkg_dir: path to the directory containing the package's debian folder
+            build_type: build type ... probably 'std' or 'rt'
         Return: result list like:
             ['dhcp-2.10.1.tis.dsc' 'dhcp-2.10.tar.xz' 'dhcp-2.10.tar.xz.orig']
         """
         skip_build = False
         # Check whether there are changes on package's debian folder
-        new_checksum = self.kits['dsc_maker'].checksum(pkg_meta)
-        self.pkgs_digests[package] = new_checksum
-        if self.kits['dsc_cache']:
-            old_checksum = self.kits['dsc_cache'].get_package_digest(package)
+        new_checksum = self.kits['dsc_maker'][build_type].checksum(pkg_dir)
+        self.pkgs_digests[pkg_dir] = new_checksum
+        if self.kits['dsc_cache'][build_type]:
+            old_checksum = self.kits['dsc_cache'][build_type].get_package_digest(pkg_dir)
             if old_checksum and old_checksum == new_checksum:
-                logger.info(' '.join(['No source meta changes of', package]))
+                logger.info(' '.join(['No source meta changes of', pkg_name]))
                 skip_build = True

         if self.attrs['avoid'] and skip_build:
-            self.lists['success'].append(package)
-            logger.info(' '.join(['Skip build', package, 'again']))
+            self.lists['success'].append(pkg_dir)
+            logger.info(' '.join(['Skip build', pkg_name, 'again']))
             logger.info(' '.join(['Force to build, please use -c/--clean']))
-            return None
+            return []

-        logger.debug(' '.join([pkg_meta, 'is ready to create dsc']))
+        logger.debug(' '.join([pkg_dir, 'is ready to create dsc']))

-        pkgdir = os.path.join(BUILD_ROOT, self.attrs['type'], package)
-        if os.path.exists(pkgdir):
+        # TODO, add additional path elements like layer?
+        pkg_build_dir = os.path.join(BUILD_ROOT, build_type, pkg_name)
+        if os.path.exists(pkg_build_dir):
             try:
-                shutil.rmtree(pkgdir)
+                shutil.rmtree(pkg_build_dir)
             except Exception as e:
                 logger.error(str(e))
             else:
-                logger.debug(' '.join(['Successfully removed old', pkgdir]))
-        os.makedirs(pkgdir)
+                logger.debug(' '.join(['Successfully removed old', pkg_build_dir]))
+        os.makedirs(pkg_build_dir)

         try:
             src_mirror_dir = os.path.join(os.environ.get('STX_MIRROR'), 'sources')
-            dsc_recipes = self.kits['dsc_maker'].package(pkg_meta, src_mirror_dir)
+            dsc_recipes = self.kits['dsc_maker'][build_type].package(pkg_dir, src_mirror_dir)
         except Exception as e:
             logger.error(str(e))
             return None
         else:
             if not dsc_recipes:
-                logger.error(' '.join(['Failed to create dsc for', package]))
+                logger.error(' '.join(['Failed to create dsc for', pkg_name]))
                 return None
-            logger.debug(' '.join(['Successfully created dsc for', package]))
+            logger.debug(' '.join(['Successfully created dsc for', pkg_name]))
             return dsc_recipes

-    def run_build_loop(self, pkgs_dsc):
-        build_dir = os.path.join(BUILD_ROOT, self.attrs['type'])
+    def run_build_loop(self, pkgs_dsc, build_type=STX_DEFAULT_BUILD_TYPE):
+        build_dir = os.path.join(BUILD_ROOT, build_type)
         dsc_list_file = os.path.join(build_dir, 'dsc.lst')
         deps_resolver = dsc_depend.Dsc_build_order(dsc_list_file, logger)

         for p in range(len(pkgs_dsc)):
             pkgs_can_build = deps_resolver.get_build_able_pkg(1)
-            for dsc in pkgs_can_build:
+            for dsc_path in pkgs_can_build:
                 logger.info(' '.join(['Depends resolver tells to build',
-                                      os.path.basename(dsc)]))
-                package = get_pkgname_with_dsc(pkgs_dsc, dsc)
-                status = self.req_add_task(package, dsc)
+                                      os.path.basename(dsc_path)]))
+                pkg_dir = get_pkg_dir_from_dsc(pkgs_dsc, dsc_path)
+                pkg_name = discovery.package_dir_to_package_name(pkg_dir, distro=self.attrs['distro'])
+                status = self.req_add_task(pkg_dir, dsc_path, build_type=build_type)
                 if 'success' in status:
                     logger.info(' '.join(['Successfully built',
-                                          package]))
-                    deps_resolver.pkg_accomplish(dsc)
-                    self.lists['success'].append(package)
-                    pkg_md5 = self.pkgs_digests[package]
-                    self.kits['dsc_cache'].set_package_digest(package, pkg_md5)
+                                          pkg_name]))
+                    deps_resolver.pkg_accomplish(dsc_path)
+                    self.lists['success'].append(pkg_dir)
+                    pkg_md5 = self.pkgs_digests[pkg_dir]
+                    self.kits['dsc_cache'][build_type].set_package_digest(pkg_dir, pkg_md5)
                 else:
-                    logger.info(' '.join(['Failed to build', package, str(p)]))
-                    self.lists['fail'].append(package)
+                    logger.info(' '.join(['Failed to build', pkg_name, str(p)]))
+                    self.lists['fail'].append(pkg_dir)
                     self.req_stop_task()
                     if self.attrs['exit_on_fail']:
                         return

         logger.info("Build loop done, please check the stats")

-    def build_route(self, port, data):
-        if port == 'package':
-            self.build_packages(data)
-        if port == 'layer':
-            self.build_layers(data)
-        if build_port == 'all':
-            self.build_all()
-
-    def build_all(self):
-        packages = get_all_packages()
-        if packages:
-            total_pkgs = len(packages)
-            logger.debug(''.join(['All packages(', str(total_pkgs), '):',
-                                  ','.join(packages)]))
-            self.build_packages(packages)
+    def build_all(self, layers=ALL_LAYERS, build_types=None, packages=None):
+        if layers:
+            for layer in layers:
+                if layer not in ALL_LAYERS:
+                    logger.error(' '.join([layer, 'is not a valid layer']))
+                    return
         else:
-            logger.error('Failed to get all buildable packages')
+            layers = ALL_LAYERS

-    def build_layers(self, layers):
-        if not layers:
-            logger.error('Failed to get layers')
-            return
+        if build_types:
+            for build_type in build_types:
+                if build_type not in ALL_BUILD_TYPES:
+                    logger.error(' '.join([build_type, 'is not a valid build_type']))
+                    return
+
+        if layers:
+            total_layers = len(layers)
+            logger.debug(' '.join(['Building ', str(total_layers), ' layers:',
+                                   ','.join(layers)]))
+            self.build_layers(layers=layers, build_types=build_types, packages=packages)
+        else:
+            logger.error('No layers specified for the build.')
+
+
+    def build_layer_and_build_type(self, layer=None, build_type=None, packages=None):
+        if not layer:
+            logger.error('Failed to specify layer')
+            return
+
+        if not build_type:
+            logger.error('Failed to specify build_type')
+            return
+
+        pkg_dirs = discovery.package_dir_list(distro=self.attrs['distro'], layer=layer, build_type=build_type)
+        word = "all"
+        if packages:
+            word = "selected"
+            pkg_dirs = discovery.filter_package_dirs_by_package_names(pkg_dirs, packages, distro=self.attrs['distro'])
+
+        if not pkg_dirs:
+            logger.debug(' '.join(['Found no buildable packages matching selection criteria in',
+                                   'build_type', build_type,
+                                   'of layer', layer]))
+            return
+
+        logger.info(' '.join(['Start to build', word, 'packages in',
+                              'build_type', build_type,
+                              'of layer', layer]))
+
+        packages = discovery.package_dirs_to_package_names(pkg_dirs)
+        logger.debug(' '.join(['Building packages:',
+                               ','.join(packages)]))
+        self.build_packages(pkg_dirs=pkg_dirs, build_type=build_type)
+
+        logger.info(' '.join(['Finished building packages in',
+                              'build_type', build_type,
+                              'of layer', layer]))
+
+
+    def build_layer_and_build_types(self, layer=None, build_types=STX_DEFAULT_BUILD_TYPE_LIST, packages=None):
+        if not layer:
+            logger.error('Failed to specify layer')
+            return
+
+        if not build_types:
+            logger.error('Failed to specify build_types')
+            return

         # remove duplication
-        layers = list(set(layers))
-        for layer in layers:
-            if layer not in STX_LAYERS:
-                logger.error(' '.join([layer, 'is not a valid layer']))
-            else:
-                logger.info(' '.join(['Start to build all packages in layer',
-                                      layer]))
-                packages = get_pkgs_of_layer(layer)
-                if packages:
-                    logger.debug(''.join([layer, ' need packages:',
-                                          ','.join(packages)]))
-                    self.build_packages(packages)
-                else:
-                    logger.error(' '.join(['Failed to get packages for layer',
-                                           layer]))
-                logger.info(' '.join(['Finished building packages in layer',
-                                      layer]))
+        build_types = list(set(build_types))
+
+        valid_build_type = discovery.get_layer_build_types(layer, distro=self.attrs['distro'])
+
+        # sort the build_type list so we build in the proper order
+        build_types = discovery.sort_build_type_list(build_types, layer, distro=self.attrs['distro'])
+
+        for build_type in build_types:
+            if build_type not in valid_build_type:
+                logger.info(' '.join(['Skipping build_type', build_type, 'which is not valid for layer', layer]))
+                continue
+            self.build_layer_and_build_type(layer=layer, build_type=build_type, packages=packages)
+
+        return

-    def build_packages(self, packages):
+    def build_layer(self, layer=None, build_types=STX_DEFAULT_BUILD_TYPE_LIST, packages=None):
+        if not layer:
+            logger.error('Failed to specify layer')
+            return
+
+        if layer not in ALL_LAYERS:
+            logger.error(' '.join([layer, 'is not a valid layer']))
+            return
+
+        logger.info(' '.join(['Start to build all packages in layer',
+                              layer]))
+        self.build_layer_and_build_types(layer=layer, build_types=build_types, packages=packages)
+        logger.info(' '.join(['Finished building packages in layer',
+                              layer]))
+        return
+
+
+    def build_layers(self, layers=None, build_types=None, packages=None):
+        if not layers:
+            logger.error('Failed to specify layers')
+            return
+
         # remove duplication
-        packages = list(set(packages))
+        layers = list(set(layers))
+
+        for layer in layers:
+            if layer not in ALL_LAYERS:
+                logger.error(' '.join([layer, 'is not a valid layer']))
+                return
+
+        # sort the layer list so we build in the proper order
+        layers = discovery.sort_layer_list(layers, distro=self.attrs['distro'])
+
+        for layer in layers:
+            if build_types is None:
+                build_types = discovery.get_layer_build_types(layer=layer, distro=self.attrs['distro'])
+            self.build_layer(layer=layer, build_types=build_types, packages=packages)
+
+        return
+
+    def build_packages(self, pkg_dirs, build_type=STX_DEFAULT_BUILD_TYPE):
+        # remove duplication
+        pkg_dirs = list(set(pkg_dirs))
+
+        logger.debug(' '.join(['build_packages: Building: ', str(pkg_dirs)]))

         fdsc_file = None
         packages_dscs = {}
-        self.lists['build-needed'] = packages
+        self.lists['build-needed'] = pkg_dirs

-        build_dir = os.path.join(BUILD_ROOT, self.attrs['type'])
+        build_dir = os.path.join(BUILD_ROOT, build_type)
         os.makedirs(build_dir, exist_ok=True)

         dscs_list_file = os.path.join(build_dir, 'dsc.lst')
@@ -711,33 +748,33 @@ class BuildController():
             fdsc_file.truncate()

         # Now check and create the debian meta one by one
-        for deb in packages:
+        for pkg_dir in pkg_dirs:
             dsc_file = ""
-            deb = deb.strip()
-            deb_meta_path = fetch_debian_folder(deb)
-            if not deb_meta_path:
-                logger.error(' '.join(['No debian meta found, skip', deb]))
-                continue
-
-            deb_recipes = self.create_dsc(deb, deb_meta_path)
+            pkg_name = discovery.package_dir_to_package_name(pkg_dir, distro=self.attrs['distro'])
+            deb_recipes = self.create_dsc(pkg_name, pkg_dir, build_type=build_type)
             if deb_recipes:
-                dsc_file = os.path.join(build_dir, deb, deb_recipes[0])
-                packages_dscs[deb.strip()] = dsc_file
+                dsc_file = os.path.join(build_dir, pkg_name, deb_recipes[0])
+                logger.debug("dsc_file = %s" % dsc_file)
+                packages_dscs[pkg_dir.strip()] = dsc_file
                 fdsc_file.write(dsc_file + '\n')
                 if self.kits['repo_mgr']:
-                    self.upload_with_dsc(deb, dsc_file, REPO_SOURCE)
-            else:
+                    self.upload_with_dsc(pkg_name, dsc_file, REPO_SOURCE)
+            elif deb_recipes is None:
                 if self.attrs['exit_on_fail']:
                     if fdsc_file:
                         fdsc_file.close()
                     return
+                continue
+            else:
+                # Empty set indicates the package is unchanged
+                continue

         if fdsc_file:
             fdsc_file.close()

         # Start to build
         if packages_dscs:
-            self.run_build_loop(packages_dscs)
+            self.run_build_loop(packages_dscs, build_type=build_type)
         else:
             logger.info("No debian dsc files found")
@@ -752,23 +789,26 @@ class BuildController():
         success_number = len(self.lists['success'])
         if success_number > 0:
             logger.info("Successfully built: %d", success_number)
-            for deb in sorted(self.lists['success']):
-                logger.info(deb)
+            for pkg_dir in sorted(self.lists['success']):
+                pkg_name = discovery.package_dir_to_package_name(pkg_dir, self.attrs['distro'])
+                logger.info(pkg_name)

-        # failed_pkgs is the universal set of failed packages for various reasons
-        failed_pkgs = list(set(self.lists['build-needed']) - set(self.lists['success']))
-        failed_number = len(failed_pkgs)
+        # failed_pkg_dirs is the universal set of failed packages for various reasons
+        failed_pkg_dirs = list(set(self.lists['build-needed']) - set(self.lists['success']))
+        failed_number = len(failed_pkg_dirs)
         if failed_number > 0:
             ret_val = 1
             logger.error("Failed to build: %d", failed_number)
-            for deb in sorted(failed_pkgs):
-                logger.error(deb)
-        # self.lists['fail'] is the subset of failed_pkgs
+            for pkg_dir in sorted(failed_pkg_dirs):
+                pkg_name = discovery.package_dir_to_package_name(pkg_dir, self.attrs['distro'])
+                logger.error(pkg_name)
+        # self.lists['fail'] is the subset of failed_pkg_dirs
         # particularly refer to those failed packages reported by pkgbuilder
         if len(self.lists['fail']) > 0:
             logger.info("List of failed packages:")
-            for deb in sorted(self.lists['fail']):
-                logger.error(deb)
+            for pkg_dir in sorted(self.lists['fail']):
+                pkg_name = discovery.package_dir_to_package_name(pkg_dir, self.attrs['distro'])
+                logger.error(pkg_name)
         logger.info("For the failure reason, you can check with:")
         logger.info("'cat /localdisk/builder.log | grep ERROR' or")
         logger.info("'cat ${MY_WORKSPACE}/<std or rt>/<Failed package>/*.build'")
@@ -796,53 +836,77 @@ def bc_reg_signal_handler():


 if __name__ == "__main__":
-    default_layer = 'distro'
-    build_port = 'all'
-    build_data = None
+    distro = STX_DEFAULT_DISTRO
+    layers = None
+    build_types = None
+    packages = None

     parser = argparse.ArgumentParser(description="build-pkgs helper")
-    parser.add_argument('-c', '--clean', help="Start a fresh building",
+    parser.add_argument('-c', '--clean', help="Start a fresh build",
                         action='store_true')
     parser.add_argument('-e', '--exit_on_fail', help="Exit for any fail",
                         action='store_true')
     parser.add_argument('-t', '--test', help="Run package tests during build",
                         action='store_true')

+    parser.add_argument('-d', '--distro', type=str, nargs=1,
+                        help="name of the distro to build\n %s" % ALL_DISTROS,
+                        default=STX_DEFAULT_DISTRO, required=False)
+    parser.add_argument('-b', '--build-types', type=str,
+                        help="comma separated list of all build-types to build\n %s" % ALL_BUILD_TYPES,
+                        default=None, required=False)
+    parser.add_argument('-l', '--layers', type=str,
+                        help="comma separated list of all layers to build\n %s" % ALL_LAYERS,
+                        default=None, required=False)
-    # set mutually options pair for package build and layer build
     build_group = parser.add_mutually_exclusive_group()
-    build_group.add_argument('-a', '--all', help="Packages with comma",
+    build_group.add_argument('-a', '--all', help="Builds all packages",
                              action='store_true')
-    build_group.add_argument('-l', '--layers', help="Layers with comma",
-                             type=str)
     build_group.add_argument('-p', '--packages', help="Packages with comma",
                              type=str)
     args = parser.parse_args()
-    if args.packages:
-        build_port = 'package'
-        build_data = args.packages.strip().split(',')
-    else:
-        if args.layers:
-            build_port = 'layer'
-            build_data = args.layers.strip().split(',')
-        else:
-            if args.all:
-                build_port = 'all'
-                build_data = None
-            else:

+    if args.distro:
+        if args.distro not in ALL_DISTROS:
+            logger.error(' '.join(['Distro', args.distro, 'not in', ','.join(ALL_DISTROS)]))
+            logger.error("Please consult: build-pkgs --help")
+            sys.exit(1)
+        distro = args.distro
+        ALL_LAYERS = discovery.get_all_layers(distro=distro)
+        ALL_BUILD_TYPES = discovery.get_all_build_types(distro=distro)
+
+    if args.build_types:
+        build_types = args.build_types.strip().split(',')
+        for build_type in build_types:
+            if build_type not in ALL_BUILD_TYPES:
+                logger.error(' '.join(['Build_type', build_type, 'not in', ','.join(ALL_BUILD_TYPES)]))
+                logger.error("Please consult: build-pkgs --help")
+                sys.exit(1)

-    build_controller = BuildController()
+    if args.layers:
+        layers = args.layers.strip().split(',')
+        for layer in layers:
+            if layer not in ALL_LAYERS:
+                logger.error(' '.join(['Layer', layer, 'not in', ','.join(ALL_LAYERS)]))
+                logger.error("Please consult: build-pkgs --help")
+                sys.exit(1)
+
+    if args.packages:
+        packages = args.packages.strip().split(',')
+    else:
+        if args.all:
+            packages = None
+
+    build_controller = BuildController(distro=distro)
     if args.clean:
         build_controller.build_avoid = False
-        if build_port == 'all':
-            build_controller.clean()
+        if not packages:
+            build_controller.clean(build_types=build_types)
     if args.exit_on_fail:
         build_controller.attrs['exit_on_fail'] = True
     if args.test:
         build_controller.attrs['run_tests'] = True

-    if not build_controller.start():
+    if not build_controller.start(build_types=build_types):
         logger.critical("Fail to initialize build controller, exit ......")
         sys.exit(1)
@@ -856,8 +920,8 @@ if __name__ == "__main__":
                               pkgbuilder_log]))
         sys.exit(1)

-    build_controller.build_route(build_port, build_data)
+    build_controller.build_all(layers=layers, build_types=build_types, packages=packages)
     ret_value = build_controller.stop()

-    logger.info("Build controller done")
+    logger.info("build-pkgs done")
     sys.exit(ret_value)
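A minimal sketch of driving the refactored controller from Python, mirroring the __main__ flow above (the layer and build-type values are illustrative, and the surrounding build environment — MY_BUILD_PKG_DIR, the pkgbuilder container, the repo manager — is assumed to already be up):

    controller = BuildController(distro='debian')
    # start() now creates one dsc_cache and one dsc_maker per build type
    if controller.start(build_types=['std']):
        # restrict the build to the 'flock' layer, 'std' build type, all packages
        controller.build_all(layers=['flock'], build_types=['std'], packages=None)
        ret = controller.stop()   # logs the build stats and returns the exit code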
@ -16,6 +16,7 @@
|
||||
import apt_pkg
|
||||
import debian.deb822
|
||||
from debian.debian_support import BaseVersion
|
||||
import discovery
|
||||
import git
|
||||
import hashlib
|
||||
import logging
|
||||
@ -271,7 +272,7 @@ class Parser():
|
||||
raise Exception(f"{pkgpath}: No such file or directory")
|
||||
|
||||
self.pkginfo["pkgpath"] = os.path.abspath(pkgpath)
|
||||
self.pkginfo["pkgname"] = os.path.basename(pkgpath)
|
||||
self.pkginfo["pkgname"] = discovery.package_dir_to_package_name(pkgpath, 'debian')
|
||||
self.pkginfo["packdir"] = os.path.join(self.basedir, self.pkginfo["pkgname"])
|
||||
|
||||
self.pkginfo["debfolder"] = os.path.join(self.pkginfo["pkgpath"], "debian")
|
||||
|
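The effect of the debrepack change, on a hypothetical package directory: a package dir whose debian/meta_data.yaml carries a debname entry now resolves to that deb name rather than the directory basename:

    # hypothetical example: .../kernel-modules/perf with "debname: linux-perf"
    name = discovery.package_dir_to_package_name('/path/to/kernel-modules/perf', 'debian')
    # -> 'linux-perf' if debian/meta_data.yaml sets debname, else 'perf'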
build-tools/stx/discovery.py (new file, 220 lines)

# Copyright (c) 2021 Wind River Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import fnmatch
import os
import re
import glob
import yaml

from git_utils import git_list
from repo_utils import repo_root
from utils import bc_safe_fetch

LAYER_PRIORITY_DEFAULT = 99
BUILD_TYPE_PRIORITY_DEFAULT = 99

STX_DEFAULT_DISTRO = "debian"
STX_DEFAULT_DISTRO_LIST = [ "debian", "centos" ]
STX_DEFAULT_BUILD_TYPE = "std"
STX_DEFAULT_BUILD_TYPE_LIST = [STX_DEFAULT_BUILD_TYPE]


def get_all_distros():
    distro_lst = STX_DEFAULT_DISTRO_LIST
    return sorted(distro_lst)


def get_build_type_priority(build_type, layer, distro="debian"):
    prio = BUILD_TYPE_PRIORITY_DEFAULT
    if build_type is None:
        return BUILD_TYPE_PRIORITY_DEFAULT
    dir = os.environ.get('MY_REPO_ROOT_DIR')
    if dir is None:
        return BUILD_TYPE_PRIORITY_DEFAULT
    if not os.path.isdir(dir):
        return BUILD_TYPE_PRIORITY_DEFAULT
    build_type_priority_file = os.path.join(dir, "stx-tools",
                                            "%s%s" % (distro, "-mirror-tools"),
                                            "config", distro, layer,
                                            build_type, "priority")
    if not os.path.isfile(build_type_priority_file):
        return BUILD_TYPE_PRIORITY_DEFAULT
    prio = int(bc_safe_fetch(build_type_priority_file, None)[0])
    return prio


def get_layer_priority(layer, distro="debian"):
    prio = LAYER_PRIORITY_DEFAULT
    if layer is None:
        return LAYER_PRIORITY_DEFAULT
    dir = os.environ.get('MY_REPO_ROOT_DIR')
    if dir is None:
        return LAYER_PRIORITY_DEFAULT
    if not os.path.isdir(dir):
        return LAYER_PRIORITY_DEFAULT
    layer_priority_file = os.path.join(dir, "stx-tools",
                                       "%s%s" % (distro, "-mirror-tools"),
                                       "config", distro, layer, "priority")
    if not os.path.isfile(layer_priority_file):
        return LAYER_PRIORITY_DEFAULT
    prio = int(bc_safe_fetch(layer_priority_file, None)[0])
    return prio


def sort_layer_list (layer_list, distro="debian"):
    layer_dict = {}
    for layer in layer_list:
        prio = get_layer_priority(layer, distro=distro)
        layer_dict[prio] = layer
    keys = sorted(layer_dict.keys())
    result = []
    for key in keys:
        result.append(layer_dict[key])
    return result


def get_all_layers (distro="debian"):
    layer_lst = []
    project_dir_list_all = project_dir_list(distro=distro, layer="all")
    for proj_dir in project_dir_list_all:
        layer_file = os.path.join(proj_dir, "%s%s" % (distro, "_build_layer.cfg"))
        if not os.path.isfile(layer_file):
            continue
        layer_lst.extend(bc_safe_fetch(layer_file, None))
    # remove duplicates
    layer_lst = list(set(layer_lst))
    return sort_layer_list(layer_lst)


def sort_build_type_list (build_type_list, layer, distro="debian"):
    build_type_dict = {}
    for build_type in build_type_list:
        prio = get_build_type_priority(build_type, layer, distro=distro)
        build_type_dict[prio] = build_type
    keys = sorted(build_type_dict.keys())
    result = []
    for key in keys:
        result.append(build_type_dict[key])
    return result


def get_layer_build_types (layer, distro="debian"):
    bt_lst = [ "std" ]
    project_dir_list_all = project_dir_list(distro=distro, layer=layer)
    for proj_dir in project_dir_list_all:
        for pkg_dir_file in glob.glob("%s/%s%s" % (proj_dir, distro, "_pkg_dirs_*")):
            bt = os.path.basename(pkg_dir_file).split("_pkg_dirs_")[1]
            if not bt in bt_lst:
                bt_lst.append(bt)
    return sort_build_type_list(bt_lst, layer)


def get_all_build_types (distro="debian"):
    bt_lst = [ "std" ]
    project_dir_list_all = project_dir_list(distro=distro, layer="all")
    for proj_dir in project_dir_list_all:
        for pkg_dir_file in glob.glob("%s/%s%s" % (proj_dir, distro, "_pkg_dirs_*")):
            bt = os.path.basename(pkg_dir_file).split("_pkg_dirs_")[1]
            if not bt in bt_lst:
                bt_lst.append(bt)
    return sorted(bt_lst)


def project_dir_list_handler (element, data):
    if element not in data['layer']:
        return []
    return [ data['proj_dir'] ]


# project_dir_list
# Return a list of git root directories for the current project.
# Optionally, the list can be filtered by distro and layer.
def project_dir_list (distro="debian", layer="all"):
    if layer is None:
        layer = "all"
    dir = os.environ.get('MY_REPO_ROOT_DIR')
    if dir is None:
        return []
    if not os.path.isdir(dir):
        return []
    project_dir_list_all = git_list(repo_root(dir))
    # print("project_dir_list_all=%s" % project_dir_list_all)
    if layer == "all":
        return project_dir_list_all
    # A specific layer is requested.
    project_dir_list_layer = []
    for proj_dir in project_dir_list_all:
        # Does this project provide content to the desired layer?
        layer_file = os.path.join(proj_dir, "%s%s" % (distro, "_build_layer.cfg"))
        if not os.path.isfile(layer_file):
            continue
        # print("project_dir_list: considering proj_dir=%s" % proj_dir)
        project_dir_list_layer.extend(bc_safe_fetch(layer_file, project_dir_list_handler, {'layer': layer, 'proj_dir': proj_dir}))
    return project_dir_list_layer


def package_dir_list_handler(entry, proj_dir):
    path = os.path.join(proj_dir, entry)
    if not os.path.isdir(path):
        return []
    return [ path ]


def package_dir_list (distro="debian", layer="all", build_type="std"):
    pkg_dir_list = []
    if layer is None:
        layer = "all"
    for proj_dir in project_dir_list(distro=distro, layer=layer):
        pkg_file = os.path.join(proj_dir, "%s%s%s" % (distro, "_pkg_dirs_", build_type))
        if not os.path.isfile(pkg_file):
            if build_type == "std":
                # It's permitted to omit the "_std" suffix from the file name
                pkg_file = os.path.join(proj_dir, "%s%s" % (distro, "_pkg_dirs"))
            if not os.path.isfile(pkg_file):
                continue
        pkg_dir_list.extend(bc_safe_fetch(pkg_file, package_dir_list_handler, proj_dir))
    return pkg_dir_list


def package_dir_to_package_name (pkg_dir, distro="debian"):
    pkg_name = os.path.basename(pkg_dir)
    if distro == "debian":
        meta_data_file = os.path.join(pkg_dir, distro, 'meta_data.yaml')
        if os.path.isfile(meta_data_file):
            with open(meta_data_file) as f:
                meta_data = yaml.full_load(f)
                if "debname" in meta_data:
                    pkg_name = meta_data["debname"]
    return pkg_name


def package_dirs_to_package_names (pkg_dirs, distro="debian"):
    pkg_names = []
    for pkg_dir in pkg_dirs:
        pkg_names.append(package_dir_to_package_name(pkg_dir, distro="debian"))
    return pkg_names


def package_dirs_to_names_dict (pkg_dirs, distro="debian"):
    pkg_names = {}
    for pkg_dir in pkg_dirs:
        pkg_names[pkg_dir] = package_dir_to_package_name(pkg_dir, distro="debian")
    return pkg_names


def filter_package_dirs_by_package_names (pkg_dirs, package_names, distro="debian"):
    if not package_names:
        return pkg_dirs
    filtered_pkg_dirs = []
    for pkg_dir in pkg_dirs:
        pkg_name = package_dir_to_package_name(pkg_dir, distro=distro)
        if pkg_name in package_names:
            filtered_pkg_dirs.append(pkg_dir)
    return filtered_pkg_dirs
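Taken together, a hedged sketch of how a build tool is expected to consume this API (the layer and build-type names in the comments are only examples):

    import discovery

    layers = discovery.get_all_layers(distro='debian')        # e.g. ['distro', 'flock'], priority-sorted
    for layer in discovery.sort_layer_list(layers, distro='debian'):
        for bt in discovery.get_layer_build_types(layer, distro='debian'):   # e.g. ['std', 'rt']
            pkg_dirs = discovery.package_dir_list(distro='debian', layer=layer, build_type=bt)
            names = discovery.package_dirs_to_package_names(pkg_dirs)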
build-tools/stx/downloader
@@ -17,6 +17,8 @@
 import apt
 import argparse
 import debrepack
+import discovery
+import fnmatch
 import logging
 import os
 import repo_manage
@@ -36,6 +38,11 @@ types_pkg_dirs = ['debian_pkg_dirs', 'debian_pkg_dirs_rt', 'debian_pkg_dirs_inst
 logger = logging.getLogger('downloader')
 utils.set_logger(logger)

+STX_DEFAULT_DISTRO = discovery.STX_DEFAULT_DISTRO
+
+ALL_DISTROS = discovery.get_all_distros()
+ALL_LAYERS = discovery.get_all_layers(distro=STX_DEFAULT_DISTRO)
+ALL_BUILD_TYPES = discovery.get_all_build_types(distro=STX_DEFAULT_DISTRO)

 def get_downloaded(dl_dir, dl_type):
     """
@@ -101,19 +108,49 @@ def get_all_stx_pkgs():
     return pkgs


-def get_all_binary_list():
+def get_all_binary_list(distro=STX_DEFAULT_DISTRO, layers=None, build_types=None):
     """
     Return all binary packages listed in base-bullseye.lst, os-std.lst, os-rt.lst
     """
     bin_list = []
     stx_config = os.path.join(os.environ.get('MY_REPO_ROOT_DIR'),
                               'stx-tools/debian-mirror-tools/config/debian')
-    for root, dirs, files in os.walk(stx_config):
-        if dirs:
-            pass
-        for r in files:
-            if r in all_binary_lists:
-                bin_list.append(os.path.join(root, r))

+    if layers:
+        for layer in layers:
+            if layer not in ALL_LAYERS:
+                logger.error(' '.join([layer, 'is not a valid layer']))
+                return
+    else:
+        layers = ALL_LAYERS
+
+    for layer in layers:
+        search_dir = os.path.join(stx_config, layer)
+        all_build_types = discovery.get_layer_build_types(distro=distro, layer=layer)
+        if not all_build_types:
+            logger.error(' '.join(['No build_types found for distro', distro, 'layer', layer]))
+            return
+
+        if not build_types:
+            build_types = all_build_types
+
+        for build_type in build_types:
+            if build_type not in all_build_types:
+                logger.warning(' '.join([build_type, 'is not a valid build_type for distro', distro, 'of layer', layer]))
+                continue
+
+            pattern = ''.join(['os-', build_type, '.lst'])
+            for root, dirs, files in os.walk(search_dir):
+                for f in fnmatch.filter(files, pattern):
+                    bin_list.append(os.path.join(root, f))
+
+    search_dir = os.path.join(stx_config, 'common')
+    pattern = 'base-*.lst'
+    for root, dirs, files in os.walk(search_dir):
+        for f in fnmatch.filter(files, pattern):
+            bin_list.append(os.path.join(root, f))
+
+    logger.info("bin_list=%s" % bin_list)
     return bin_list
@@ -147,21 +184,21 @@ class BaseDownloader():
         ret = 0
         if len(self.dl_need):
             logger.info("++++++++++++++++++++++++++++++++++++++++++++++++++")
-            logger.info("All packages need to be downloaded: %d", len(self.dl_need))
+            logger.info("Total number of packages needing to be downloaded: %d", len(self.dl_need))

         if len(self.dl_success):
             logger.info("++++++++++++++++++++++++++++++++++++++++++++++++++")
             logger.info("Successfully downloaded packages: %d", len(self.dl_success))
             for dlobj in sorted(self.dl_success):
-                logger.info(dlobj.strip())
+                logger.info(' '.join(['-', dlobj.strip()]))

         failed_list = list(set(self.dl_need) - set(self.dl_success))
         if len(failed_list):
             logger.error("+++++++++++++++++++++++++++++++++++++++++++++++++")
-            logger.error("Failed to download packages %d", len(failed_list))
+            logger.error("Failed to download packages: %d", len(failed_list))
             ret = 1
             for dlobj in sorted(failed_list):
-                logger.error(dlobj.strip())
+                logger.error(' '.join([dlobj.strip()]))
         return ret
@@ -224,7 +261,7 @@ class DebDownloader(BaseDownloader):
             pkg_ver = pkg_name_array[1].split(":")[-1]
             # current default arch is 'amd64'
             pname_arch = '_'.join([pkg_name, pkg_ver, self.arch]) + '.deb'
-            pname_all = ''.join([pkg_name, '_', pkg_ver, '_all.deb'])
+            pname_all = '_'.join([pkg_name, pkg_ver, 'all']) + '.deb'
             self.dl_need.append(pkg_name + '_' + pkg_ver)

             if self.downloaded and pname_arch in self.downloaded:
@@ -245,7 +282,13 @@ class DebDownloader(BaseDownloader):
                 # should be defined in the package list file with ':'
                 self.need_download.append(pkg_name + '_' + pkg_name_array[1])

+        previously_uploaded = self.repomgr.list_pkgs(REPO_BIN)
+        logger.info(' '.join(['previously_uploaded', str(previously_uploaded)]))
         for deb in self.need_upload:
+            if previously_uploaded and deb in previously_uploaded:
+                logger.info(' '.join([os.path.join(stx_bin_mirror, deb),
+                            'has already been uploaded to', REPO_BIN, ', skip']))
+                continue
             name, ver, arch = deb.split('_')
             if not self.repomgr.search_pkg(REPO_BIN, name, ver, True):
                 if name and ver:
@@ -257,7 +300,7 @@ class DebDownloader(BaseDownloader):
                     logger.info(' '.join([os.path.join(stx_bin_mirror, deb),
                                 'failed to upload to', REPO_BIN]))
         for deb in self.need_download:
-            logger.debug(' '.join(['package', deb, 'is need to be downloaded']))
+            logger.debug(' '.join(['package', deb, 'needs to be downloaded']))
             debnames = deb.split('_')
             ret = self.download(debnames[0], debnames[1])
             if ret:
@@ -319,29 +362,58 @@ class SrcDownloader(BaseDownloader):
                 return False
         return True

-    def download_all(self):
-        pkgs_list = []
-        pkgs_all = get_all_stx_pkgs()
-        for pkg in pkgs_all.keys():
-            pkgs_list.append(pkg)
-            self.dl_need.append(pkg)
-        if not len(pkgs_list):
-            logger.info("All source packages are already in mirror")
-        else:
-            logger.info("Start to download source packages: %d", len(pkgs_list))
-            logger.info("%s", sorted(pkgs_list))
-            for pkg in sorted(pkgs_list):
-                if self.download_pkg_src(pkgs_all[pkg]):
-                    self.dl_success.append(pkg)
-                else:
-                    self.dl_failed.append(pkg)
+    def download_all(self, distro=STX_DEFAULT_DISTRO, layers=None, build_types=None):
+        logger.info("download_all, layers=%s, build_types=%s" % (layers, build_types))
+        if layers:
+            for layer in layers:
+                if layer not in ALL_LAYERS:
+                    logger.error(' '.join([layer, 'is not a valid layer']))
+                    return
+        else:
+            layers = ALL_LAYERS

-    def start(self):
+        pkg_dirs = []
+
+        for layer in layers:
+            all_build_types = discovery.get_layer_build_types(distro=distro, layer=layer)
+            if not all_build_types:
+                logger.error(' '.join(['No build_types found for distro', distro, 'layer', layer]))
+                return
+
+            if not build_types:
+                build_types = all_build_types
+
+            for build_type in build_types:
+                if build_type not in all_build_types:
+                    logger.warning(' '.join([build_type, 'is not a valid build_type for distro', distro, 'of layer', layer]))
+                    continue
+
+                pkg_dirs.extend(discovery.package_dir_list(distro=distro, layer=layer, build_type=build_type))
+
+        if not len(pkg_dirs):
+            logger.info("No source packages found")
+            return
+
+        pkg_dirs_to_names = discovery.package_dirs_to_names_dict(pkg_dirs, distro=distro)
+        for pkg_dir in pkg_dirs_to_names:
+            self.dl_need.append(pkg_dirs_to_names[pkg_dir])
+
+        logger.info("Starting to download %d source packages", len(pkg_dirs))
+        logger.info("%s", sorted(self.dl_need))
+        for pkg_dir in pkg_dirs:
+            if self.download_pkg_src(pkg_dir):
+                if pkg_dir in pkg_dirs_to_names:
+                    self.dl_success.append(pkg_dirs_to_names[pkg_dir])
+            else:
+                if pkg_dir in pkg_dirs_to_names:
+                    self.dl_failed.append(pkg_dirs_to_names[pkg_dir])
+
+    def start(self, distro=STX_DEFAULT_DISTRO, layers=None, build_types=None):
         # stx package source downloading
         super(SrcDownloader, self).clean()

         if self.prepare():
-            self.download_all()
+            self.download_all(distro=distro, layers=layers, build_types=build_types)
         else:
             logger.error("Failed to initialize source downloader")
             sys.exit(1)
@@ -370,6 +442,9 @@ if __name__ == "__main__":
     source_dl = None
     binary_ret = 0
     source_ret = 0
+    distro = STX_DEFAULT_DISTRO
+    layers = None
+    build_types = None

     parser = argparse.ArgumentParser(description="downloader helper")
     parser.add_argument('-b', '--download_binary', help="download binary debs",
@@ -378,13 +453,50 @@ if __name__ == "__main__":
                         action='store_true')
     parser.add_argument('-c', '--clean_mirror', help="clean the whole mirror and download again, be careful to use",
                         action='store_true')
+    parser.add_argument('-d', '--distro', type=str, nargs=1,
+                        help="name of the distro to build\n %s" % ALL_DISTROS,
+                        default=STX_DEFAULT_DISTRO, required=False)
+    parser.add_argument('-B', '--build-types', type=str,
+                        help="comma separated list of all build-types to build\n %s" % ALL_BUILD_TYPES,
+                        default=None, required=False)
+    parser.add_argument('-l', '--layers', type=str,
+                        help="comma separated list of all layers to build\n %s" % ALL_LAYERS,
+                        default=None, required=False)


     args = parser.parse_args()
     clean_mirror = args.clean_mirror

+    if args.distro:
+        if args.distro not in ALL_DISTROS:
+            logger.error(' '.join(['Distro', args.distro, 'not in', ','.join(ALL_DISTROS)]))
+            logger.error("Please consult: build-pkgs --help")
+            sys.exit(1)
+        distro = args.distro
+        ALL_LAYERS = discovery.get_all_layers(distro=distro)
+        ALL_BUILD_TYPES = discovery.get_all_build_types(distro=distro)
+
+    if args.build_types:
+        build_types = args.build_types.strip().split(',')
+        for build_type in build_types:
+            if build_type not in ALL_BUILD_TYPES:
+                logger.error(' '.join(['Build_type', build_type, 'not in', ','.join(ALL_BUILD_TYPES)]))
+                logger.error("Please consult: build-pkgs --help")
+                sys.exit(1)
+
+    if args.layers:
+        layers = args.layers.strip().split(',')
+        for layer in layers:
+            logger.info("layer=%s" % layer)
+            if layer not in ALL_LAYERS:
+                logger.error(' '.join(['Layer', layer, 'not in', ','.join(ALL_LAYERS)]))
+                logger.error("Please consult: build-pkgs --help")
+                sys.exit(1)
+
     if args.download_binary:
-        all_binary_lists = get_all_binary_list()
+        all_binary_lists = get_all_binary_list(distro=distro, layers=layers, build_types=build_types)
         binary_dl = DebDownloader(DEFAULT_ARCH, stx_bin_mirror, clean_mirror, all_binary_lists)

     if args.download_source:
         source_dl = SrcDownloader(DEFAULT_ARCH, stx_src_mirror, clean_mirror)
@@ -392,7 +504,7 @@ if __name__ == "__main__":
     if binary_dl:
         binary_dl.start()
     if source_dl:
-        source_dl.start()
+        source_dl.start(distro=distro, layers=layers, build_types=build_types)

     if binary_dl:
         binary_ret = binary_dl.reports()
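For example (illustrative values), a source-only download restricted to one layer and build type would now be:

    source_dl = SrcDownloader(DEFAULT_ARCH, stx_src_mirror, clean_mirror)
    source_dl.start(distro='debian', layers=['flock'], build_types=['std'])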
build-tools/stx/git_utils.py (new file, 62 lines)

# Copyright (c) 2021 Wind River Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import fnmatch
import os
import utils

#
# git_list [<dir>]:
#     Return a list of git root directories found under <dir>
#
def git_list (dir=os.environ['PWD'], max_depth=5):
    matches = []
    if dir is None:
        return matches
    if not os.path.isdir(dir):
        return matches
    for root, dirnames, filenames in utils.limited_walk(dir, max_depth=max_depth):
        # Look for .git directories
        for dirname in fnmatch.filter(dirnames, '.git'):
            # Exclude .repo/repo
            if os.path.basename(root) == "repo":
                if os.path.basename(os.path.dirname(root)) == ".repo":
                    continue
            matches.append(root)
            continue
        for filename in fnmatch.filter(filenames, '.git'):
            matches.append(root)
            continue
    return matches


# git_root [<dir>]:
#     Return the root directory of a git
#     Note: symlinks are fully expanded.
#
def git_root (dir=os.environ['PWD']):
    if dir is None:
        return None
    if not os.path.isdir(dir):
        # Perhaps a file; try the parent directory of the file.
        dir = os.path.dirname(dir)
        if not os.path.isdir(dir):
            return None
    while dir != "/":
        if os.path.isdir(os.path.join(dir, ".git")):
            return os.path.normpath(dir)
        dir = os.path.dirname(dir)
    return None
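A small usage sketch (the paths are hypothetical):

    from git_utils import git_list, git_root

    gits = git_list('/localdisk/designer/builder/stx', max_depth=5)  # every git checkout under the tree
    root = git_root('/localdisk/designer/builder/stx/utilities')     # nearest enclosing .git root, or None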
build-tools/stx/repo_utils.py (new file, 36 lines)

# Copyright (c) 2021 Wind River Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import fnmatch
import os

# repo_root [<dir>]:
#     Return the root directory of a repo.
#     Note: symlinks are fully expanded.
#
def repo_root (dir=os.environ['PWD']):
    if dir is None:
        return None
    if not os.path.isdir(dir):
        # Perhaps a file; try the parent directory of the file.
        dir = os.path.dirname(dir)
        if not os.path.isdir(dir):
            return None
    while dir != "/":
        if os.path.isdir(os.path.join(dir, ".repo")):
            return os.path.normpath(dir)
        dir = os.path.dirname(dir)
    return None
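Usage sketch, assuming MY_REPO_ROOT_DIR points inside a repo-managed workspace:

    import os
    from repo_utils import repo_root

    root = repo_root(os.environ.get('MY_REPO_ROOT_DIR'))  # walks up until a .repo directory is found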
build-tools/stx/utils.py
@@ -13,7 +13,9 @@
 # limitations under the License.
 #
 # Copyright (C) 2021 Wind River Systems,Inc

 import logging
+import os
+

 def set_logger(logger):
@@ -67,3 +69,44 @@ def set_logger(logger):
     fh.setFormatter(ColorFormatter())
     logger.addHandler(fh)
     logger.propagate = 0
+
+
+# Read file 'lst_file', strip out blank lines and lines starting with '#'.
+# Return the remaining lines as a list.  Optionally subject the lines
+# to additional processing via the entry_handler prior to inclusion in
+# the list.
+def bc_safe_fetch(lst_file, entry_handler=None, entry_handler_arg=None):
+    entries = []
+    try:
+        with open(lst_file, 'r') as flist:
+            lines = list(line for line in (p.strip() for p in flist) if line)
+    except IOError as e:
+        logger.error(str(e))
+    except Exception as e:
+        logger.error(str(e))
+    else:
+        for entry in lines:
+            entry = entry.strip()
+            if entry.startswith('#'):
+                continue
+            if entry == "":
+                continue
+            if entry_handler:
+                if entry_handler_arg:
+                    entries.extend(entry_handler(entry, entry_handler_arg))
+                else:
+                    entries.extend(entry_handler(entry))
+            else:
+                entries.append(entry)
+    return entries
+
+
+def limited_walk(dir, max_depth=1):
+    dir = dir.rstrip(os.path.sep)
+    assert os.path.isdir(dir)
+    num_sep_dir = dir.count(os.path.sep)
+    for root, dirs, files in os.walk(dir):
+        yield root, dirs, files
+        num_sep_root = root.count(os.path.sep)
+        if num_sep_dir + max_depth <= num_sep_root:
+            del dirs[:]
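A short sketch of the two new helpers (the list file name is hypothetical):

    from utils import bc_safe_fetch, limited_walk

    # comments and blank lines are dropped; a handler must return a list,
    # since its result is extend()ed into the output
    entries = bc_safe_fetch('pkgs.lst', lambda entry: [entry.lower()])

    # os.walk, but pruned below max_depth directory levels
    for root, dirs, files in limited_walk('.', max_depth=2):
        pass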