211250829b
This commit does the following:

a. Add code to delete the Debian source package from the local source
   repo each time before uploading the new package.
b. Add more detailed log messages to show the search/delete/upload
   operations with repo_manager.
c. Add the log module 'repo_manager' to repo_manager.

Story: 2008846
Task: 44005

Signed-off-by: hbai <haiqing.bai@windriver.com>
Change-Id: Ie44eb53bfca1226719ec5784da01a5f5fde5b2e1
787 lines
29 KiB
Python
Executable File
#!/usr/bin/python3
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Copyright (C) 2021 Wind River Systems,Inc

import argparse
import debrepack
import debsentry
import dsc_depend
import dsccache
import logging
import os
import repo_manage
import requests
import shutil
import signal
import subprocess
import sys
import time
import utils

BUILDER_URL = os.environ.get('BUILDER_URL')
REPOMGR_URL = os.environ.get('REPOMGR_URL')
BUILD_ROOT = os.environ.get('MY_BUILD_PKG_DIR')
STX_ROOT = os.environ.get('MY_REPO_ROOT_DIR')
PKGBUILDER_ROOT = "/localdisk/pkgbuilder"
USER = os.environ.get('MYUNAME')
PROJECT = os.environ.get('PROJECT')
REPO_BUILD = 'deb-local-build'
REPO_SOURCE = 'deb-local-source'
# All STX source layers that contain a 'debian_pkg_dirs' file
STX_SOURCE_REPOS = ['ansible-playbooks', 'config', 'fault',
                    'integ', 'update', 'utilities', 'audit-armada-app',
                    'cert-manager-armada-app', 'clients',
                    'compile', 'config-files', 'containers',
                    'distributedcloud', 'distributedcloud-client',
                    'ha', 'helm-charts', 'kernel', 'metal',
                    'metrics-server-armada-app', 'monitor-armada-app',
                    'monitoring', 'nfv', 'nginx-ingress-controller-armada-app',
                    'oidc-auth-armada-app', 'openstack-armada-app',
                    'platform-armada-app', 'portieris-armada-app',
                    'ptp-notification-armada-app', 'rook-ceph',
                    'SDO-rv-service', 'snmp-armada-app', 'stx-puppet',
                    'upstream', 'vault-armada-app']
STX_LAYERS = ['distro', 'flock']

logger = logging.getLogger('debcontroller')
utils.set_logger(logger)


def get_pkgname_with_dsc(dscs, dsc_path):
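    """
    Return the name of the package whose dsc file matches dsc_path,
    or None if there is no match.
    """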
    for package, dsc in dscs.items():
        if dsc.strip() in dsc_path:
            return package
    return None


def get_pkgname_ver_with_deb(deb_name):
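    """
    Split a Debian binary file name '<name>_<version>_<arch>.deb' and
    return the (name, version) tuple, or None if it cannot be parsed.
    """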
    if not deb_name.endswith('.deb'):
        return None
    name_list = deb_name.split('_')
    if len(name_list) < 2:
        return None
    return name_list[0], name_list[1]


def req_chroots_action(action, extra_params):
    """
    Base helper used by every chroot request made over the RESTful API
    Param:
        action: one of 'addchroot', 'loadchroot', 'savechroot'
        extra_params: optional extra request parameters
    """
    req_params = {}
    req_params['project'] = PROJECT
    req_params['user'] = USER

    if extra_params:
        req_params.update(extra_params)
    try:
        resp = requests.get(BUILDER_URL + action, data=req_params)
        resp.raise_for_status()
    except requests.RequestException as e:
        print(e)
    else:
        logger.debug(resp.text)
        if 'success' in resp.text:
            return 'success'
        if 'exists' in resp.text:
            return 'success'
        if 'creating' in resp.text:
            return 'creating'
    return 'fail'


def show_task_log(log_file, wait_time, success_str, exit_str):
    """
    Display the log file on the current console
    Param:
        wait_time: seconds to wait before the log file is expected
                   to be readable
        success_str: substring that marks success and ends the tailing
        exit_str: substring that marks failure and ends the tailing
    """
    status = 'fail'
    time.sleep(wait_time)
    logger.debug(' '.join(['Waiting for log file', log_file]))

    timeout = 8
    time_counter = 0
    while not os.path.exists(log_file):
        time.sleep(1)
        time_counter += 1
        if time_counter > timeout:
            break

    if os.path.exists(log_file):
        p = subprocess.Popen("tail -f " + log_file, stdout=subprocess.PIPE,
                             shell=True, universal_newlines=True, bufsize=0)
        while p.poll() is None:
            line = p.stdout.readline()
            line = line.strip()
            if line:
                print(line)
                if success_str and success_str in line:
                    status = 'success'
                    break
                if exit_str and exit_str in line:
                    logger.error(' '.join(['Task failed. For details please',
                                           'consult log', log_file]))
                    status = 'fail'
                    break
    return status


def bc_safe_fetch(dst_file, entry_handler=None):
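    """
    Read dst_file and return its non-empty, non-comment entries; each
    entry is passed through entry_handler when a handler is supplied.
    """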
    entries = []
    try:
        with open(dst_file, 'r') as flist:
            lines = list(line for line in (p.strip() for p in flist) if line)
    except IOError as e:
        logger.error(str(e))
    except Exception as e:
        logger.error(str(e))
    else:
        for entry in lines:
            entry = entry.strip()
            if entry.startswith('#'):
                continue
            if entry_handler:
                entries.append(entry_handler(entry))
            else:
                entries.append(entry)
    return entries


def pkgdirs_entry_handler(entry):
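    """Reduce a 'debian_pkg_dirs' entry to the package directory's base name."""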
    if entry:
        return os.path.basename(entry)
    return []


def get_pkgs_of_layer(layer):
    """
    Scan all STX source layers to get the buildable packages of 'layer'
    debian_build_layer.cfg defines whether an STX source layer belongs
    to the 'distro' layer, the 'flock' layer or some other layer
    Params:
        layer: name of the target layer (e.g. 'distro' or 'flock')
    Return:
        List of all STX buildable packages of the layer
    """
    pkgs = []
    stx_src_root = os.path.join(os.environ.get('MY_REPO_ROOT_DIR'),
                                'cgcs-root/stx')
    for root, dirs, files in os.walk(stx_src_root):
        if dirs:
            pass
        for r in files:
            if r == 'debian_build_layer.cfg':
                layers = []
                layer_file = os.path.join(root, r)
                layers.extend(bc_safe_fetch(layer_file, None))
                if layer in layers:
                    # The current STX src layer belongs to 'layer'
                    pkgs_f = os.path.join(root, 'debian_pkg_dirs')
                    msg = ' '.join(['Pkgdirs', pkgs_f, 'for layer', layer])
                    logger.debug(msg)
                    pkgs.extend(bc_safe_fetch(pkgs_f, pkgdirs_entry_handler))

    return pkgs


def get_all_packages():
    """
    Scan all STX source layers to get all buildable packages
    Params: None
    Return:
        List of all STX buildable packages
    """
    pkgs = []
    stx_src_root = os.path.join(os.environ.get('MY_REPO_ROOT_DIR'),
                                'cgcs-root/stx')
    for root, dirs, files in os.walk(stx_src_root):
        if dirs:
            pass
        for r in files:
            if r == 'debian_pkg_dirs':
                pkgs_file = os.path.join(root, r)
                pkgs.extend(bc_safe_fetch(pkgs_file, pkgdirs_entry_handler))
    return pkgs


def fetch_debian_folder(package):
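    """
    Search the 'debian_pkg_dirs' file of each STX source repo for
    'package' and return the path to its debian meta folder, or None.
    """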
    for layer in STX_SOURCE_REPOS:
        pkg_dir_file = os.path.join(STX_ROOT, 'cgcs-root/stx', layer,
                                    'debian_pkg_dirs')
        if not os.path.exists(pkg_dir_file):
            logger.warning('debian_pkg_dirs does not exist for layer %s, please check', layer)
            continue

        logger.debug(' '.join(['Fetching debian meta in', pkg_dir_file]))
        try:
            with open(pkg_dir_file, 'r') as fdir:
                debs = list(line for line in (d.strip() for d in fdir) if line)
        except IOError as e:
            logger.error(str(e))
        except Exception as e:
            logger.error(str(e))
        else:
            for deb in debs:
                deb = deb.strip()
                if deb.startswith('#'):
                    continue
                if os.path.basename(deb) == package:
                    msg = ' '.join(['Meta of', package, 'in', deb])
                    logger.debug(msg)
                    return os.path.join(STX_ROOT, 'cgcs-root/stx', layer, deb)
    return None


class BuildController():
    """
    BuildController helps to create or refresh the debian build recipes
    (.dsc, *.tar) based on the stx source, then offloads the build
    task to the 'pkgbuilder' container with the user's build options
    The build log will be displayed on the console until one of these
    results appears:
    'Status: success': build ok
    'Status: fail': build fail
    'Status: give-back': try again later
    """
    def __init__(self):
        self.attrs = {
            'mode': 'private',
            'type': 'std',
            'avoid': True,
            'parallel': False,
            'exit_on_fail': False,
            'run_tests': False
        }
        self.kits = {
            'dsc_cache': None,
            'repo_mgr': None,
            'dsc_maker': None
        }
        self.lists = {
            'success': [],
            'fail': [],
            'uploaded': [],
        }
        self.pkgs_digests = {}
        if not self.kits['repo_mgr']:
            rlogger = logging.getLogger('repo_manager')
            utils.set_logger(rlogger)
            self.kits['repo_mgr'] = repo_manage.RepoMgr('aptly', REPOMGR_URL,
                                                        '/tmp', rlogger)
            logger.debug("Successfully created repo manager")

    @property
    def build_avoid(self):
        return self.attrs['avoid']

    @build_avoid.setter
    def build_avoid(self, avoid):
        self.attrs['avoid'] = avoid

    def start(self):
        if not self.kits['dsc_cache']:
            pkl_file = os.path.join(BUILD_ROOT, self.attrs['type'], 'dsc.pkl')
            self.kits['dsc_cache'] = dsccache.DscCache(logger, pkl_file)
            if not self.kits['dsc_cache']:
                logger.warning(' '.join(['Failed to create dsc cache',
                                         pkl_file]))

        if not self.kits['repo_mgr']:
            logger.critical("Failed to create repo manager")
            return False
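        # Passing None as the package here appears to just ensure that
        # the repository exists (an assumption based on how upload_pkg
        # is used in this script, not on a documented repo_manage API).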
        self.kits['repo_mgr'].upload_pkg(REPO_BUILD, None)
        self.kits['repo_mgr'].upload_pkg(REPO_SOURCE, None)

        build_dir = os.path.join(BUILD_ROOT, self.attrs['type'])
        os.makedirs(build_dir, exist_ok=True)

        recipes_dir = os.path.join(BUILD_ROOT, 'recipes')
        os.makedirs(recipes_dir, exist_ok=True)

        if not self.kits['dsc_maker']:
            try:
                self.kits['dsc_maker'] = debrepack.Parser(build_dir,
                                                          recipes_dir, 'debug')
            except Exception as e:
                logger.error(str(e))
                logger.error("Failed to create dsc maker")
                return False
            else:
                logger.info("Successfully created dsc maker")

        # load the persistent chroot on shared volume
        req_chroots_action('loadchroot', None)
        return True

    def stop(self):
        self.show_build_stats()

    def clean(self):
        """
        Clean the build environment: remove all build artifacts under
        <path to>/std or <path to>/rt and empty the local build repo
        """
        # clean build artifacts
        build_dir = os.path.join(BUILD_ROOT, self.attrs['type'])
        if os.path.exists(build_dir):
            logger.debug(' '.join(['Cleaning the build directory', build_dir]))
            try:
                shutil.rmtree(build_dir)
            except Exception as e:
                logger.error(str(e))
                logger.error("Failed to clean the build directory")
            else:
                logger.info("Finished cleaning the build directory")

        # clean build repo
        if self.kits['repo_mgr']:
            if not self.kits['repo_mgr'].remove_repo(REPO_BUILD):
                logger.debug(' '.join(['Failed to clean', REPO_BUILD]))
            else:
                logger.debug(' '.join(['Successfully cleaned', REPO_BUILD]))

    def add_chroot(self, mirror):
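        """
        Ensure the build chroot exists: request 'addchroot' from
        pkgbuilder (optionally against 'mirror') and, while the chroot
        is being created, follow chroot.log until it is ready.
        """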
        extra_req = {}

        if mirror:
            # Extra required data can be extended here, for example:
            # req_param['mirror'] = "http://ftp.de.debian.org/debian"
            # when 'addchroot'
            extra_req['mirror'] = mirror

        ret = req_chroots_action('addchroot', extra_req)
        if 'success' in ret:
            logger.debug('Chroot exists, ready to build')
            return 'success'

        if 'creating' in ret:
            key_string = "Successfully set up bullseye chroot"
            state = show_task_log(os.path.join(PKGBUILDER_ROOT, USER, PROJECT,
                                               'chroot.log'),
                                  10, key_string, None)
            if 'success' in state:
                req_chroots_action('savechroot', None)
                ret = 'success'
            else:
                logger.error('Failed to add chroot, please consult the log')
                ret = 'fail'
            self.req_kill_task('chroot')

        return ret

    def upload_with_deb(self, package, debs_dir):
        """
        Upload the locally built Debian binaries to the repo manager
        Params:
            package: target package name
            debs_dir: directory containing the Debian binaries
        """
        logger.debug(' '.join(['Remove all old versions of debs for', package]))
        debs_clue = os.path.join(os.environ.get('MY_BUILD_PKG_DIR'),
                                 'debs_entry.pkl')
        subdebs = debsentry.get_subdebs(debs_clue, package, logger)
        if subdebs:
            for deb in subdebs:
                pkg_item = deb.split('_')
                msg = ''.join(['package ', pkg_item[0], '(', pkg_item[1], ')'])

                logger.info(' '.join(['Searching for binary', msg, 'in repository', REPO_BUILD]))
                if self.kits['repo_mgr'].search_pkg(REPO_BUILD, pkg_item[0],
                                                    pkg_item[1]):
                    logger.info('Found binary %s in repository %s', msg, REPO_BUILD)
                    if self.kits['repo_mgr'].delete_pkg(REPO_BUILD,
                                                        pkg_item[0], 'binary',
                                                        pkg_item[1]):
                        logger.info('Successfully deleted binary %s from repository %s',
                                    msg, REPO_BUILD)
                    else:
                        logger.info('Failed to delete binary %s from repository %s', msg,
                                    REPO_BUILD)
        sdebs = []
        if not os.path.exists(debs_dir):
            logger.error(' '.join(['Nonexistent directory', debs_dir]))
            return False
        for root, dirs, files in os.walk(debs_dir):
            if dirs:
                pass
            for r in files:
                if r.endswith('.deb'):
                    deb_file = os.path.join(root, r)
                    if self.kits['repo_mgr'].upload_pkg(REPO_BUILD, deb_file):
                        logger.info(' '.join(['Successfully uploaded',
                                              deb_file, 'to repository', REPO_BUILD]))
                        pkg_item = r.split('_')
                        sdebs.append(''.join([pkg_item[0], '_', pkg_item[1]]))
                        msg = ''.join([pkg_item[0], '_', pkg_item[1],
                                       ' is saved to debs_entry for ',
                                       package])
                        logger.debug(msg)
                    else:
                        logger.error(' '.join(['Failed to upload', deb_file,
                                               'to repository', REPO_BUILD]))
                        return False
        debsentry.set_subdebs(debs_clue, package, sdebs, logger)
        return True

    def upload_with_dsc(self, deb, dsc, repo_name):
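        """
        Upload a source package (.dsc plus tarballs) to repo_name; any
        existing source of the same name is deleted from the repository
        first, so the newly created source always replaces it.
        """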
        if not os.path.exists(dsc):
            logger.error(' '.join(['Dsc file', dsc, 'does not exist']))
            return False

        dsc_pkg = os.path.basename(dsc).split('_')[0]
        if deb != dsc_pkg:
            logger.warning(''.join(['Package name passed in is ', deb,
                                    ' but the name from the dsc is ', dsc_pkg,
                                    '; they do not match.']))
        logger.info(' '.join(['Existing source for', dsc_pkg,
                              'will be deleted from repository', repo_name,
                              'before the new source is uploaded']))
        logger.info("Searching for %s in repository %s", dsc_pkg, repo_name)
        if self.kits['repo_mgr'].search_pkg(repo_name, dsc_pkg, None, False):
            logger.info("Found %s in repository %s, attempting to delete", dsc_pkg, repo_name)
            if not self.kits['repo_mgr'].delete_pkg(repo_name, dsc_pkg, 'source'):
                logger.error("Failed to delete source %s from repository %s", dsc_pkg, repo_name)
                return False
            logger.info("Successfully deleted source %s from repository %s", dsc_pkg, repo_name)
        else:
            logger.info("Cannot find %s in repository %s", dsc_pkg, repo_name)

        logger.info(' '.join(['Starting to upload source', dsc, 'to repository', repo_name]))
        if not self.kits['repo_mgr'].upload_pkg(repo_name, dsc):
            logger.error("Failed to upload source %s to repository %s", dsc, repo_name)
            return False
        logger.info("Successfully uploaded source %s to repository %s", dsc, repo_name)
        return True

    def req_add_task(self, package, dsc_path):
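        """
        Ask pkgbuilder to build 'package' from its dsc, follow the build
        log and upload the resulting debs on success; returns 'success'
        or 'fail'.
        """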
        status = 'fail'
        dsc = os.path.basename(dsc_path)

        req_params = {}
        req_params['mode'] = self.attrs['mode']
        req_params['type'] = self.attrs['type']
        req_params['project'] = PROJECT
        req_params['user'] = USER
        req_params['name'] = package
        req_params['dsc'] = dsc
        req_params['run_tests'] = self.attrs['run_tests']

        try:
            resp = requests.get(BUILDER_URL + 'addtask', data=req_params)
            resp.raise_for_status()
        except requests.RequestException as e:
            print(e)
        else:
            logger.debug(resp.text)
            if 'success' in resp.text:
                log = os.path.join(BUILD_ROOT, self.attrs['type'], package,
                                   dsc.replace('.dsc', '_amd64.build'))
                ret = show_task_log(log, 3, 'Status: successful',
                                    'Finished at')
                if 'success' in ret:
                    self.upload_with_deb(package, os.path.join(BUILD_ROOT,
                                         self.attrs['type'], package))
                    self.req_kill_task('sbuild')
                    status = 'success'
        return status

    def req_kill_task(self, owner):
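        """Ask pkgbuilder to kill the tasks of 'owner', e.g. 'chroot' or 'sbuild'."""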
        req_params = {}
        req_params['owner'] = owner
        req_params['user'] = USER
        req_params['mode'] = self.attrs['mode']

        try:
            resp = requests.get(BUILDER_URL + 'killtask', data=req_params)
            resp.raise_for_status()
        except requests.RequestException as e:
            print(e)
        else:
            logger.debug(resp.text)

    def req_stop_task(self):
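        """Ask pkgbuilder to stop the currently running build tasks."""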
        req_params = {}
        req_params['user'] = USER
        req_params['mode'] = self.attrs['mode']

        try:
            resp = requests.get(BUILDER_URL + 'stoptask', data=req_params)
            resp.raise_for_status()
        except requests.RequestException as e:
            print(e)
        else:
            logger.debug(resp.text)

    def create_dsc(self, package, pkg_meta):
        """
        Call the dsc maker (debrepack) to generate the new dsc for package
        Params:
            package: package name
            pkg_meta: path to the package's debian folder
        Return: result list like:
            ['dhcp-2.10.1.tis.dsc', 'dhcp-2.10.tar.xz', 'dhcp-2.10.tar.xz.orig']
        """
        skip_build = False
        # Check whether there are changes in the package's debian folder
        new_checksum = self.kits['dsc_maker'].checksum(pkg_meta)
        self.pkgs_digests[package] = new_checksum
        if self.kits['dsc_cache']:
            old_checksum = self.kits['dsc_cache'].get_package_digest(package)
            if old_checksum and old_checksum == new_checksum:
                logger.info(' '.join(['No source meta changes for', package]))
                skip_build = True

        if self.attrs['avoid'] and skip_build:
            self.lists['success'].append(package)
            logger.info(' '.join(['Skipping rebuild of', package]))
            logger.info('To force a build, please use -c/--clean')
            return None

        logger.debug(' '.join([pkg_meta, 'is ready to create dsc']))

        pkgdir = os.path.join(BUILD_ROOT, self.attrs['type'], package)
        if os.path.exists(pkgdir):
            try:
                shutil.rmtree(pkgdir)
            except Exception as e:
                logger.error(str(e))
            else:
                logger.debug(' '.join(['Successfully removed old', pkgdir]))
        os.makedirs(pkgdir)

        try:
            dsc_recipes = self.kits['dsc_maker'].package(pkg_meta)
        except Exception as e:
            logger.error(str(e))
            return None
        else:
            if not dsc_recipes:
                logger.error(' '.join(['Failed to create dsc for', package]))
                return None
            logger.debug(' '.join(['Successfully created dsc for', package]))
            return dsc_recipes

    def run_build_loop(self, pkgs_dsc):
        build_dir = os.path.join(BUILD_ROOT, self.attrs['type'])
        dsc_list_file = os.path.join(build_dir, 'dsc.lst')
        deps_resolver = dsc_depend.Dsc_build_order(dsc_list_file, logger)

        for p in range(len(pkgs_dsc)):
            pkgs_can_build = deps_resolver.get_build_able_pkg(1)
            for dsc in pkgs_can_build:
                logger.info(' '.join(['Dependency resolver says to build',
                                      os.path.basename(dsc)]))
                package = get_pkgname_with_dsc(pkgs_dsc, dsc)
                status = self.req_add_task(package, dsc)
                if 'success' in status:
                    logger.info(' '.join(['Successfully built',
                                          package]))
                    deps_resolver.pkg_accomplish(dsc)
                    self.lists['success'].append(package)
                    pkg_md5 = self.pkgs_digests[package]
                    self.kits['dsc_cache'].set_package_digest(package, pkg_md5)
                else:
                    logger.info(' '.join(['Failed to build', package, str(p)]))
                    self.req_stop_task()
                    if self.attrs['exit_on_fail']:
                        return

        logger.info("Build loop done, please check the stats")

    def build_route(self, port, data):
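        """
        Dispatch the build request: 'package' builds the listed packages,
        'layer' builds whole layers and 'all' builds everything.
        """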
        if port == 'package':
            self.build_packages(data)
        if port == 'layer':
            self.build_layers(data)
        if port == 'all':
            self.build_all()

    def build_all(self):
        packages = get_all_packages()
        if packages:
            logger.debug(''.join(['All packages:',
                                  ','.join(packages)]))
            self.build_packages(packages)
        else:
            logger.error('Failed to get all buildable packages')

    def build_layers(self, layers):
        if not layers:
            logger.error('Failed to get layers')
            return
        for layer in layers:
            if layer not in STX_LAYERS:
                logger.error(' '.join([layer, 'is not a valid layer']))
            else:
                logger.info(' '.join(['Starting to build all packages in layer',
                                      layer]))
                packages = get_pkgs_of_layer(layer)
                if packages:
                    logger.debug(''.join([layer, ' needs packages: ',
                                          ','.join(packages)]))
                    self.build_packages(packages)
                else:
                    logger.error(' '.join(['Failed to get packages for layer',
                                           layer]))
                logger.info(' '.join(['Finished building packages in layer',
                                      layer]))

        return

    def build_packages(self, packages):
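        """
        Create the dsc recipe for every package in 'packages', upload
        each new source to the local source repository, then drive the
        dependency-ordered build loop over the generated dsc files.
        """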
        fdsc_file = None
        packages_dscs = {}
        self.lists['fail'] = packages

        build_dir = os.path.join(BUILD_ROOT, self.attrs['type'])
        os.makedirs(build_dir, exist_ok=True)

        dscs_list_file = os.path.join(build_dir, 'dsc.lst')
        logger.debug(' '.join(['Prepare', dscs_list_file, 'for deps_resolver']))
        fdsc_file = open(dscs_list_file, 'w+')
        fdsc_file.seek(0)
        fdsc_file.truncate()

        # Now check and create the debian meta one by one
        for deb in packages:
            dsc_file = ""
            deb = deb.strip()
            deb_meta_path = fetch_debian_folder(deb)
            if not deb_meta_path:
                logger.error(' '.join(['No debian meta found, skip', deb]))
                continue

            deb_recipes = self.create_dsc(deb, deb_meta_path)
            if deb_recipes:
                dsc_file = os.path.join(build_dir, deb, deb_recipes[0])
                packages_dscs[deb.strip()] = dsc_file
                fdsc_file.write(dsc_file + '\n')
                if self.kits['repo_mgr']:
                    self.upload_with_dsc(deb, dsc_file, REPO_SOURCE)
            else:
                if self.attrs['exit_on_fail']:
                    if fdsc_file:
                        fdsc_file.close()
                    return

        if fdsc_file:
            fdsc_file.close()

        # Start to build
        if packages_dscs:
            self.run_build_loop(packages_dscs)
        else:
            logger.info("No debian dsc files found")

    def show_build_stats(self):
        if len(self.lists['success']) > 0:
            logger.info("Successfully built:")
            for deb in sorted(self.lists['success']):
                logger.info(deb)

        failed_pkgs = list(set(self.lists['fail']) - set(self.lists['success']))
        if len(failed_pkgs) > 0:
            logger.error("Failed to build:")
            for deb in sorted(failed_pkgs):
                logger.error(deb)
            logger.info("To find the failure reason, check with:")
            logger.info("'cat /localdisk/builder.log | grep ERROR' or")
            logger.info("'cat ${MY_WORKSPACE}/<std or rt>/<failed package>/*.build'")


def bc_signal_handler(signum, frame):
    if not build_controller:
        return

    if frame:
        logger.debug(' '.join(['Signal', str(signum), 'got']))
    logger.debug('Request to stop building tasks')
    build_controller.req_stop_task()
    build_controller.show_build_stats()
    logger.debug('Exit for user interruption')
    sys.exit(1)


def bc_reg_signal_handler():
    signal.signal(signal.SIGINT, bc_signal_handler)
    signal.signal(signal.SIGHUP, bc_signal_handler)
    signal.signal(signal.SIGTERM, bc_signal_handler)


if __name__ == "__main__":
    default_layer = 'distro'
    build_port = 'all'
    build_data = None

    parser = argparse.ArgumentParser(description="build-pkgs helper")
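    # Typical invocations (illustrative, based on the options below):
    #   build-pkgs -p <pkg1>,<pkg2>   build only the listed packages
    #   build-pkgs -l distro,flock    build all packages of the given layers
    #   build-pkgs -a -c              clean first, then build everything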
    parser.add_argument('-c', '--clean', help="Start a fresh build",
                        action='store_true')
    parser.add_argument('-e', '--exit_on_fail', help="Exit on any failure",
                        action='store_true')
    parser.add_argument('-t', '--test', help="Run package tests during build",
                        action='store_true')

    # mutually exclusive options for package build and layer build
    build_group = parser.add_mutually_exclusive_group()
    build_group.add_argument('-a', '--all', help="Build all packages",
                             action='store_true')
    build_group.add_argument('-l', '--layers', help="Comma-separated list of layers",
                             type=str)
    build_group.add_argument('-p', '--packages', help="Comma-separated list of packages",
                             type=str)
    args = parser.parse_args()
    if args.packages:
        build_port = 'package'
        build_data = args.packages.strip().split(',')
    elif args.layers:
        build_port = 'layer'
        build_data = args.layers.strip().split(',')
    elif args.all:
        build_port = 'all'
        build_data = None
    else:
        logger.error("Please consult: build-pkgs --help")
        sys.exit(1)

    build_controller = BuildController()
    if args.clean:
        build_controller.build_avoid = False
        if build_port == 'all':
            build_controller.clean()
    if args.exit_on_fail:
        build_controller.attrs['exit_on_fail'] = True
    if args.test:
        build_controller.attrs['run_tests'] = True

    if not build_controller.start():
        logger.critical("Failed to initialize build controller, exiting ...")
        sys.exit(1)

    bc_reg_signal_handler()

    # mirror can be passed to add_chroot as the main package repo
    # e.g. http://ftp.de.debian.org/debian
    if build_controller.add_chroot(None) != 'success':
        pkgbuilder_log = '/localdisk/pkgbuilder/pkgbuilder.log'
        logger.error(' '.join(['Chroot is not ready, please check',
                               pkgbuilder_log]))
        sys.exit(1)

    build_controller.build_route(build_port, build_data)
    build_controller.stop()

    logger.info("Build controller done")