downloader: save file names of required downloads
Downloader scans DEB image list files and meta_data.yaml files looking for required external files, then downloads them locally to $BUILD_HOME/mirrors. This commit also saves the file names of such files to a pair of list files under $MY_WORKSPACE/required_downloads: * sources.txt: list of source-type downloads (from meta_data.yaml), relative to their download location, .../mirrors/starlingx/sources/ * binaries.txt: list of DEB-type downloads (from download list files), relative to their download location, .../mirrors/starlingx/binaries/ This is necessary for publishing only the required files in Jenkins. Story: 2010226 Task: 50642 TESTS ======================================== * Delete $BUILD_HOME/mirrors/* * Run downloader and make sure each file is recorded in $MY_WORKSPACE/required_downloads/*.txt * Re-run downloader and make sure each file is recorded, even though none have been downloaded during this run * Build all packages Change-Id: I22be91bf95bdf8afedf331c2fcba9f6fcc2176ad Signed-off-by: Davlet Panech <davlet.panech@windriver.com>
This commit is contained in:
parent
e9313cc9d8
commit
4e2470da48
@ -81,18 +81,21 @@ class DownloadProgress():
|
||||
else:
|
||||
self.pbar.finish()
|
||||
|
||||
def verify_dsc_file(dsc_file, sha256, logger):
|
||||
def verify_dsc_file(dsc_file, sha256, logger)->list[str]:
|
||||
|
||||
if not os.path.isfile(dsc_file):
|
||||
return None
|
||||
|
||||
# with sha256 supplied, verify it, but not the GPG signature
|
||||
if sha256:
|
||||
if not checksum(dsc_file, sha256, 'sha256sum', logger):
|
||||
return False
|
||||
return None
|
||||
try:
|
||||
cmd = 'dscverify --nosigcheck %s' % dsc_file
|
||||
out,err = run_shell_cmd_full(cmd, logger, logging.INFO)
|
||||
except subprocess.CalledProcessError:
|
||||
logger.warning ('%s: dscverify failed', dsc_file)
|
||||
return False
|
||||
return None
|
||||
# fall through
|
||||
|
||||
# otherwise verify the GPG signature, and if its the only problem,
|
||||
@ -109,7 +112,7 @@ def verify_dsc_file(dsc_file, sha256, logger):
|
||||
out,err = run_shell_cmd_full(cmd, logger, logging.INFO)
|
||||
except subprocess.CalledProcessError:
|
||||
logger.warning ('%s: dscverify failed', dsc_file)
|
||||
return False
|
||||
return None
|
||||
# succeeded w/o GPG check: print a warning
|
||||
logger.warning('%s: GPG signature check failed. You can suppress ' +
|
||||
'this warning by adding a dsc_sha256 option with the ' +
|
||||
@ -123,9 +126,16 @@ def verify_dsc_file(dsc_file, sha256, logger):
|
||||
# Look for those and assume verification failed in this case.
|
||||
if err.find('(not present)') != -1:
|
||||
logger.warning ('%s: one or more referenced files are missing', dsc_file)
|
||||
return False
|
||||
return None
|
||||
|
||||
return True
|
||||
# Return the list of all files
|
||||
flist = [ dsc_file ]
|
||||
with open(dsc_file) as f:
|
||||
dsc = debian.deb822.Dsc(f)
|
||||
for file in dsc['Files']:
|
||||
flist.append(file['name'])
|
||||
|
||||
return flist
|
||||
|
||||
|
||||
def get_str_md5(text):
|
||||
@ -746,12 +756,15 @@ class Parser():
|
||||
|
||||
def download(self, pkgpath, mirror):
|
||||
|
||||
rel_used_dl_files = []
|
||||
|
||||
self.setup(pkgpath)
|
||||
if not os.path.exists(mirror):
|
||||
self.logger.error("No such %s directory", mirror)
|
||||
raise ValueError(f"No such {mirror} directory")
|
||||
|
||||
saveto = os.path.join(mirror, self.pkginfo["pkgname"])
|
||||
rel_saveto = self.pkginfo["pkgname"]
|
||||
saveto = os.path.join(mirror, rel_saveto)
|
||||
if not os.path.exists(saveto):
|
||||
os.mkdir(saveto)
|
||||
|
||||
@ -780,6 +793,7 @@ class Parser():
|
||||
download(dl_url, dl_file, self.logger)
|
||||
if not checksum(dl_file, check_sum, check_cmd, self.logger):
|
||||
raise Exception(f'Fail to download {dl_file}')
|
||||
rel_used_dl_files.append(dl_file)
|
||||
|
||||
if "dl_path" in self.meta_data:
|
||||
dl_file = self.meta_data["dl_path"]["name"]
|
||||
@ -802,12 +816,14 @@ class Parser():
|
||||
download(dl_url, dl_file, self.logger)
|
||||
if not checksum(dl_file, check_sum, check_cmd, self.logger):
|
||||
raise Exception(f'Failed to download {dl_file}')
|
||||
rel_used_dl_files.append(dl_file)
|
||||
|
||||
elif "archive" in self.meta_data:
|
||||
ver = self.versions["full_version"].split(":")[-1]
|
||||
dsc_filename = self.pkginfo["debname"] + "_" + ver + ".dsc"
|
||||
|
||||
if not os.path.exists(dsc_filename) or not verify_dsc_file(dsc_filename, self.dsc_sha256, logger=self.logger):
|
||||
dsc_member_files = verify_dsc_file(dsc_filename, self.dsc_sha256, logger=self.logger)
|
||||
if not dsc_member_files:
|
||||
self.logger.info ('%s: file not found, or integrity verification failed; (re-)downloading...', dsc_filename)
|
||||
|
||||
# save to a temporary directory, then move into place
|
||||
@ -831,7 +847,8 @@ class Parser():
|
||||
run_shell_cmd("dget %s %s" % (dget_flags, dl_url), self.logger)
|
||||
|
||||
# verify checksums/signatures
|
||||
if not verify_dsc_file(dsc_filename, self.dsc_sha256, logger=self.logger):
|
||||
dsc_member_files = verify_dsc_file(dsc_filename, self.dsc_sha256, logger=self.logger)
|
||||
if not dsc_member_files:
|
||||
raise Exception('%s: %s: DSC file verification failed' % (self.meta_data_file, dsc_filename))
|
||||
|
||||
# move downloaded files into place
|
||||
@ -841,6 +858,7 @@ class Parser():
|
||||
finally:
|
||||
os.chdir(saveto)
|
||||
|
||||
rel_used_dl_files += dsc_member_files
|
||||
|
||||
# Upload it to aptly
|
||||
# FIXME: this parameter is always None (?)
|
||||
@ -853,7 +871,8 @@ class Parser():
|
||||
|
||||
# See also comments in the "archive" section above.
|
||||
|
||||
if not os.path.exists(dsc_filename) or not verify_dsc_file(dsc_filename, self.dsc_sha256, logger=self.logger):
|
||||
dsc_member_files = verify_dsc_file(dsc_filename, self.dsc_sha256, logger=self.logger)
|
||||
if not dsc_member_files:
|
||||
self.logger.info ('%s: file not found, or integrity verification failed; (re-)downloading...', dsc_filename)
|
||||
|
||||
# save to a temporary directory, then move into place
|
||||
@ -884,7 +903,8 @@ class Parser():
|
||||
run_shell_cmd("apt-get source %s %s" % (apt_get_flags, fullname), self.logger)
|
||||
|
||||
# verify checksums/signatures
|
||||
if not verify_dsc_file(dsc_filename, self.dsc_sha256, logger=self.logger):
|
||||
dsc_member_files = verify_dsc_file(dsc_filename, self.dsc_sha256, logger=self.logger)
|
||||
if not dsc_member_files:
|
||||
raise Exception('%s: %s: DSC file verification failed' % (self.meta_data_file, dsc_filename))
|
||||
|
||||
# move downloaded files into place
|
||||
@ -894,6 +914,8 @@ class Parser():
|
||||
finally:
|
||||
os.chdir(saveto)
|
||||
|
||||
rel_used_dl_files += dsc_member_files
|
||||
|
||||
# Upload it to aptly
|
||||
# FIXME: this parameter is always None (?)
|
||||
if self.srcrepo is not None:
|
||||
@ -901,6 +923,9 @@ class Parser():
|
||||
|
||||
os.chdir(pwd)
|
||||
|
||||
used_dl_files = [ '%s/%s' % (rel_saveto, file) for file in rel_used_dl_files ]
|
||||
return used_dl_files
|
||||
|
||||
def package(self, pkgpath, mirror):
|
||||
|
||||
self.setup(pkgpath)
|
||||
|
@ -19,6 +19,7 @@ import argparse
|
||||
import debrepack
|
||||
import discovery
|
||||
import fnmatch
|
||||
import glob
|
||||
import logging
|
||||
import os
|
||||
import pathlib
|
||||
@ -201,7 +202,7 @@ def update_apt():
|
||||
logger.error(f" An unexpected error occurred {e}")
|
||||
|
||||
class BaseDownloader():
|
||||
def __init__(self, arch, _dl_dir, clean):
|
||||
def __init__(self, arch, _dl_dir, dl_list_file, clean):
|
||||
self.dl_dir = _dl_dir
|
||||
self.arch = arch
|
||||
self.clean_mirror = clean
|
||||
@ -213,6 +214,7 @@ class BaseDownloader():
|
||||
self.repomgr = repo_manage.RepoMgr('aptly', os.environ.get('REPOMGR_URL'),
|
||||
'/tmp/', os.environ.get('REPOMGR_ORIGIN'),
|
||||
rlogger)
|
||||
self.dl_list_fh = open(dl_list_file, 'w')
|
||||
|
||||
def clean(self):
|
||||
if os.path.exists(self.dl_dir):
|
||||
@ -256,10 +258,15 @@ class BaseDownloader():
|
||||
logger.error(' '.join([dlobj.strip()]))
|
||||
|
||||
|
||||
def save_dl_file_names(self, filename_list):
|
||||
for filename in filename_list:
|
||||
print (filename, file=self.dl_list_fh)
|
||||
self.dl_list_fh.flush()
|
||||
|
||||
|
||||
class DebDownloader(BaseDownloader):
|
||||
def __init__(self, arch, _dl_dir, force, _layer_binaries):
|
||||
super(DebDownloader, self).__init__(arch, _dl_dir, force)
|
||||
def __init__(self, arch, _dl_dir, dl_list_file, force, _layer_binaries):
|
||||
super(DebDownloader, self).__init__(arch, _dl_dir, dl_list_file, force)
|
||||
self.need_download = []
|
||||
self.downloaded = []
|
||||
self.need_upload = []
|
||||
@ -439,6 +446,7 @@ class DebDownloader(BaseDownloader):
|
||||
previously_uploaded = self.repomgr.list_pkgs(repo)
|
||||
logger.info(' '.join(['previously uploaded to repo', repo, ':', str(previously_uploaded)]))
|
||||
|
||||
used_dl_files = []
|
||||
pkg_data_map = {}
|
||||
if pkg_data:
|
||||
for pkg_dict in pkg_data:
|
||||
@ -466,6 +474,7 @@ class DebDownloader(BaseDownloader):
|
||||
logger.debug(''.join([pname_epoch_arch, ' has been downloaded, skip']))
|
||||
self.dl_success.append(pkg_name + '_' + pkg_ver)
|
||||
self.need_upload.append([pname_arch, pname_epoch_arch])
|
||||
self.save_dl_file_names([pname_arch])
|
||||
else:
|
||||
# Tests show that the 'epoch' should be taken when
|
||||
# fetch the package with 'apt' module, there is not 'epoch'
|
||||
@ -484,6 +493,7 @@ class DebDownloader(BaseDownloader):
|
||||
|
||||
ret = self.download(debnames[0], debnames[1], url)
|
||||
if ret:
|
||||
self.save_dl_file_names([os.path.basename (ret)])
|
||||
deb_ver = debnames[1].split(":")[-1]
|
||||
deb_ver_epoch = '_'.join([debnames[0], debnames[1]])
|
||||
logger.info(' '.join([deb_ver_epoch, ' download ok']))
|
||||
@ -579,8 +589,8 @@ class DebDownloader(BaseDownloader):
|
||||
|
||||
|
||||
class SrcDownloader(BaseDownloader):
|
||||
def __init__(self, arch, _dl_dir, force):
|
||||
super(SrcDownloader, self).__init__(arch, _dl_dir, force)
|
||||
def __init__(self, arch, _dl_dir, dl_list_file, force):
|
||||
super(SrcDownloader, self).__init__(arch, _dl_dir, dl_list_file, force)
|
||||
self.parser = None
|
||||
|
||||
def prepare(self):
|
||||
@ -599,18 +609,17 @@ class SrcDownloader(BaseDownloader):
|
||||
|
||||
return True
|
||||
|
||||
def download_pkg_src(self, _pkg_path):
|
||||
def download_pkg_src(self, _pkg_path)->list[str]:
|
||||
if not self.parser:
|
||||
return False
|
||||
return None
|
||||
try:
|
||||
self.parser.download(_pkg_path, self.dl_dir)
|
||||
return self.parser.download(_pkg_path, self.dl_dir)
|
||||
except Exception as e:
|
||||
logger.error(str(e))
|
||||
logger.error("Failed to download source with %s", _pkg_path)
|
||||
return False
|
||||
return True
|
||||
return None
|
||||
|
||||
def download_all(self, distro=STX_DEFAULT_DISTRO, layers=None, build_types=None):
|
||||
def download_all(self, distro=STX_DEFAULT_DISTRO, layers=None, build_types=None)->list[str]:
|
||||
logger.info("download_all, layers=%s, build_types=%s" % (layers, build_types))
|
||||
if layers:
|
||||
for layer in layers:
|
||||
@ -649,9 +658,11 @@ class SrcDownloader(BaseDownloader):
|
||||
logger.info("Starting to download %d source packages", len(pkg_dirs))
|
||||
logger.info("%s", sorted(self.dl_need))
|
||||
for pkg_dir in pkg_dirs:
|
||||
if self.download_pkg_src(pkg_dir):
|
||||
dl_files = self.download_pkg_src(pkg_dir)
|
||||
if dl_files is not None:
|
||||
if pkg_dir in pkg_dirs_to_names:
|
||||
self.dl_success.append(pkg_dirs_to_names[pkg_dir])
|
||||
self.save_dl_file_names (dl_files)
|
||||
else:
|
||||
if pkg_dir in pkg_dirs_to_names:
|
||||
self.dl_failed.append(pkg_dirs_to_names[pkg_dir])
|
||||
@ -749,14 +760,20 @@ if __name__ == "__main__":
|
||||
|
||||
update_apt()
|
||||
|
||||
dl_list_dir = '%s/required_downloads' % os.environ['MY_WORKSPACE']
|
||||
if os.path.isdir(dl_list_dir):
|
||||
shutil.rmtree(dl_list_dir)
|
||||
os.makedirs(dl_list_dir, exist_ok=True)
|
||||
if args.download_binary:
|
||||
all_binary_lists = get_all_binary_list(distro=distro, layers=layers, build_types=build_types)
|
||||
binary_dl = DebDownloader(DEFAULT_ARCH, stx_bin_mirror, clean_mirror, all_binary_lists)
|
||||
dl_list_file_bin = '%s/binaries.txt' % dl_list_dir
|
||||
binary_dl = DebDownloader(DEFAULT_ARCH, stx_bin_mirror, dl_list_file_bin, clean_mirror, all_binary_lists)
|
||||
if not binary_dl.create_binary_repo():
|
||||
sys.exit(1)
|
||||
|
||||
if args.download_source:
|
||||
source_dl = SrcDownloader(DEFAULT_ARCH, stx_src_mirror, clean_mirror)
|
||||
dl_list_file_src = '%s/sources.txt' % dl_list_dir
|
||||
source_dl = SrcDownloader(DEFAULT_ARCH, stx_src_mirror, dl_list_file_src, clean_mirror)
|
||||
|
||||
dl_register_signal_handler()
|
||||
if binary_dl:
|
||||
@ -770,6 +787,14 @@ if __name__ == "__main__":
|
||||
logger.info('Show the download result for source packages:')
|
||||
source_ret = source_dl.reports()
|
||||
|
||||
# sort required_download lists
|
||||
for dl_list_file in glob.glob('%s/*.txt' % dl_list_dir):
|
||||
if os.path.isfile(dl_list_file):
|
||||
cmd = 'file="%s" && sort -u "$file" >"$file".tmp && mv -f "$file".tmp "$file"' % dl_list_file
|
||||
utils.run_shell_cmd(cmd, logger)
|
||||
|
||||
logger.info('Required downloads\' file names are in %s/', dl_list_dir)
|
||||
|
||||
logger.info("Verifying downloader return status")
|
||||
if binary_ret != 0:
|
||||
logger.error("Binary downloader failed")
|
||||
|
Loading…
Reference in New Issue
Block a user