
If a buck build was done on a system without curl installed, a misleading error message was printed: "error creating directory /home/user/.gerritcodereview/buck-cache". However, the real error was that the invocation of curl failed. Since that failure raised an OSError, it was handled by the wrong except block. Separate the handling of the OSError for the folder creation from that for the curl invocation. Change-Id: Ic7e7c2c2704ea4cbccff4689dffe17a436108395
182 lines
4.8 KiB
Python
Executable File
182 lines
4.8 KiB
Python
Executable File
#!/usr/bin/python
|
|
# Copyright (C) 2013 The Android Open Source Project
|
|
#
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
# you may not use this file except in compliance with the License.
|
|
# You may obtain a copy of the License at
|
|
#
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
#
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
# See the License for the specific language governing permissions and
|
|
# limitations under the License.
|
|
|
|
from __future__ import print_function
|
|
|
|
from hashlib import sha1
|
|
from optparse import OptionParser
|
|
from os import link, makedirs, path, remove, symlink
|
|
import shutil
|
|
from subprocess import check_call, CalledProcessError
|
|
from sys import stderr
|
|
from zipfile import ZipFile, BadZipfile, LargeZipFile
|
|
|
|
# Repository roots that scheme-prefixed artifact URLs (e.g. 'GERRIT:x.jar')
# resolve against; individual roots may be overridden through download.<SCHEME>
# entries in a local.properties file (see download_properties / resolve_url).
REPO_ROOTS = {
  'GERRIT': 'http://gerrit-maven.commondatastorage.googleapis.com',
  'MAVEN_CENTRAL': 'http://repo1.maven.org/maven2',
}

# Per-user Gerrit settings directory and the download cache kept inside it.
GERRIT_HOME = path.expanduser('~/.gerritcodereview')
CACHE_DIR = path.join(GERRIT_HOME, 'buck-cache')
# Name of the optional properties file carrying download.* overrides.
LOCAL_PROPERTIES = 'local.properties'
|
|
|
|
|
|
def hashfile(p):
  """Return the hex SHA-1 digest of the file at path p, read in chunks."""
  digest = sha1()
  with open(p, 'rb') as f:
    # iter() with a sentinel calls f.read(8192) until it yields b''.
    for chunk in iter(lambda: f.read(8192), b''):
      digest.update(chunk)
  return digest.hexdigest()
|
|
|
|
def safe_mkdirs(d):
  """Create directory d and any missing parents; an existing directory is not an error."""
  if not path.isdir(d):
    try:
      makedirs(d)
    except OSError as err:
      # Lost a race with another process creating the same path: fine.
      if path.isdir(d):
        return
      raise err
|
|
|
|
def download_properties(root_dir):
  """Load the download.* properties for this build.

  The local.properties file is looked up first in the given root
  directory and, failing that, in the Gerrit settings folder in the
  user's home directory.

  Returns a dict mapping the text after 'download.' in each property
  name to its value; empty when no properties file is found.
  """
  overrides = {}
  prop_file = path.join(root_dir, LOCAL_PROPERTIES)
  if not path.isfile(prop_file):
    prop_file = path.join(GERRIT_HOME, LOCAL_PROPERTIES)
  if not path.isfile(prop_file):
    return overrides
  try:
    with open(prop_file) as fd:
      for line in fd:
        if not line.startswith('download.'):
          continue
        parts = [piece.strip() for piece in line.split('=', 1)]
        overrides[parts[0][len('download.'):]] = parts[1]
  except OSError:
    # Best effort: an unreadable properties file means no overrides.
    pass
  return overrides
|
|
|
|
def cache_entry(args):
  """Return the cache path for this download: <basename>-<hash> under CACHE_DIR.

  The hash component is the expected content SHA-1 when supplied (-v),
  otherwise the SHA-1 of the source URL itself.
  """
  suffix = args.v if args.v else sha1(args.u).hexdigest()
  return path.join(CACHE_DIR, '%s-%s' % (path.basename(args.o), suffix))
|
|
|
|
def resolve_url(url, redirects):
  """Expand a scheme-prefixed URL such as 'MAVEN_CENTRAL:/path/to.jar'.

  A scheme listed in REPO_ROOTS is replaced by its repository root,
  honoring any override supplied in redirects; all other URLs pass
  through unchanged.
  """
  scheme, sep, rest = url.partition(':')
  if not sep or scheme not in REPO_ROOTS:
    return url
  root = redirects.get(scheme, REPO_ROOTS[scheme])
  return '%s/%s' % (root.rstrip('/'), rest.lstrip('/'))
|
|
|
|
# Command line interface. -o and -u are required in practice: the code
# below dereferences args.o and args.u unconditionally.
opts = OptionParser()
opts.add_option('-o', help='local output file')
opts.add_option('-u', help='URL to download')
opts.add_option('-v', help='expected content SHA-1')
opts.add_option('-x', action='append', help='file to delete from ZIP')
opts.add_option('--exclude_java_sources', action='store_true')
args, _ = opts.parse_args()
|
|
|
|
# Walk upward from the output path until the 'buck-out' component is found;
# what remains in root_dir is the project root, where a local.properties
# file may live.
root_dir = args.o
while root_dir:
  root_dir, n = path.split(root_dir)
  if n == 'buck-out':
    break

# download.* overrides (possibly empty), cache location, and the final URL.
redirects = download_properties(root_dir)
cache_ent = cache_entry(args)
src_url = resolve_url(args.u, redirects)
|
|
|
|
# Fetch into the shared cache unless a previous run already did.
if not path.exists(cache_ent):
  try:
    safe_mkdirs(path.dirname(cache_ent))
  except OSError as err:
    # Failure creating the cache directory only; curl failures are
    # reported separately below so the message is not misleading.
    print('error creating directory %s: %s' %
          (path.dirname(cache_ent), err), file=stderr)
    exit(1)

  print('Download %s' % src_url, file=stderr)
  try:
    check_call(['curl', '--proxy-anyauth', '-sfo', cache_ent, src_url])
  except OSError as err:
    # Exec of curl itself failed (binary missing / not executable).
    print('could not invoke curl: %s\nis curl installed?' % err, file=stderr)
    exit(1)
  except CalledProcessError as err:
    # curl ran but exited non-zero (e.g. HTTP failure, due to -f).
    print('error using curl: %s' % err, file=stderr)
    exit(1)
|
|
|
|
# Verify the cached file against the expected content SHA-1, if given.
if args.v:
  have = hashfile(cache_ent)
  if args.v != have:
    print((
      '%s:\n' +
      'expected %s\n' +
      'received %s\n') % (src_url, args.v, have), file=stderr)
    # Drop the corrupt cache entry so the next run re-downloads it.
    try:
      remove(cache_ent)
    except OSError as err:
      # Only report removal failures when the file actually remains.
      if path.exists(cache_ent):
        print('error removing %s: %s' % (cache_ent, err), file=stderr)
    exit(1)
|
|
|
|
# Entries to strip from the downloaded ZIP before publishing it to args.o:
# explicit -x names plus, optionally, every *.java member.
exclude = []
if args.x:
  exclude += args.x
if args.exclude_java_sources:
  try:
    zf = ZipFile(cache_ent, 'r')
    try:
      for n in zf.namelist():
        if n.endswith('.java'):
          exclude.append(n)
    finally:
      zf.close()
  except (BadZipfile, LargeZipFile) as err:
    print("error opening %s: %s" % (cache_ent, err), file=stderr)
    exit(1)

safe_mkdirs(path.dirname(args.o))
if exclude:
  # Work on a copy so the pristine cache entry is preserved.
  shutil.copyfile(cache_ent, args.o)
  try:
    check_call(['zip', '-d', args.o] + exclude)
  except CalledProcessError as err:
    print('error removing files from zip: %s' % err, file=stderr)
    # Fail the build instead of exiting 0 with a partially stripped
    # archive; every other error path in this script exits non-zero.
    exit(1)
else:
  # Nothing to strip: hard-link the cache entry into place, falling back
  # to a symlink when hard-linking fails (e.g. across filesystems).
  try:
    link(cache_ent, args.o)
  except OSError:
    symlink(cache_ent, args.o)
|