updated pip installation process

extended the base config for pip to use an install section

removed extra options that are already in the base config
This commit is contained in:
Kevin Carter 2014-10-28 10:35:51 -05:00
parent 046bdd32ca
commit 9d4d7e31f7
5 changed files with 400 additions and 173 deletions

View File

@ -16,7 +16,7 @@
- name: Install pip dependencies - name: Install pip dependencies
pip: pip:
name: "{{ item }}" name: "{{ item }}"
extra_args: "--allow-all-external" extra_args: "{{ pip_install_options|default('') }}"
with_items: service_pip_dependencies with_items: service_pip_dependencies
when: service_pip_dependencies is defined when: service_pip_dependencies is defined
register: pip_install register: pip_install

View File

@ -2,3 +2,7 @@
no-index = true no-index = true
pre = true pre = true
timeout = 120 timeout = 120
[install]
upgrade = true
allow_all_external = true

View File

@ -16,7 +16,7 @@
- name: Install service source - name: Install service source
pip: pip:
name: "/opt/{{ service_name }}_{{ git_install_branch | replace('/', '_') }}" name: "/opt/{{ service_name }}_{{ git_install_branch | replace('/', '_') }}"
extra_args: "{{ pip_install_options|default('') }} --allow-all-external" extra_args: "{{ pip_install_options|default('') }}"
register: pip_install register: pip_install
until: pip_install|success until: pip_install|success
retries: 5 retries: 5
@ -27,7 +27,7 @@
- name: Install pip repo plugins - name: Install pip repo plugins
pip: pip:
name: "{{ git_dest }}/{{ item.path }}/{{ item.package }}" name: "{{ git_dest }}/{{ item.path }}/{{ item.package }}"
extra_args: "{{ pip_install_options|default('') }} --allow-all-external" extra_args: "{{ pip_install_options|default('') }}"
when: git_dest is defined and git_repo_plugins is defined when: git_dest is defined and git_repo_plugins is defined
with_items: git_repo_plugins with_items: git_repo_plugins
register: pip_install register: pip_install

View File

@ -38,7 +38,8 @@ PYTHON_PACKAGES = {
'base_release': dict(), 'base_release': dict(),
'known_release': dict(), 'known_release': dict(),
'from_git': dict(), 'from_git': dict(),
'required_packages': dict() 'required_packages': dict(),
'built_files': list()
} }
GIT_REPOS = [] GIT_REPOS = []
@ -54,28 +55,31 @@ VERSION_DESCRIPTORS = [
'>=', '<=', '==', '!=', '<', '>' '>=', '<=', '==', '!=', '<', '>'
] ]
# Defines constant for use later.
LOG = None
class IndicatorThread(object): class IndicatorThread(object):
"""Creates a visual indicator while normally performing actions.""" """Creates a visual indicator while normally performing actions."""
def __init__(self, work_q=None, system=True, debug=False): def __init__(self, note=None, system=True, debug=False, quiet=False):
"""System Operations Available on Load. """System Operations Available on Load."""
:param work_q:
:param system:
"""
self.quiet = quiet
self.debug = debug self.debug = debug
self.work_q = work_q
self.system = system self.system = system
self.note = note
if self.note is None:
self.note = 'Please Wait... '
self.job = None self.job = None
def __enter__(self): def __enter__(self):
if self.debug is False: if all([self.debug is False, self.quiet is False]):
self.indicator_thread() return self.indicator_thread()
def __exit__(self, exc_type, exc_val, exc_tb): def __exit__(self, exc_type, exc_val, exc_tb):
if self.debug is False: self.system = False
if all([self.debug is False, self.quiet is False]):
print('Done.') print('Done.')
self.job.terminate() self.job.terminate()
@ -85,7 +89,7 @@ class IndicatorThread(object):
while self.system: while self.system:
busy_chars = ['|', '/', '-', '\\'] busy_chars = ['|', '/', '-', '\\']
for bc in busy_chars: for bc in busy_chars:
note = 'Please Wait... ' note = self.note
sys.stdout.write('\rProcessing - [ %s ] - %s' % (bc, note)) sys.stdout.write('\rProcessing - [ %s ] - %s' % (bc, note))
sys.stdout.flush() sys.stdout.flush()
time.sleep(.1) time.sleep(.1)
@ -113,7 +117,7 @@ def get_file_names(path, ext=None):
""" """
paths = os.walk(os.path.abspath(path)) paths = os.walk(os.path.abspath(path))
files = [] files = list()
for fpath, _, afiles in paths: for fpath, _, afiles in paths:
for afile in afiles: for afile in afiles:
if ext is not None: if ext is not None:
@ -301,7 +305,7 @@ def retryloop(attempts, timeout=None, delay=None, backoff=1, obj=None):
_error_handler(msg=error) _error_handler(msg=error)
def build_wheel(wheel_dir, build_dir, dist=None, pkg_name=None, quiet=False, def build_wheel(wheel_dir, build_dir, link_dir, dist=None, pkg_name=None,
make_opts=None): make_opts=None):
"""Execute python wheel build command. """Execute python wheel build command.
@ -314,7 +318,7 @@ def build_wheel(wheel_dir, build_dir, dist=None, pkg_name=None, quiet=False,
'pip', 'pip',
'wheel', 'wheel',
'--find-links', '--find-links',
wheel_dir, link_dir,
'--timeout', '--timeout',
'120', '120',
'--wheel-dir', '--wheel-dir',
@ -337,24 +341,36 @@ def build_wheel(wheel_dir, build_dir, dist=None, pkg_name=None, quiet=False,
build_command = ' '.join(command) build_command = ' '.join(command)
LOG.info('Command: %s' % build_command) LOG.info('Command: %s' % build_command)
output, unused_err = None, None
for retry in retryloop(3, obj=build_command, delay=2, backoff=1): for retry in retryloop(3, obj=build_command, delay=2, backoff=1):
try: try:
with IndicatorThread(debug=quiet): process = subprocess.Popen(
ret_data = subprocess.check_call( command,
command, stdout=subprocess.PIPE,
stdout=LoggerWriter(), stderr=LoggerWriter()
stderr=LoggerWriter() )
) output, unused_err = process.communicate()
retcode = process.poll()
LOG.info('Command return code: [ %s ]', ret_data) LOG.info('Command return code: [ %s ]', retcode)
if ret_data: if retcode:
raise subprocess.CalledProcessError(ret_data, build_command) raise subprocess.CalledProcessError(
retcode, build_command, output=output
)
except subprocess.CalledProcessError as exp: except subprocess.CalledProcessError as exp:
LOG.warn( LOG.warn(
'Process failure. Error: [ %s ]. Removing build directory' 'Process failure. stderr: [ %s ], stdout [ %s ], Exception'
' for retry. Check log for more detauls.', str(exp) ' [ %s ]. Removing build directory for retry. Check log for'
' more details.',
unused_err,
output,
str(exp)
) )
remove_dirs(directory=build_dir)
retry() retry()
finally:
# Ensure the build directories are clean
remove_dirs(directory=build_dir)
def remove_dirs(directory): def remove_dirs(directory):
@ -380,79 +396,123 @@ def remove_dirs(directory):
pass pass
def _requirements_maker(name, wheel_dir, release, build_dir, quiet, make_opts, def copy_file(src, dst):
iterate=False): LOG.debug('Copying [ %s ] -> [ %s ]', src, dst)
requirements_file_lines = [] with open(src, 'rb') as open_src:
for value in sorted(release.values()): with open(dst, 'wb') as open_dst:
requirements_file_lines.append('%s\n' % value) while True:
buf = open_src.read(24 * 1024)
if not buf:
break
else:
open_dst.write(buf)
requirements_file = os.path.join(wheel_dir, name)
with open(requirements_file, 'wb') as f: def _requirements_maker(name, wheel_dir, release, build_dir, make_opts,
f.writelines(requirements_file_lines) link_dir=None, iterate=False):
if link_dir is None:
link_dir = wheel_dir
if iterate is True: if iterate is True:
for pkg in sorted(release.values()): for pkg in sorted(release.values()):
build_wheel( build_wheel(
wheel_dir=wheel_dir, wheel_dir=wheel_dir,
build_dir=build_dir, build_dir=build_dir,
dist=None, link_dir=link_dir,
pkg_name=pkg, pkg_name=pkg,
quiet=quiet,
make_opts=make_opts make_opts=make_opts
) )
remove_dirs(directory=build_dir)
else: else:
requirements_file_lines = []
for value in sorted(set(release.values())):
requirements_file_lines.append('%s\n' % value)
requirements_file = os.path.join(wheel_dir, name)
with open(requirements_file, 'wb') as f:
f.writelines(requirements_file_lines)
build_wheel( build_wheel(
wheel_dir=wheel_dir, wheel_dir=wheel_dir,
build_dir=build_dir, build_dir=build_dir,
link_dir=link_dir,
dist=requirements_file, dist=requirements_file,
quiet=quiet,
make_opts=make_opts make_opts=make_opts
) )
remove_dirs(directory=build_dir)
def make_wheels(wheel_dir, build_dir, quiet): def _make_wheels(wheel_dir, build_dir, temp_store_dir):
LOG.info('Building base packages')
_requirements_maker(
name='rpc_base_requirements.txt',
wheel_dir=temp_store_dir,
release=PYTHON_PACKAGES['base_release'],
build_dir=build_dir,
make_opts=None,
link_dir=wheel_dir
)
LOG.info('Building known absolute packages')
_requirements_maker(
name='rpc_known_requirements.txt',
wheel_dir=temp_store_dir,
release=PYTHON_PACKAGES['known_release'],
build_dir=build_dir,
make_opts=['--no-deps'],
link_dir=wheel_dir
)
LOG.info('Building required packages')
_requirements_maker(
name='rpc_required_requirements.txt',
wheel_dir=temp_store_dir,
release=PYTHON_PACKAGES['required_packages'],
build_dir=build_dir,
make_opts=None,
link_dir=wheel_dir,
iterate=True
)
built_wheels = get_file_names(temp_store_dir)
PYTHON_PACKAGES['built_files'] = [
os.path.basename(i) for i in built_wheels
]
LOG.info('Moving built packages into place')
for built_wheel in built_wheels:
wheel_file = os.path.join(wheel_dir, os.path.basename(built_wheel))
if os.path.exists(wheel_file):
if os.path.getsize(wheel_file) != os.path.getsize(built_wheel):
copy_file(src=built_wheel, dst=wheel_file)
else:
copy_file(src=built_wheel, dst=wheel_file)
def make_wheels(wheel_dir, build_dir):
"""Build wheels of all installed packages that don't already have one. """Build wheels of all installed packages that don't already have one.
:param wheel_dir: ``str`` $PATH to local save directory :param wheel_dir: ``str`` $PATH to local save directory
:param build_dir: ``str`` $PATH to temp build directory :param build_dir: ``str`` $PATH to temp build directory
""" """
_requirements_maker( temp_store_dir = os.path.join(
name='rpc_base_requirements.txt', tempfile.mkdtemp(prefix='rpc_wheels_temp_storage')
wheel_dir=wheel_dir,
release=PYTHON_PACKAGES['base_release'],
build_dir=build_dir,
quiet=quiet,
make_opts=None
) )
_mkdirs(path=temp_store_dir)
_requirements_maker( try:
name='rpc_required_requirements.txt', _make_wheels(
wheel_dir=wheel_dir, wheel_dir=wheel_dir,
release=PYTHON_PACKAGES['required_packages'], build_dir=build_dir,
build_dir=build_dir, temp_store_dir=temp_store_dir
quiet=quiet, )
make_opts=None, finally:
iterate=True remove_dirs(directory=temp_store_dir)
) remove_dirs(
directory=os.path.join(
_requirements_maker( tempfile.gettempdir(),
name='rpc_known_requirements.txt', 'pip_build_root'
wheel_dir=wheel_dir, )
release=PYTHON_PACKAGES['known_release'],
build_dir=build_dir,
quiet=quiet,
make_opts=['--no-deps']
)
remove_dirs(
directory=os.path.join(
tempfile.gettempdir(),
'pip_build_root'
) )
)
def ensure_consistency(): def ensure_consistency():
@ -463,7 +523,7 @@ def ensure_consistency():
PYTHON_PACKAGES['base_release'].pop(key, None) PYTHON_PACKAGES['base_release'].pop(key, None)
def new_setup(user_args, input_path, output_path, quiet): def new_setup(user_args, input_path):
"""Discover all yaml files in the input directory.""" """Discover all yaml files in the input directory."""
LOG.info('Discovering input file(s)') LOG.info('Discovering input file(s)')
@ -483,35 +543,12 @@ def new_setup(user_args, input_path, output_path, quiet):
# Populate the package dict # Populate the package dict
LOG.info('Building the package list') LOG.info('Building the package list')
with IndicatorThread(debug=quiet): for var_file in var_files:
for var_file in var_files: package_dict(var_file=var_file)
package_dict(var_file=var_file)
# Ensure no general packages take precedence over the explicit ones # Ensure no general packages take precedence over the explicit ones
ensure_consistency() ensure_consistency()
# Get a timestamp and create a report file
utctime = datetime.datetime.utcnow()
utctime = utctime.strftime("%Y%m%d_%H%M%S")
backup_name = 'python-build-report-%s.json' % utctime
output_report_file = os.path.join(
output_path,
'json-reports',
backup_name
)
_mkdirs(os.path.dirname(output_report_file))
# Generate a timestamped report file
LOG.info('Generating packaging report [ %s ]', output_report_file)
with open(output_report_file, 'wb') as f:
f.write(
json.dumps(
PYTHON_PACKAGES,
indent=2,
sort_keys=True
)
)
def _error_handler(msg, system_exit=True): def _error_handler(msg, system_exit=True):
"""Handle and error logging and exit the application if needed. """Handle and error logging and exit the application if needed.
@ -570,6 +607,19 @@ def _user_args():
required=False, required=False,
default=None default=None
) )
parser.add_argument(
'--link-dir',
help='Path to the build links for all built wheels.',
required=False,
default=None
)
parser.add_argument(
'-r',
'--release',
help='Name of the release. Used for generating the json report.',
required=True,
default=None
)
opts = parser.add_mutually_exclusive_group() opts = parser.add_mutually_exclusive_group()
opts.add_argument( opts.add_argument(
'--debug', '--debug',
@ -617,50 +667,48 @@ def _mkdirs(path):
_error_handler(msg=error) _error_handler(msg=error)
def _store_git_repos(git_repos_path, quiet): def _store_git_repos(git_repos_path):
"""Clone and or update all git repos. """Clone and or update all git repos.
:param git_repos_path: ``str`` Path to where to store the git repos :param git_repos_path: ``str`` Path to where to store the git repos
:param quiet: ``bol`` Enable quiet mode.
""" """
_mkdirs(git_repos_path) _mkdirs(git_repos_path)
for retry in retryloop(3, delay=2, backoff=1): for retry in retryloop(3, delay=2, backoff=1):
for git_repo in GIT_REPOS: for git_repo in GIT_REPOS:
with IndicatorThread(debug=quiet): repo_name = os.path.basename(git_repo)
repo_name = os.path.basename(git_repo) if repo_name.endswith('.git'):
if repo_name.endswith('.git'): repo_name = repo_name.rstrip('git')
repo_name = repo_name.rstrip('git')
repo_path_name = os.path.join(git_repos_path, repo_name) repo_path_name = os.path.join(git_repos_path, repo_name)
if os.path.isdir(repo_path_name): if os.path.isdir(repo_path_name):
os.chdir(repo_path_name) os.chdir(repo_path_name)
LOG.debug('Updating git repo [ %s ]', repo_path_name) LOG.debug('Updating git repo [ %s ]', repo_path_name)
commands = [ commands = [
['git', 'fetch', '-p', 'origin'], ['git', 'fetch', '-p', 'origin'],
['git', 'pull'] ['git', 'pull']
] ]
else: else:
LOG.debug('Cloning into git repo [ %s ]', repo_path_name) LOG.debug('Cloning into git repo [ %s ]', repo_path_name)
commands = [ commands = [
['git', 'clone', git_repo, repo_path_name] ['git', 'clone', git_repo, repo_path_name]
] ]
for command in commands: for command in commands:
try: try:
ret_data = subprocess.check_call( ret_data = subprocess.check_call(
command, command,
stdout=LoggerWriter(), stdout=LoggerWriter(),
stderr=LoggerWriter() stderr=LoggerWriter()
)
if ret_data:
raise subprocess.CalledProcessError(
ret_data, command
) )
if ret_data: except subprocess.CalledProcessError as exp:
raise subprocess.CalledProcessError( LOG.warn('Process failure. Error: [ %s ]', str(exp))
ret_data, command retry()
) else:
except subprocess.CalledProcessError as exp: LOG.debug('Command return code: [ %s ]', ret_data)
LOG.warn('Process failure. Error: [ %s ]', str(exp))
retry()
else:
LOG.debug('Command return code: [ %s ]', ret_data)
def main(): def main():
@ -696,40 +744,109 @@ def main():
# Create the build path # Create the build path
LOG.info('Getting build path') LOG.info('Getting build path')
if user_args['build_dir'] is not None: indicator_kwargs = {
build_path = _get_abs_path(path=user_args['build_dir']) 'debug': user_args['debug'],
_mkdirs(path=build_path) 'quiet': user_args['quiet'],
else: 'note': 'Gather dependencies... '
build_path = tempfile.mkdtemp(prefix='rpc_wheels_build_') }
pre_input = user_args['pre_input'] with IndicatorThread(**indicator_kwargs):
if pre_input: if user_args['build_dir'] is not None:
pre_input_path = _get_abs_path(path=user_args['pre_input']) build_path = _get_abs_path(path=user_args['build_dir'])
with open(pre_input_path, 'rb') as f: _mkdirs(path=build_path)
global PYTHON_PACKAGES
PYTHON_PACKAGES = json.loads(f.read())
else: else:
# Get the input path build_path = tempfile.mkdtemp(prefix='rpc_wheels_build_')
LOG.info('Getting input path') pre_input = user_args['pre_input']
input_path = _get_abs_path(path=user_args['input']) if pre_input:
new_setup( pre_input_path = _get_abs_path(path=user_args['pre_input'])
user_args=user_args, with open(pre_input_path, 'rb') as f:
input_path=input_path, global PYTHON_PACKAGES
output_path=output_path, PYTHON_PACKAGES = json.loads(f.read())
quiet=stream else:
) # Get the input path
LOG.info('Getting input path')
new_setup(
user_args=user_args,
input_path=_get_abs_path(path=user_args['input'])
)
indicator_kwargs['note'] = 'Building wheels... '
with IndicatorThread(**indicator_kwargs):
# Create all of the python package wheels # Create all of the python package wheels
make_wheels( make_wheels(
wheel_dir=output_path, wheel_dir=output_path,
build_dir=build_path, build_dir=build_path
quiet=stream
) )
indicator_kwargs['note'] = 'Generating build log... '
with IndicatorThread(**indicator_kwargs):
# Get a timestamp and create a report file
utctime = datetime.datetime.utcnow()
utctime = utctime.strftime("%Y%m%d_%H%M%S")
backup_name = '%s-build-report-%s.json' % (
user_args['release'],
utctime
)
output_report_file = os.path.join(
output_path,
'json-reports',
backup_name
)
# Make the directory if needed
_mkdirs(path=os.path.dirname(output_report_file))
# Generate a timestamped report file
LOG.info('Generating packaging report [ %s ]', output_report_file)
with open(output_report_file, 'wb') as f:
f.write(
json.dumps(
PYTHON_PACKAGES,
indent=2,
sort_keys=True
)
)
# If link_dir is defined create a link to all built wheels.
links_path = user_args.get('link_dir')
if links_path:
indicator_kwargs['note'] = 'Creating file links... '
with IndicatorThread(**indicator_kwargs):
links_path = _get_abs_path(path=links_path)
LOG.info('Creating Links at [ %s ]', links_path)
_mkdirs(path=links_path)
# Change working directory.
os.chdir(links_path)
# Create all the links
for inode in PYTHON_PACKAGES['built_files']:
try:
dest_link = os.path.join(links_path, inode)
# Remove the destination inode if it exists
if os.path.exists(dest_link):
os.remove(dest_link)
# Create the link using the relative path
os.symlink(os.path.relpath(
os.path.join(output_path, inode)), dest_link
)
except OSError as exp:
LOG.warn(
'Error Creating Link: [ %s ] Error: [ %s ]',
inode,
exp
)
else:
LOG.debug('Link Created: [ %s ]', dest_link)
# if git_repos was defined save all of the sources to the defined location # if git_repos was defined save all of the sources to the defined location
git_repos_path = user_args.get('git_repos') git_repos_path = user_args.get('git_repos')
if git_repos_path: if git_repos_path:
_store_git_repos(git_repos_path, quiet=stream) indicator_kwargs['note'] = 'Storing updated git sources...'
with IndicatorThread(**indicator_kwargs):
LOG.info('Updating git sources [ %s ]', links_path)
_store_git_repos(_get_abs_path(path=git_repos_path))
if __name__ == "__main__": if __name__ == "__main__":

View File

@ -13,22 +13,128 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
# Notes:
# To use this script you MUST move it to some path that will be called.
# I recommend that the script be stored and executed from
# "/opt/rpc-wheel-builder.sh". This script is a wrapper script that relies
# on the "rpc-wheel-builder.py" and is executed from
# "/opt/rpc-wheel-builder.py".
# Overrides:
# This script has several things that can be overridden via environment
# variables.
# Git repository that the rcbops ansible lxc source code will be cloned from.
# This repo should be a repo that is available via HTTP.
# GIT_REPO=""
# The URI for the github api. This is ONLY used when the $RELEASES variable
# is an empty string. Which causes the script to go discover the available
# releases.
# GITHUB_API_ENDPOINT=""
# Local directory to store the source code while interacting with it.
# WORK_DIR=""
# Local directory to store the built wheels.
# OUTPUT_WHEEL_PATH=""
# Local directory to store known git repos.
# OUTPUT_GIT_PATH=""
# Space separated list of all releases to build for. If unset the releases
# will be discovered.
# RELEASES=""
# Space separated list of all releases to exclude from building. This is
# ONLY used when the $RELEASES variable is an empty string.
# EXCLUDE_RELEASES=""
set -e -o -v set -e -o -v
WORK_DIR="/opt/ansible-lxc-rpc" # Ensure there is a base path loaded
GIT_REPO="https://github.com/rcbops/ansible-lxc-rpc" export PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
REPO_PACKAGES_PATH="/opt/ansible-lxc-rpc/rpc_deployment/vars/repo_packages/"
OUTPUT_WHEEL_PATH="/var/www/repo/python_packages"
RELEASE=$1
rm -rf /tmp/rpc_wheels* # Defined variables
rm -rf /tmp/pip* GIT_REPO="${GIT_REPO:-https://github.com/rcbops/ansible-lxc-rpc}"
rm -rf "${WORK_DIR}" GITHUB_API_ENDPOINT="${GITHUB_API_ENDPOINT:-https://api.github.com/repos/rcbops/ansible-lxc-rpc}"
git clone "${GIT_REPO}" "${WORK_DIR}" WORK_DIR="${WORK_DIR:-/opt/ansible-lxc-rpc}"
pushd "${WORK_DIR}" REPO_PACKAGES_PATH="${WORK_DIR}/rpc_deployment/vars/repo_packages/"
git checkout "${RELEASE}"
popd
${WORK_DIR}/scripts/rpc-wheel-builder.py -i "${REPO_PACKAGES_PATH}" \ OUTPUT_WHEEL_PATH="${OUTPUT_WHEEL_PATH:-/var/www/repo/python_packages}"
-o "${OUTPUT_WHEEL_PATH}"/"${RELEASE}" OUTPUT_GIT_PATH="${OUTPUT_GIT_PATH:-/var/www/repo/rpcgit}"
# Default is an empty string which causes the script to go discover the available
# branches from the github API.
RELEASES=${RELEASES:-""}
EXCLUDE_RELEASES="${EXCLUDE_RELEASES:-v9.0.0 gh-pages revert}"
if [[ ! "${RELEASES}" ]];then
# From the GITHUB API pull a list of all branches/tags
RELEASES=$(
$(which python) <<EOF
import requests
# Create an array of excluded items
EXCLUDE = "${EXCLUDE_RELEASES}".split()
def return_releases(url):
"""Return a list of releases found in the github api.
:param url: ``str``
"""
_releases = requests.get(url)
loaded_releases = _releases.json()
releases = list()
for i in loaded_releases:
for k, v in i.iteritems():
if k == 'name':
# if the name is not excluded append it
if not any([v.startswith(i) for i in EXCLUDE]):
releases.append(v)
else:
# Return a unique list.
return list(set(releases))
all_releases = list()
all_releases.extend(return_releases(url="${GITHUB_API_ENDPOINT}/tags"))
all_releases.extend(return_releases(url="${GITHUB_API_ENDPOINT}/branches"))
print(' '.join(all_releases))
EOF
)
fi
function cleanup() {
# Ensure workspaces are cleaned up
rm -rf /tmp/rpc_wheels*
rm -rf /tmp/pip*
rm -rf "${WORK_DIR}"
}
# Iterate through the list of releases and build everything that's needed
for release in ${RELEASES}; do
# Perform cleanup
cleanup
# Git clone repo
git clone "${GIT_REPO}" "${WORK_DIR}"
# checkout release
pushd "${WORK_DIR}"
git checkout "${release}"
popd
# Build wheels
/opt/rpc-wheel-builder.py -i "${REPO_PACKAGES_PATH}" \
-o "${OUTPUT_WHEEL_PATH}"/pools \
--link-dir "${OUTPUT_WHEEL_PATH}"/"${release}" \
--git-repos "${OUTPUT_GIT_PATH}" \
--release "${release}"
done
# Perform cleanup
cleanup