# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
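"""List deliverables for a release series that match the given filters."""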

import argparse
import csv
import operator

import requests

import openstack_releases
from openstack_releases import defaults
from openstack_releases import deliverable
from openstack_releases import schema


def main():
    deliverable_schema = schema.Schema()

    parser = argparse.ArgumentParser()
    output_mode = parser.add_mutually_exclusive_group()
    output_mode.add_argument(
        '-v', '--verbose',
        action='store_true',
        default=False,
        help='show more than the deliverable name',
    )
    output_mode.add_argument(
        '-r', '--repos',
        action='store_true',
        default=False,
        help='show the repository names not deliverable names',
    )
    output_mode.add_argument(
        '-a', '--all-releases',
        action='store_true',
        default=False,
        help='show all of the releases for each deliverable',
    )
    parser.add_argument(
        '--group-by',
        dest='group_key',
        default=None,
        choices=['team', 'type', 'model'],
        help='group output by the specified value',
    )
    parser.add_argument(
        '--team',
        help='the name of the project team, such as "Nova" or "Oslo"',
    )
    parser.add_argument(
        '--deliverable',
        help='the name of the deliverable, such as "nova" or "oslo.config"',
    )
    parser.add_argument(
        '--series',
        default=defaults.RELEASE,
        help='the release series, such as "newton" or "ocata"',
    )
    parser.add_argument(
        '--csvfile',
        help='Save results (same as when --verbose) to CSV file',
    )
    parser.add_argument(
        '--show-dates',
        action='store_true',
        default=False,
        help='Show last release date (in verbose mode)',
    )
    parser.add_argument(
        '--show-tags',
        action='store_true',
        default=False,
        help='Show tags associated with deliverable (in verbose mode)',
    )
    model = parser.add_mutually_exclusive_group()
    model.add_argument(
        '--model',
        help=('the release model, such as "cycle-with-rc"'
              ' or "independent"'),
        choices=sorted(deliverable_schema.release_models + ['independent']),
    )
    model.add_argument(
        '--cycle-based',
        action='store_true',
        default=False,
        help='include all cycle-based deliverables',
    )
    model.add_argument(
        '--cycle-based-no-trailing',
        action='store_true',
        default=False,
        help='include all cycle-based deliverables, except trailing ones',
    )
    parser.add_argument(
        '--type',
        default=[],
        action='append',
        choices=sorted(deliverable_schema.release_types),
        help='deliverable type, such as "library" or "service"',
    )
    parser.add_argument(
        '--tag',
        default=[],
        action='append',
        help='look for one or more tags on the deliverable or team',
    )
    parser.add_argument(
        '--deliverables-dir',
        default=openstack_releases.deliverable_dir,
        help='location of deliverable files',
    )
    parser.add_argument(
        '--no-stable-branch',
        default=False,
        action='store_true',
        help='limit the list to deliverables without a stable branch',
    )
    grp = parser.add_mutually_exclusive_group()
    grp.add_argument(
        '--unreleased',
        default=False,
        action='store_true',
        help='limit the list to deliverables not released in the cycle',
    )
    grp.add_argument(
        '--unreleased-since',
        help=('limit the list to deliverables not released in the cycle '
              'since a given YYYY-MM-DD date'),
    )
    grp.add_argument(
        '--missing-rc',
        action='store_true',
        help=('deliverables that do not have a release candidate, yet '
              '(implies --model cycle-with-rc)'),
    )
    grp.add_argument(
        '--is-eol',
        action='store_true',
        help='limit the list to deliverables EOL in the cycle',
    )
    grp.add_argument(
        '--missing-final',
        action='store_true',
        help='deliverables that have pre-releases but no final releases, yet',
    )
    grp.add_argument(
        '--forced',
        action='store_true',
        help=('releases that have the "forced" flag applied '
              '(implies --all-releases)'),
    )
    args = parser.parse_args()

    series = args.series
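    # opendev.org Gitea API endpoint templates, used below to resolve a
    # release tag to its commit so the release date can be read from the
    # commit metadata.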
    GET_REFS_API = 'https://opendev.org/api/v1/repos/{}/git/{}'
    GET_COMMIT_API = 'https://opendev.org/api/v1/repos/{}/git/commits/{}'
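    # --missing-rc implies filtering on the cycle-with-rc model; the other
    # branches keep whatever --model was given. All branches currently leave
    # version_ending as None, so the version-suffix filter further down is
    # effectively a no-op.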
    if args.missing_rc:
        model = 'cycle-with-rc'
        version_ending = None
    elif args.missing_final:
        model = args.model
        version_ending = None
    else:
        model = args.model
        version_ending = None

    if args.unreleased_since:
        args.show_dates = True
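
    # Assemble the verbose output columns; optional columns are only added
    # when the corresponding option or filter makes them meaningful.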
    verbose_template = '{name:30} {team:20}'
    if not args.unreleased:
        verbose_template += ' {latest_release:12}'
    if args.show_dates:
        verbose_template += ' {last_release_date:11}'
    if len(args.type) != 1:
        verbose_template += ' {type:15}'
    if not args.model:
        verbose_template += ' {model:15}'
    if args.show_tags:
        verbose_template += ' {tags}'

    if args.forced:
        args.all_releases = True

    csvfile = None
    if args.csvfile:
        csvfile = open(args.csvfile, 'w')
        fieldnames = ['name', 'latest_release', 'repo', 'hash',
                      'team', 'type', 'model']
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
        writer.writeheader()

    all_deliv = deliverable.Deliverables(
        root_dir=args.deliverables_dir,
        collapse_history=False,
    )
    deliv_iter = list(all_deliv.get_deliverables(args.team, series))
    if args.group_key:
        deliv_iter = sorted(deliv_iter,
                            key=operator.attrgetter(args.group_key))
        name_fmt = ' {}'
    else:
        name_fmt = '{}'
    cur_group = None
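    # Walk all deliverables for the series and skip anything that does not
    # match the requested filters; whatever survives is reported below.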
    for deliv in deliv_iter:
        if args.group_key:
            deliv_group = getattr(deliv, args.group_key)

        if args.deliverable and deliv.name != args.deliverable:
            continue
        if model and deliv.model != model:
            continue
        if args.cycle_based and not deliv.is_cycle_based:
            continue
        if args.cycle_based_no_trailing and (not deliv.is_cycle_based or
                                             deliv.type == 'trailing'):
            continue
        if args.type and deliv.type not in args.type:
            continue
        if args.no_stable_branch:
            if deliv.is_branchless:
                continue
            if deliv.name == 'release-test':
                continue
            if deliv.stable_branch_type is None:
                continue
            if deliv.get_branch_location('stable/' + series) is not None:
                continue
        if args.unreleased and (deliv.is_released or not deliv.is_releasable):
            continue
        if args.is_eol and 'eol' not in deliv.latest_release:
            continue
        if version_ending and deliv.is_released:
            found = False
            for release in deliv.releases:
                if release.version.endswith(version_ending):
                    found = True
                    break
            if found:
                continue
        if args.missing_rc and deliv.is_released and 'rc' in deliv.latest_release:
            continue
        if args.tag:
            tags = deliv.tags
            ignore = False
            for t in args.tag:
                if t not in tags:
                    ignore = True
                    break
            if ignore:
                continue

        tag_str = '(' + ', '.join(deliv.tags) + ')'

        if args.missing_final and deliv.latest_release:
            if not ('rc' in deliv.latest_release or
                    'a' in deliv.latest_release or
                    'b' in deliv.latest_release):
                continue
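
        # When dates are needed, resolve each release tag through the
        # opendev.org API and record the commit author date (YYYY-MM-DD).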
        release_date = {}
        if (args.show_dates or args.unreleased_since) and deliv.is_released:
            if args.all_releases:
                versions = [a.version for a in deliv.releases]
            else:
                versions = [deliv.releases[-1].version]
            for ver in versions:
                ref = "refs/tags/{}".format(ver)
                api = GET_REFS_API.format(deliv.repos[0], ref)
                tagsjson = requests.get(api).json()

                # Gitea returns either a single tag object, or a list of
                # tag objects containing the provided string. So we need to
                # filter the list for the exact match.
                if isinstance(tagsjson, list):
                    for release_tag in tagsjson:
                        if release_tag['ref'] == ref:
                            break
                else:
                    release_tag = tagsjson

                release_sha = release_tag['object']['sha']
                api = GET_COMMIT_API.format(deliv.repos[0], release_sha)
                release_commit = requests.get(api).json()['commit']
                release_date[ver] = release_commit['author']['date'][0:10]
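
        # ISO YYYY-MM-DD strings compare lexicographically in date order,
        # so a plain string comparison is enough here.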
        if args.unreleased_since and deliv.is_released:
            if release_date[ver] >= args.unreleased_since:
                continue
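
        # Emit the surviving deliverable in the selected output format:
        # CSV file, all releases, verbose line, repository names, or just
        # the deliverable name.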
        if csvfile:
            rel = (deliv.releases or [{}])[-1]
            for prj in rel.get('projects', [{}]):
                writer.writerow({
                    'name': deliv.name,
                    'latest_release': rel.get('version', None),
                    'repo': prj.get('repo', None),
                    'hash': prj.get('hash', None),
                    'team': deliv.team,
                    'type': deliv.type,
                    'model': deliv.model,
                })
        elif args.all_releases:
            for r in deliv.releases:
                if args.forced and not r.was_forced:
                    continue
                print(verbose_template.format(
                    name=deliv.name,
                    latest_release=r.version,
                    last_release_date=release_date.get(r.version, ''),
                    team=deliv.team,
                    type=deliv.type,
                    model=deliv.model,
                    tags=tag_str,
                ))
        elif args.verbose:
            print(verbose_template.format(
                name=deliv.name,
                latest_release=deliv.latest_release or '',
                last_release_date=release_date.get(deliv.latest_release, ''),
                team=deliv.team,
                type=deliv.type,
                model=deliv.model,
                tags=tag_str,
            ))
        elif args.repos:
            if args.group_key and cur_group != deliv_group:
                cur_group = deliv_group
                print(cur_group)
            for r in sorted(deliv.repos):
                print(name_fmt.format(r))
        else:
            if args.group_key and cur_group != deliv_group:
                cur_group = deliv_group
                print(cur_group)
            print(name_fmt.format(deliv.name))

    if csvfile:
        csvfile.close()