Merge branch 'stable-2.16' into stable-3.0
* stable-2.16:
  FakeGroupAuditService: Make auditEvents final
  ErrorProne: Increase severity of MutableConstantField to ERROR
  ConfigUpdatedEvent: Use immutable type in field declaration
  Add missing slf4j dependencies in BUILD file
  JGitMetricModule: fix WindowCache miss ratio which wasn't reported
  Revert "JGitMetricModule: Replace anonymous Supplier instances with method references"
  Suppress generic logging on docker start for ElasticSearch container
  Replace deprecated OptionParser with ArgumentParser

Change-Id: I36a7ce5c043bd43d90fb415fcec67440db2b5c84
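Most of the Python script changes below follow the same optparse-to-argparse migration pattern. A minimal sketch of that pattern, with illustrative option names that are not taken from any particular script in this merge:

    import argparse

    # optparse style was: opts = OptionParser(); opts.add_option(...); options, _ = opts.parse_args()
    parser = argparse.ArgumentParser()
    parser.add_argument('-o', '--out', help='output file')
    # argparse expects a real callable for type (type=int), not the string type='int'
    parser.add_argument('-u', '--user-count', type=int, default=100,
                        help='number of users to generate')
    # parse_args() returns a single Namespace instead of an (options, args) tuple
    args = parser.parse_args()
    print(args.out, args.user_count)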
@@ -14,7 +14,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from optparse import OptionParser
+import argparse
 import re
 import sys
 
@@ -239,34 +239,34 @@ LINK_SCRIPT = """
 
 """
 
-opts = OptionParser()
-opts.add_option('-o', '--out', help='output file')
-opts.add_option('-s', '--src', help='source file')
-opts.add_option('-x', '--suffix', help='suffix for included filenames')
-opts.add_option('-b', '--searchbox', action="store_true", default=True,
+parser = argparse.ArgumentParser()
+parser.add_argument('-o', '--out', help='output file')
+parser.add_argument('-s', '--src', help='source file')
+parser.add_argument('-x', '--suffix', help='suffix for included filenames')
+parser.add_argument('-b', '--searchbox', action="store_true", default=True,
                 help="generate the search boxes")
-opts.add_option('--no-searchbox', action="store_false", dest='searchbox',
+parser.add_argument('--no-searchbox', action="store_false", dest='searchbox',
                 help="don't generate the search boxes")
-opts.add_option('--site-search', action="store", metavar="SITE",
+parser.add_argument('--site-search', action="store", metavar="SITE",
                 help=("generate the search box using google. SITE should " +
                       "point to the domain/path of the site, eg. " +
                       "gerrit-review.googlesource.com/Documentation"))
-options, _ = opts.parse_args()
+args = parser.parse_args()
 
-if options.site_search:
+if args.site_search:
   SEARCH_BOX = (SEARCH_BOX %
-                GOOGLE_SITE_SEARCH.replace("@SITE@", options.site_search))
+                GOOGLE_SITE_SEARCH.replace("@SITE@", args.site_search))
 else:
   SEARCH_BOX = SEARCH_BOX % BUILTIN_SEARCH
 
 
 try:
   try:
-    out_file = open(options.out, 'w', errors='ignore')
-    src_file = open(options.src, 'r', errors='ignore')
+    out_file = open(args.out, 'w', errors='ignore')
+    src_file = open(args.src, 'r', errors='ignore')
   except TypeError:
-    out_file = open(options.out, 'w')
-    src_file = open(options.src, 'r')
+    out_file = open(args.out, 'w')
+    src_file = open(args.src, 'r')
   last_line = ''
   ignore_next_line = False
   last_title = ''
@@ -277,14 +277,14 @@ try:
       last_line = ''
     elif PAT_SEARCHBOX.match(last_line):
       # Case of 'SEARCHBOX\n---------'
-      if options.searchbox:
+      if args.searchbox:
         out_file.write(SEARCH_BOX)
       last_line = ''
     elif PAT_INCLUDE.match(line):
       # Case of 'include::<filename>'
       match = PAT_INCLUDE.match(line)
       out_file.write(last_line)
-      last_line = match.group(1) + options.suffix + match.group(2) + '\n'
+      last_line = match.group(1) + args.suffix + match.group(2) + '\n'
     elif PAT_STARS.match(line):
       if PAT_TITLE.match(last_line):
         # Case of the title in '.<title>\n****\nget::<url>\n****'
@@ -310,5 +310,5 @@ try:
   out_file.close()
 except IOError as err:
   sys.stderr.write(
-    "error while expanding %s to %s: %s" % (options.src, options.out, err))
+    "error while expanding %s to %s: %s" % (args.src, args.out, err))
   exit(1)
@@ -22,9 +22,9 @@ TODO(hiesel): Add comments
 """
 
 from __future__ import print_function
+import argparse
 import atexit
 import json
-import optparse
 import os
 import random
 import shutil
@@ -275,22 +275,22 @@ def clean_up():
 
 
 def main():
-  p = optparse.OptionParser()
-  p.add_option("-u", "--user_count", action="store",
+  p = argparse.ArgumentParser()
+  p.add_argument("-u", "--user_count", action="store",
                default=100,
-               type='int',
+               type=int,
                help="number of users to generate")
-  p.add_option("-p", "--port", action="store",
+  p.add_argument("-p", "--port", action="store",
                default=8080,
-               type='int',
+               type=int,
                help="port of server")
-  (options, _) = p.parse_args()
+  args = p.parse_args()
   global BASE_URL
-  BASE_URL = BASE_URL % options.port
+  BASE_URL = BASE_URL % args.port
   print(BASE_URL)
 
   set_up()
-  gerrit_users = get_random_users(options.user_count)
+  gerrit_users = get_random_users(args.user_count)
 
   group_names = create_gerrit_groups()
   for idx, u in enumerate(gerrit_users):
@@ -14,6 +14,7 @@
 
 package com.google.gerrit.metrics.proc;
 
+import com.google.common.base.Supplier;
 import com.google.gerrit.metrics.CallbackMetric1;
 import com.google.gerrit.metrics.Description;
 import com.google.gerrit.metrics.Description.Units;
@@ -33,13 +34,23 @@ public class JGitMetricModule extends MetricModule {
         new Description("Bytes of memory retained in JGit block cache.")
             .setGauge()
             .setUnit(Units.BYTES),
-        WindowCacheStats.getStats()::getOpenByteCount);
+        new Supplier<Long>() {
+          @Override
+          public Long get() {
+            return WindowCacheStats.getStats().getOpenByteCount();
+          }
+        });
 
     metrics.newCallbackMetric(
         "jgit/block_cache/open_files",
         Long.class,
         new Description("File handles held open by JGit block cache.").setGauge().setUnit("fds"),
-        WindowCacheStats.getStats()::getOpenFileCount);
+        new Supplier<Long>() {
+          @Override
+          public Long get() {
+            return WindowCacheStats.getStats().getOpenFileCount();
+          }
+        });
 
     metrics.newCallbackMetric(
         "jgit/block_cache/avg_load_time",
@@ -47,61 +58,110 @@ public class JGitMetricModule extends MetricModule {
         new Description("Average time to load a cache entry for JGit block cache.")
             .setGauge()
            .setUnit(Units.NANOSECONDS),
-        WindowCacheStats.getStats()::getAverageLoadTime);
+        new Supplier<Double>() {
+          @Override
+          public Double get() {
+            return WindowCacheStats.getStats().getAverageLoadTime();
+          }
+        });
 
     metrics.newCallbackMetric(
         "jgit/block_cache/eviction_count",
         Long.class,
         new Description("Cache evictions for JGit block cache.").setGauge(),
-        WindowCacheStats.getStats()::getEvictionCount);
+        new Supplier<Long>() {
+          @Override
+          public Long get() {
+            return WindowCacheStats.getStats().getEvictionCount();
+          }
+        });
 
     metrics.newCallbackMetric(
         "jgit/block_cache/eviction_ratio",
         Double.class,
         new Description("Cache eviction ratio for JGit block cache.").setGauge(),
-        WindowCacheStats.getStats()::getEvictionRatio);
+        new Supplier<Double>() {
+          @Override
+          public Double get() {
+            return WindowCacheStats.getStats().getEvictionRatio();
+          }
+        });
 
     metrics.newCallbackMetric(
         "jgit/block_cache/hit_count",
         Long.class,
         new Description("Cache hits for JGit block cache.").setGauge(),
-        WindowCacheStats.getStats()::getHitCount);
+        new Supplier<Long>() {
+          @Override
+          public Long get() {
+            return WindowCacheStats.getStats().getHitCount();
+          }
+        });
 
     metrics.newCallbackMetric(
         "jgit/block_cache/hit_ratio",
         Double.class,
         new Description("Cache hit ratio for JGit block cache.").setGauge(),
-        WindowCacheStats.getStats()::getHitRatio);
+        new Supplier<Double>() {
+          @Override
+          public Double get() {
+            return WindowCacheStats.getStats().getHitRatio();
+          }
+        });
 
     metrics.newCallbackMetric(
         "jgit/block_cache/load_failure_count",
         Long.class,
         new Description("Failed cache loads for JGit block cache.").setGauge(),
-        WindowCacheStats.getStats()::getLoadFailureCount);
+        new Supplier<Long>() {
+          @Override
+          public Long get() {
+            return WindowCacheStats.getStats().getLoadFailureCount();
+          }
+        });
 
     metrics.newCallbackMetric(
         "jgit/block_cache/load_failure_ratio",
         Double.class,
         new Description("Failed cache load ratio for JGit block cache.").setGauge(),
-        WindowCacheStats.getStats()::getLoadFailureRatio);
+        new Supplier<Double>() {
+          @Override
+          public Double get() {
+            return WindowCacheStats.getStats().getLoadFailureRatio();
+          }
+        });
 
     metrics.newCallbackMetric(
         "jgit/block_cache/load_success_count",
         Long.class,
         new Description("Successfull cache loads for JGit block cache.").setGauge(),
-        WindowCacheStats.getStats()::getLoadSuccessCount);
-
+        new Supplier<Long>() {
+          @Override
+          public Long get() {
+            return WindowCacheStats.getStats().getLoadSuccessCount();
+          }
+        });
     metrics.newCallbackMetric(
         "jgit/block_cache/miss_count",
         Long.class,
         new Description("Cache misses for JGit block cache.").setGauge(),
-        WindowCacheStats.getStats()::getMissCount);
+        new Supplier<Long>() {
+          @Override
+          public Long get() {
+            return WindowCacheStats.getStats().getMissCount();
+          }
+        });
 
     metrics.newCallbackMetric(
         "jgit/block_cache/miss_ratio",
         Double.class,
         new Description("Cache miss ratio for JGit block cache.").setGauge(),
-        WindowCacheStats.getStats()::getMissRatio);
+        new Supplier<Double>() {
+          @Override
+          public Double get() {
+            return WindowCacheStats.getStats().getMissRatio();
+          }
+        });
 
     CallbackMetric1<String, Long> repoEnt =
         metrics.newCallbackMetric(
@@ -18,6 +18,7 @@ java_library(
         "//lib/guice",
         "//lib/httpcomponents:httpcore",
         "//lib/jgit/org.eclipse.jgit:jgit",
+        "//lib/log:api",
         "//lib/testcontainers",
         "//lib/testcontainers:testcontainers-elasticsearch",
     ],
@@ -16,6 +16,8 @@ package com.google.gerrit.elasticsearch;
 
 import org.apache.http.HttpHost;
 import org.junit.AssumptionViolatedException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.testcontainers.elasticsearch.ElasticsearchContainer;
 
 /* Helper class for running ES integration tests in docker container */
@@ -74,6 +76,11 @@ public class ElasticContainer extends ElasticsearchContainer {
     super(getImageName(version));
   }
 
+  @Override
+  protected Logger logger() {
+    return LoggerFactory.getLogger("org.testcontainers");
+  }
+
   public HttpHost getHttpHost() {
     return new HttpHost(getContainerIpAddress(), getMappedPort(ELASTICSEARCH_DEFAULT_PORT));
   }
@@ -4,6 +4,7 @@ java_library(
     name = "api",
     data = ["//lib:LICENSE-slf4j"],
     visibility = [
+        "//javatests/com/google/gerrit/elasticsearch:__pkg__",
         "//lib/jgit/org.eclipse.jgit:__pkg__",
         "//plugins:__pkg__",
     ],
@@ -15,8 +15,8 @@
 
 from __future__ import print_function
 
+import argparse
 from hashlib import sha1
-from optparse import OptionParser
 from os import link, makedirs, path, remove
 import shutil
 from subprocess import check_call, CalledProcessError
@@ -75,13 +75,13 @@ def cache_entry(args):
   return path.join(CACHE_DIR, name)
 
 
-opts = OptionParser()
-opts.add_option('-o', help='local output file')
-opts.add_option('-u', help='URL to download')
-opts.add_option('-v', help='expected content SHA-1')
-opts.add_option('-x', action='append', help='file to delete from ZIP')
-opts.add_option('--exclude_java_sources', action='store_true')
-args, _ = opts.parse_args()
+parser = argparse.ArgumentParser()
+parser.add_argument('-o', help='local output file')
+parser.add_argument('-u', help='URL to download')
+parser.add_argument('-v', help='expected content SHA-1')
+parser.add_argument('-x', action='append', help='file to delete from ZIP')
+parser.add_argument('--exclude_java_sources', action='store_true')
+args = parser.parse_args()
 
 root_dir = args.o
 while root_dir and path.dirname(root_dir) != root_dir:
@@ -22,10 +22,10 @@ python tools/js/bower2bazel.py -w lib/js/bower_archives.bzl \
 
 from __future__ import print_function
 
+import argparse
 import collections
 import json
 import hashlib
-import optparse
 import os
 import subprocess
 import sys
@@ -141,11 +141,11 @@ def bower_command(args):
           os.getcwd() + "/tools/js/run_npm_binary.py", sorted(fs)[0]] + args
 
 
-def main(args):
-  opts = optparse.OptionParser()
-  opts.add_option('-w', help='.bzl output for WORKSPACE')
-  opts.add_option('-b', help='.bzl output for //lib:BUILD')
-  opts, args = opts.parse_args()
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('-w', help='.bzl output for WORKSPACE')
+  parser.add_argument('-b', help='.bzl output for //lib:BUILD')
+  args = parser.parse_args()
 
   target_str = subprocess.check_output([
       "bazel", "query", "kind(bower_component_bundle, //polygerrit-ui/...)"])
@@ -166,12 +166,12 @@ def main(args):
   cmd = bower_command(["install"])
 
   build_out = sys.stdout
-  if opts.b:
-    build_out = open(opts.b + ".tmp", 'w')
+  if args.b:
+    build_out = open(args.b + ".tmp", 'w')
 
   ws_out = sys.stdout
-  if opts.b:
-    ws_out = open(opts.w + ".tmp", 'w')
+  if args.b:
+    ws_out = open(args.w + ".tmp", 'w')
 
   header = """# DO NOT EDIT
 # generated with the following command:
@@ -193,8 +193,8 @@ def main(args):
   build_out.close()
 
   os.chdir(oldwd)
-  os.rename(opts.w + ".tmp", opts.w)
-  os.rename(opts.b + ".tmp", opts.b)
+  os.rename(args.w + ".tmp", args.w)
+  os.rename(args.b + ".tmp", args.b)
 
 
 def dump_workspace(data, seeds, out):
@@ -259,4 +259,4 @@ def interpret_bower_json(seeds, ws_out, build_out):
 
 
 if __name__ == '__main__':
-  main(sys.argv[1:])
+  main()
@@ -15,9 +15,9 @@
 
 from __future__ import print_function
 
+import argparse
 import hashlib
 import json
-import optparse
 import os
 import shutil
 import subprocess
@@ -80,44 +80,44 @@ def cache_entry(name, package, version, sha1):
   return os.path.join(CACHE_DIR, '%s-%s.zip-%s' % (name, version, sha1))
 
 
-def main(args):
-  opts = optparse.OptionParser()
-  opts.add_option('-n', help='short name of component')
-  opts.add_option('-b', help='bower command')
-  opts.add_option('-p', help='full package name of component')
-  opts.add_option('-v', help='version number')
-  opts.add_option('-s', help='expected content sha1')
-  opts.add_option('-o', help='output file location')
-  opts, args_ = opts.parse_args(args)
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('-n', help='short name of component')
+  parser.add_argument('-b', help='bower command')
+  parser.add_argument('-p', help='full package name of component')
+  parser.add_argument('-v', help='version number')
+  parser.add_argument('-s', help='expected content sha1')
+  parser.add_argument('-o', help='output file location')
+  args = parser.parse_args()
 
-  assert opts.p
-  assert opts.v
-  assert opts.n
+  assert args.p
+  assert args.v
+  assert args.n
 
   cwd = os.getcwd()
-  outzip = os.path.join(cwd, opts.o)
-  cached = cache_entry(opts.n, opts.p, opts.v, opts.s)
+  outzip = os.path.join(cwd, args.o)
+  cached = cache_entry(args.n, args.p, args.v, args.s)
 
   if not os.path.exists(cached):
-    info = bower_info(opts.b, opts.n, opts.p, opts.v)
+    info = bower_info(args.b, args.n, args.p, args.v)
     ignore_deps(info)
     subprocess.check_call(
       bower_cmd(
-        opts.b, '--quiet', 'install', '%s#%s' % (opts.p, opts.v)))
+        args.b, '--quiet', 'install', '%s#%s' % (args.p, args.v)))
     bc = os.path.join(cwd, 'bower_components')
     subprocess.check_call(
-      ['zip', '-q', '--exclude', '.bower.json', '-r', cached, opts.n],
+      ['zip', '-q', '--exclude', '.bower.json', '-r', cached, args.n],
       cwd=bc)
 
-  if opts.s:
-    path = os.path.join(bc, opts.n)
+  if args.s:
+    path = os.path.join(bc, args.n)
     sha1 = bowerutil.hash_bower_component(
       hashlib.sha1(), path).hexdigest()
-    if opts.s != sha1:
+    if args.s != sha1:
      print((
         '%s#%s:\n'
         'expected %s\n'
-        'received %s\n') % (opts.p, opts.v, opts.s, sha1),
+        'received %s\n') % (args.p, args.v, args.s, sha1),
         file=sys.stderr)
       try:
         os.remove(cached)
@@ -132,4 +132,4 @@ def main(args):
 
 
 if __name__ == '__main__':
-  sys.exit(main(sys.argv[1:]))
+  sys.exit(main())
@@ -14,20 +14,20 @@
 # limitations under the License.
 
 from __future__ import print_function
-from optparse import OptionParser
+import argparse
 from os import path, environ
 from subprocess import check_output, CalledProcessError
 from sys import stderr
 
-opts = OptionParser()
-opts.add_option('--repository', help='maven repository id')
-opts.add_option('--url', help='maven repository url')
-opts.add_option('-o')
-opts.add_option('-a', help='action (valid actions are: install,deploy)')
-opts.add_option('-v', help='gerrit version')
-opts.add_option('-s', action='append', help='triplet of artifactId:type:path')
+parser = argparse.ArgumentParser()
+parser.add_argument('--repository', help='maven repository id')
+parser.add_argument('--url', help='maven repository url')
+parser.add_argument('-o')
+parser.add_argument('-a', help='action (valid actions are: install,deploy)')
+parser.add_argument('-v', help='gerrit version')
+parser.add_argument('-s', action='append', help='triplet of artifactId:type:path')
+args = parser.parse_args()
 
-args, ctx = opts.parse_args()
 if not args.v:
   print('version is empty', file=stderr)
   exit(1)
@@ -14,21 +14,16 @@
 # limitations under the License.
 
 from __future__ import print_function
-from optparse import OptionParser
+import argparse
 import os.path
 import re
 import sys
 
-parser = OptionParser()
-opts, args = parser.parse_args()
+parser = argparse.ArgumentParser()
+parser.add_argument('version')
+args = parser.parse_args()
 
-if not len(args):
-  parser.error('not enough arguments')
-elif len(args) > 1:
-  parser.error('too many arguments')
-
-DEST_PATTERN = r'\g<1>%s\g<3>' % args[0]
+DEST_PATTERN = r'\g<1>%s\g<3>' % args.version
 
 
 def replace_in_file(filename, src_pattern):
   try: