Merge branch 'stable-2.15'
* stable-2.15:
  AsyncReceiveCommits#onPreReceive: Early exit when commands already processed
  Replace download links for releases index page
  concept-changes: Fix 'invalid reference: topic' warning
  concept-changes: Fix 'invalid reference: submit-strategy' warning
  Set version to 2.14.14
  Update jruby to 9.1.17 and asciidoctorj to v1.5.7
  Elasticsearch: Ensure request URI is always prefixed with "/"
  AbstractElasticIndex: Factor out more variants of {post,perform}Request
  rest-api-projects: Fix "unterminated listing block" warning
  config-gerrit: Fix "invalid reference: database.h2.cachesize" warning
  Bazel: Specify name for downloaded file to http_file starlark rule
  Set version to 2.14.14-SNAPSHOT
  CreateProject: Provide signed push option on project creation
  Revert "AsyncReceiveCommits: Move ReceiveCommits into Worker"

Change-Id: I839334906060534e8dd9bd2658c6eb57ccd79c69
commit b6fa44f8b5
@@ -55,7 +55,7 @@ are not required to review it.
 |An optional topic.
 
 |Strategy
-|The <<submit-strategy>> for the change.
+|The <<submit-strategies,submit strategy>> for the change.
 
 |Code Review
 |Displays the Code Review status for the change.
@@ -84,10 +84,10 @@ listed next to the change message. These related changes are grouped together in
 several categories, including:
 
 * Relation Chain. These changes are related by parent-child relationships,
-regardless of <<topics>>.
+regardless of <<topic,topic>>.
 * Merge Conflicts. These are changes in which there is a merge conflict with
 the current change.
-* Submitted Together. These are changes that share the same <<topics>>.
+* Submitted Together. These are changes that share the same <<topic,topic>>.
 
 An arrow indicates the change you are currently viewing.
 
@@ -690,7 +690,7 @@ H2 uses memory to cache its database content. The parameter `h2CacheSize`
 allows to limit the memory used by H2 and thus prevent out-of-memory
 caused by the H2 database using too much memory.
 +
-See <<database.h2.cachesize,database.h2.cachesize>> for a detailed discussion.
+See <<database.h2.cacheSize,database.h2.cacheSize>> for a detailed discussion.
 +
 Default is unset, using up to half of the available memory.
 +
@@ -3570,6 +3570,12 @@ Whether the usage of Change-Ids is required for the project (`TRUE`,
 `FALSE`, `INHERIT`).
 This property is deprecated and will be removed in
 a future release.
+|`enable_signed_push` |`INHERIT` if not set|
+Whether signed push validation is enabled on the project (`TRUE`,
+`FALSE`, `INHERIT`).
+|`require_signed_push` |`INHERIT` if not set|
+Whether signed push validation is required on the project (`TRUE`,
+`FALSE`, `INHERIT`).
 |`max_object_size_limit` |optional|
 Max allowed Git object size for this project.
 Common unit suffixes of 'k', 'm', or 'g' are supported.
@@ -3707,8 +3713,6 @@ The path to the `GerritSiteHeader.html` file.
 The path to the `GerritSiteFooter.html` file.
 |=============================
 
-----
-
 GERRIT
 ------
 Part of link:index.html[Gerrit Code Review]
@@ -25,6 +25,7 @@ http_archive(
 # https://github.com/google/closure-compiler/blob/master/contrib/externs/polymer-1.0.js
 http_file(
     name = "polymer_closure",
+    downloaded_file_path = "polymer_closure.js",
     sha256 = "5a589bdba674e1fec7188e9251c8624ebf2d4d969beb6635f9148f420d1e08b1",
     urls = ["https://raw.githubusercontent.com/google/closure-compiler/775609aad61e14aef289ebec4bfc09ad88877f9e/contrib/externs/polymer-1.0.js"],
 )
@@ -1066,14 +1067,14 @@ maven_jar(
 
 maven_jar(
     name = "asciidoctor",
-    artifact = "org.asciidoctor:asciidoctorj:1.5.6",
-    sha1 = "bb757d4b8b0f8438ce2ed781f6688cc6c01d9237",
+    artifact = "org.asciidoctor:asciidoctorj:1.5.7",
+    sha1 = "8e8c1d8fc6144405700dd8df3b177f2801ac5987",
 )
 
 maven_jar(
     name = "jruby",
-    artifact = "org.jruby:jruby-complete:9.1.13.0",
-    sha1 = "8903bf42272062e87a7cbc1d98919e0729a9939f",
+    artifact = "org.jruby:jruby-complete:9.1.17.0",
+    sha1 = "76716d529710fc03d1d429b43e3cedd4419f78d4",
 )
 
 maven_jar(
@@ -502,6 +502,8 @@ public abstract class AbstractDaemonTest {
       in.useSignedOffBy = ann.useSignedOffBy();
       in.useContentMerge = ann.useContentMerge();
       in.rejectEmptyCommit = ann.rejectEmptyCommit();
+      in.enableSignedPush = ann.enableSignedPush();
+      in.requireSignedPush = ann.requireSignedPush();
     } else {
       // Defaults should match TestProjectConfig, omitting nullable values.
       in.createEmptyCommit = true;
@@ -47,6 +47,10 @@ public @interface TestProjectInput {
 
   InheritableBoolean rejectEmptyCommit() default InheritableBoolean.INHERIT;
 
+  InheritableBoolean enableSignedPush() default InheritableBoolean.INHERIT;
+
+  InheritableBoolean requireSignedPush() default InheritableBoolean.INHERIT;
+
   // Fields specific to acceptance test behavior.
 
   /** Username to use for initial clone, passed to {@link AccountCreator}. */
@@ -26,6 +26,7 @@ import com.google.common.collect.ListMultimap;
 import com.google.common.collect.Lists;
 import com.google.common.flogger.FluentLogger;
 import com.google.common.io.CharStreams;
+import com.google.gerrit.common.Nullable;
 import com.google.gerrit.elasticsearch.ElasticMapping.MappingProperties;
 import com.google.gerrit.elasticsearch.builders.QueryBuilder;
 import com.google.gerrit.elasticsearch.builders.SearchSourceBuilder;
@@ -160,7 +161,7 @@ abstract class AbstractElasticIndex<K, V> implements Index<K, V> {
   @Override
   public void delete(K id) throws IOException {
     String uri = getURI(type, BULK);
-    Response response = postRequest(getDeleteActions(id), uri, getRefreshParam());
+    Response response = postRequest(uri, getDeleteActions(id), getRefreshParam());
     int statusCode = response.getStatusLine().getStatusCode();
     if (statusCode != HttpStatus.SC_OK) {
       throw new IOException(
@@ -172,10 +173,10 @@ abstract class AbstractElasticIndex<K, V> implements Index<K, V> {
   public void deleteAll() throws IOException {
     // Delete the index, if it exists.
     String endpoint = indexName + client.adapter().indicesExistParam();
-    Response response = client.get().performRequest(new Request("HEAD", endpoint));
+    Response response = performRequest("HEAD", endpoint);
     int statusCode = response.getStatusLine().getStatusCode();
     if (statusCode == HttpStatus.SC_OK) {
-      response = client.get().performRequest(new Request("DELETE", indexName));
+      response = performRequest("DELETE", indexName);
       statusCode = response.getStatusLine().getStatusCode();
       if (statusCode != HttpStatus.SC_OK) {
         throw new IOException(
@@ -185,7 +186,7 @@ abstract class AbstractElasticIndex<K, V> implements Index<K, V> {
 
     // Recreate the index.
     String indexCreationFields = concatJsonString(getSettings(), getMappings());
-    response = performRequest("PUT", indexCreationFields, indexName, Collections.emptyMap());
+    response = performRequest("PUT", indexName, indexCreationFields);
     statusCode = response.getStatusLine().getStatusCode();
    if (statusCode != HttpStatus.SC_OK) {
       String error = String.format("Failed to create index %s: %s", indexName, statusCode);
@@ -297,20 +298,36 @@ abstract class AbstractElasticIndex<K, V> implements Index<K, V> {
     return encodedIndexName + "/" + encodedType + "/" + request;
   }
 
-  protected Response postRequest(Object payload, String uri, Map<String, String> params)
+  protected Response postRequest(String uri, Object payload) throws IOException {
+    return performRequest("POST", uri, payload);
+  }
+
+  protected Response postRequest(String uri, Object payload, Map<String, String> params)
       throws IOException {
-    return performRequest("POST", payload, uri, params);
+    return performRequest("POST", uri, payload, params);
   }
 
   private String concatJsonString(String target, String addition) {
     return target.substring(0, target.length() - 1) + "," + addition.substring(1);
   }
 
+  private Response performRequest(String method, String uri) throws IOException {
+    return performRequest(method, uri, null);
+  }
+
+  private Response performRequest(String method, String uri, @Nullable Object payload)
+      throws IOException {
+    return performRequest(method, uri, payload, Collections.emptyMap());
+  }
+
   private Response performRequest(
-      String method, Object payload, String uri, Map<String, String> params) throws IOException {
-    Request request = new Request(method, uri);
+      String method, String uri, @Nullable Object payload, Map<String, String> params)
+      throws IOException {
+    Request request = new Request(method, uri.startsWith("/") ? uri : "/" + uri);
+    if (payload != null) {
      String payloadStr = payload instanceof String ? (String) payload : payload.toString();
      request.setEntity(new NStringEntity(payloadStr, ContentType.APPLICATION_JSON));
+    }
     for (Map.Entry<String, String> entry : params.entrySet()) {
       request.addParameter(entry.getKey(), entry.getValue());
     }
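For readers following the refactor in the hunk above, here is a small self-contained sketch (a hypothetical standalone class, not part of this change) of the leading-slash normalization that the new performRequest variant applies before handing a URI to the Elasticsearch REST client:

// Standalone illustration only; the real method also sets the JSON entity and
// query parameters and delegates to the RestClient, all omitted here.
public class UriPrefixSketch {
  // Mirrors the ternary added in performRequest(): prefix "/" unless already present.
  static String normalize(String uri) {
    return uri.startsWith("/") ? uri : "/" + uri;
  }

  public static void main(String[] args) {
    System.out.println(normalize("gerrit_changes/_bulk")); // -> /gerrit_changes/_bulk
    System.out.println(normalize("/"));                    // -> "/" (unchanged)
  }
}

Together with the "/" added to versionDiscoveryUrl and the "GET /" version probe in the hunks below, this is what the commit message means by ensuring every request URI is prefixed with "/".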
@@ -3,6 +3,7 @@ java_library(
     srcs = glob(["**/*.java"]),
     visibility = ["//visibility:public"],
     deps = [
+        "//java/com/google/gerrit/common:annotations",
         "//java/com/google/gerrit/extensions:api",
         "//java/com/google/gerrit/index",
         "//java/com/google/gerrit/index:query_exception",
@@ -79,7 +79,7 @@ public class ElasticAccountIndex extends AbstractElasticIndex<Account.Id, Accoun
             .add(new UpdateRequest<>(schema, as));
 
     String uri = getURI(type, BULK);
-    Response response = postRequest(bulk, uri, getRefreshParam());
+    Response response = postRequest(uri, bulk, getRefreshParam());
     int statusCode = response.getStatusLine().getStatusCode();
     if (statusCode != HttpStatus.SC_OK) {
       throw new IOException(
@@ -138,7 +138,7 @@ class ElasticChangeIndex extends AbstractElasticIndex<Change.Id, ChangeData>
     }
 
     String uri = getURI(type, BULK);
-    Response response = postRequest(bulk, uri, getRefreshParam());
+    Response response = postRequest(uri, bulk, getRefreshParam());
     int statusCode = response.getStatusLine().getStatusCode();
     if (statusCode != HttpStatus.SC_OK) {
       throw new IOException(
@@ -77,7 +77,7 @@ public class ElasticGroupIndex extends AbstractElasticIndex<AccountGroup.UUID, I
             .add(new UpdateRequest<>(schema, group));
 
     String uri = getURI(type, BULK);
-    Response response = postRequest(bulk, uri, getRefreshParam());
+    Response response = postRequest(uri, bulk, getRefreshParam());
     int statusCode = response.getStatusLine().getStatusCode();
     if (statusCode != HttpStatus.SC_OK) {
       throw new IOException(
@@ -77,7 +77,7 @@ public class ElasticProjectIndex extends AbstractElasticIndex<Project.NameKey, P
             .add(new UpdateRequest<>(schema, projectState));
 
     String uri = getURI(type, BULK);
-    Response response = postRequest(bulk, uri, getRefreshParam());
+    Response response = postRequest(uri, bulk, getRefreshParam());
     int statusCode = response.getStatusLine().getStatusCode();
     if (statusCode != HttpStatus.SC_OK) {
       throw new IOException(
@@ -33,7 +33,7 @@ public class ElasticQueryAdapter {
   ElasticQueryAdapter(ElasticVersion version) {
     this.ignoreUnmapped = version == ElasticVersion.V2_4;
     this.usePostV5Type = version.isV6();
-    this.versionDiscoveryUrl = version.isV6() ? "%s*" : "%s*/_aliases";
+    this.versionDiscoveryUrl = version.isV6() ? "/%s*" : "/%s*/_aliases";
 
     switch (version) {
       case V5_6:
@@ -106,7 +106,7 @@ class ElasticRestClientProvider implements Provider<RestClient>, LifecycleListen
 
   private ElasticVersion getVersion() throws ElasticException {
     try {
-      Response response = client.performRequest(new Request("GET", ""));
+      Response response = client.performRequest(new Request("GET", "/"));
       StatusLine statusLine = response.getStatusLine();
       if (statusLine.getStatusCode() != HttpStatus.SC_OK) {
         throw new FailedToGetVersion(statusLine);
@@ -34,6 +34,8 @@ public class ProjectInput {
   public InheritableBoolean requireChangeId;
   public InheritableBoolean createNewChangeForAllNotInTarget;
   public InheritableBoolean rejectEmptyCommit;
+  public InheritableBoolean enableSignedPush;
+  public InheritableBoolean requireSignedPush;
   public String maxObjectSizeLimit;
   public Map<String, Map<String, ConfigValue>> pluginConfigValues;
 }
@@ -110,14 +110,9 @@ public class AsyncReceiveCommits implements PreReceiveHook {
     final MultiProgressMonitor progress;
 
     private final Collection<ReceiveCommand> commands;
-    private final ReceiveCommits receiveCommits;
 
     private Worker(Collection<ReceiveCommand> commands) {
       this.commands = commands;
-      receiveCommits =
-          factory.create(
-              projectState, user, receivePack, allRefsWatcher, extraReviewers, messageSender);
-      receiveCommits.init();
       progress = new MultiProgressMonitor(new MessageSenderOutputStream(), "Processing changes");
     }
 
@@ -173,7 +168,7 @@ public class AsyncReceiveCommits implements PreReceiveHook {
     }
   }
 
-  private final ReceiveCommits.Factory factory;
+  private final ReceiveCommits receiveCommits;
   private final PermissionBackend.ForProject perm;
   private final ReceivePack receivePack;
   private final ExecutorService executor;
@@ -184,8 +179,6 @@ public class AsyncReceiveCommits implements PreReceiveHook {
   private final ProjectState projectState;
   private final IdentifiedUser user;
   private final Repository repo;
-  private final MessageSender messageSender;
-  private final SetMultimap<ReviewerStateInternal, Account.Id> extraReviewers;
   private final AllRefsWatcher allRefsWatcher;
 
   @Inject
@@ -206,7 +199,6 @@ public class AsyncReceiveCommits implements PreReceiveHook {
       @Assisted @Nullable MessageSender messageSender,
       @Assisted SetMultimap<ReviewerStateInternal, Account.Id> extraReviewers)
       throws PermissionBackendException {
-    this.factory = factory;
     this.executor = executor;
     this.scopePropagator = scopePropagator;
     this.receiveConfig = receiveConfig;
@@ -215,8 +207,6 @@ public class AsyncReceiveCommits implements PreReceiveHook {
     this.projectState = projectState;
     this.user = user;
     this.repo = repo;
-    this.messageSender = messageSender;
-    this.extraReviewers = extraReviewers;
 
     Project.NameKey projectName = projectState.getNameKey();
     receivePack = new ReceivePack(repo);
@@ -251,6 +241,11 @@ public class AsyncReceiveCommits implements PreReceiveHook {
     advHooks.add(new ReceiveCommitsAdvertiseRefsHook(queryProvider, projectName));
     advHooks.add(new HackPushNegotiateHook());
     receivePack.setAdvertiseRefsHook(AdvertiseRefsHookChain.newChain(advHooks));
+
+    receiveCommits =
+        factory.create(
+            projectState, user, receivePack, allRefsWatcher, extraReviewers, messageSender);
+    receiveCommits.init();
   }
 
   /** Determine if the user can upload commits. */
@@ -275,6 +270,11 @@ public class AsyncReceiveCommits implements PreReceiveHook {
 
   @Override
   public void onPreReceive(ReceivePack rp, Collection<ReceiveCommand> commands) {
+    if (commands.stream().anyMatch(c -> c.getResult() != Result.NOT_ATTEMPTED)) {
+      // Stop processing when command was already processed by previously invoked
+      // pre-receive hooks
+      return;
+    }
     Worker w = new Worker(commands);
     try {
       w.progress.waitFor(
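The guard added in the hunk above can be read in isolation as the following sketch (hypothetical types, not Gerrit code): any command whose result is no longer NOT_ATTEMPTED was already handled by an earlier pre-receive hook, so this hook returns without creating a Worker.

import java.util.List;

// Standalone sketch of the early-exit predicate in AsyncReceiveCommits#onPreReceive.
public class EarlyExitSketch {
  enum Result { NOT_ATTEMPTED, REJECTED_OTHER_REASON, OK }

  record Command(Result result) {}

  static boolean alreadyProcessed(List<Command> commands) {
    return commands.stream().anyMatch(c -> c.result() != Result.NOT_ATTEMPTED);
  }

  public static void main(String[] args) {
    System.out.println(alreadyProcessed(List.of(new Command(Result.NOT_ATTEMPTED)))); // false
    System.out.println(alreadyProcessed(List.of(new Command(Result.OK))));            // true
  }
}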
@@ -35,6 +35,8 @@ public class CreateProjectArgs {
   public InheritableBoolean newChangeForAllNotInTarget;
   public InheritableBoolean changeIdRequired;
   public InheritableBoolean rejectEmptyCommit;
+  public InheritableBoolean enableSignedPush;
+  public InheritableBoolean requireSignedPush;
   public boolean createEmptyCommit;
   public String maxObjectSizeLimit;
 
@@ -44,6 +46,8 @@ public class CreateProjectArgs {
     contentMerge = InheritableBoolean.INHERIT;
     changeIdRequired = InheritableBoolean.INHERIT;
     newChangeForAllNotInTarget = InheritableBoolean.INHERIT;
+    enableSignedPush = InheritableBoolean.INHERIT;
+    requireSignedPush = InheritableBoolean.INHERIT;
     submitType = SubmitType.MERGE_IF_NECESSARY;
   }
 
@@ -203,6 +203,10 @@ public class CreateProject
         MoreObjects.firstNonNull(input.requireChangeId, InheritableBoolean.INHERIT);
     args.rejectEmptyCommit =
         MoreObjects.firstNonNull(input.rejectEmptyCommit, InheritableBoolean.INHERIT);
+    args.enableSignedPush =
+        MoreObjects.firstNonNull(input.enableSignedPush, InheritableBoolean.INHERIT);
+    args.requireSignedPush =
+        MoreObjects.firstNonNull(input.requireSignedPush, InheritableBoolean.INHERIT);
     try {
       args.maxObjectSizeLimit = ProjectConfig.validMaxObjectSizeLimit(input.maxObjectSizeLimit);
     } catch (ConfigInvalidException e) {
@@ -297,6 +301,8 @@ public class CreateProject
     newProject.setBooleanConfig(BooleanProjectConfig.REQUIRE_CHANGE_ID, args.changeIdRequired);
     newProject.setBooleanConfig(BooleanProjectConfig.REJECT_EMPTY_COMMIT, args.rejectEmptyCommit);
     newProject.setMaxObjectSizeLimit(args.maxObjectSizeLimit);
+    newProject.setBooleanConfig(BooleanProjectConfig.ENABLE_SIGNED_PUSH, args.enableSignedPush);
+    newProject.setBooleanConfig(BooleanProjectConfig.REQUIRE_SIGNED_PUSH, args.requireSignedPush);
     if (args.newParent != null) {
       newProject.setParentName(args.newParent);
     }
@@ -230,6 +230,15 @@ public abstract class AbstractPushForReview extends AbstractDaemonTest {
     assertPushRejected(pr, r, "no common ancestry");
   }
 
+  @Test
+  @GerritConfig(name = "receive.enableSignedPush", value = "true")
+  @TestProjectInput(
+      enableSignedPush = InheritableBoolean.TRUE,
+      requireSignedPush = InheritableBoolean.TRUE)
+  public void nonSignedPushRejectedWhenSignPushRequired() throws Exception {
+    pushTo("refs/for/master").assertErrorStatus("push cert error");
+  }
+
   @Test
   public void pushInitialCommitForRefsMetaConfigBranch() throws Exception {
     // delete refs/meta/config
@@ -18,16 +18,9 @@ package(
 
 load("@io_bazel_rules_closure//closure:defs.bzl", "closure_js_library")
 
-genrule(
-    name = "polymer_closure_renamed",
-    srcs = ["@polymer_closure//file"],
-    outs = ["polymer_closure_renamed.js"],
-    cmd = "cp $< $@",
-)
-
 closure_js_library(
     name = "polymer_closure",
-    srcs = [":polymer_closure_renamed"],
+    srcs = ["@polymer_closure//file"],
     data = ["//lib:LICENSE-Apache2.0"],
     no_closure_library = True,
 )