Merge branch 'stable-2.15'

* stable-2.15:
  Update git submodules
  Reformat all Java files with google-java-format 1.6
  Reformat all Java files with google-java-format 1.6
  ChangeNotesTest: Have at most one call after ExpectedException#expect
  FieldType: Remove unnecessary KEYWORD type
  Remove unnecessary annotations from reindex test classes
  AbstractReindexTests: Assert account index and group index
  AbstractElasticIndex: Rename getActions to getDeleteActions
  dev-bazel: Improve documentation of build caches
  Hoist declaration of TestName up to GerritBaseTests
  Bazel: Replace native {http,git}_archive with Skylark rules
  Bazel: Bump rules_closure to 0.7.0
  dev-contributing: Update link to buildifier tool
  dev-contributing: Update buildifier to latest released version
  Bump minimum Bazel version to 0.14.0 and activate caches
  ElasticReindexIT: Add tests against Elasticsearch version 5
  Elasticsearch: Add tests for queries against version 5
  MatchQueryBuilder: Don't use deprecated "match" query
  Elasticsearch: Add an adapter to support V5
  Add Bazel version check

Change-Id: I6bbf229b76790f3fbb74a952d4330ba92e3dd449
David Pursehouse 2018-06-06 11:55:54 +09:00
commit acb1889028
33 changed files with 546 additions and 265 deletions

View File

@ -0,0 +1,133 @@
Date: Wed, 30 May 2018 21:22:18 +0200
Subject: [PATCH] Replace native {http,git}_archive with Skylark rules
See [1] for more details.
Test Plan:
* Apply this CL on Bazel master: [2] and build bazel
* Run with this custom built bazel version:
$ bazel test //javatests/...
$ bazel test //closure/...
[1] https://groups.google.com/d/topic/bazel-discuss/dO2MHQLwJF0/discussion
[2] https://bazel-review.googlesource.com/#/c/bazel/+/55932/
---
closure/repositories.bzl | 23 ++++++++++++-----------
1 file changed, 12 insertions(+), 11 deletions(-)
diff --git a/closure/repositories.bzl b/closure/repositories.bzl
index 9b84a72..2816fb6 100644
--- a/closure/repositories.bzl
+++ b/closure/repositories.bzl
@@ -14,6 +14,7 @@
"""External dependencies for Closure Rules."""
+load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive", "http_file")
load("//closure/private:java_import_external.bzl", "java_import_external")
load("//closure/private:platform_http_file.bzl", "platform_http_file")
load("//closure:filegroup_external.bzl", "filegroup_external")
@@ -405,7 +406,7 @@ def com_google_common_html_types():
)
def com_google_common_html_types_html_proto():
- native.http_file(
+ http_file(
name = "com_google_common_html_types_html_proto",
sha256 = "6ece202f11574e37d0c31d9cf2e9e11a0dbc9218766d50d211059ebd495b49c3",
urls = [
@@ -633,7 +634,7 @@ def com_google_javascript_closure_compiler():
def com_google_javascript_closure_library():
# After updating: bazel run //closure/library:regenerate -- "$PWD"
- native.new_http_archive(
+ http_archive(
name = "com_google_javascript_closure_library",
urls = [
"https://mirror.bazel.build/github.com/google/closure-library/archive/v20180405.tar.gz",
@@ -658,7 +659,7 @@ def com_google_jsinterop_annotations():
def com_google_protobuf():
# Note: Protobuf 3.6.0+ is going to use C++11
- native.http_archive(
+ http_archive(
name = "com_google_protobuf",
strip_prefix = "protobuf-3.5.1",
sha256 = "826425182ee43990731217b917c5c3ea7190cfda141af4869e6d4ad9085a740f",
@@ -669,7 +670,7 @@ def com_google_protobuf():
)
def com_google_protobuf_js():
- native.new_http_archive(
+ http_archive(
name = "com_google_protobuf_js",
urls = [
"https://mirror.bazel.build/github.com/google/protobuf/archive/v3.5.1.tar.gz",
@@ -722,7 +723,7 @@ def com_google_template_soy():
)
def com_google_template_soy_jssrc():
- native.new_http_archive(
+ http_archive(
name = "com_google_template_soy_jssrc",
sha256 = "c76ab4cb6e46a7c76336640b3c40d6897b420209a6c0905cdcd32533dda8126a",
urls = [
@@ -757,7 +758,7 @@ def com_squareup_javapoet():
)
def fonts_noto_hinted_deb():
- native.http_file(
+ http_file(
name = "fonts_noto_hinted_deb",
urls = [
"https://mirror.bazel.build/http.us.debian.org/debian/pool/main/f/fonts-noto/fonts-noto-hinted_20161116-1_all.deb",
@@ -767,7 +768,7 @@ def fonts_noto_hinted_deb():
)
def fonts_noto_mono_deb():
- native.http_file(
+ http_file(
name = "fonts_noto_mono_deb",
urls = [
"https://mirror.bazel.build/http.us.debian.org/debian/pool/main/f/fonts-noto/fonts-noto-mono_20161116-1_all.deb",
@@ -801,7 +802,7 @@ def javax_inject():
)
def libexpat_amd64_deb():
- native.http_file(
+ http_file(
name = "libexpat_amd64_deb",
urls = [
"https://mirror.bazel.build/http.us.debian.org/debian/pool/main/e/expat/libexpat1_2.1.0-6+deb8u3_amd64.deb",
@@ -811,7 +812,7 @@ def libexpat_amd64_deb():
)
def libfontconfig_amd64_deb():
- native.http_file(
+ http_file(
name = "libfontconfig_amd64_deb",
urls = [
"https://mirror.bazel.build/http.us.debian.org/debian/pool/main/f/fontconfig/libfontconfig1_2.11.0-6.3+deb8u1_amd64.deb",
@@ -821,7 +822,7 @@ def libfontconfig_amd64_deb():
)
def libfreetype_amd64_deb():
- native.http_file(
+ http_file(
name = "libfreetype_amd64_deb",
urls = [
"https://mirror.bazel.build/http.us.debian.org/debian/pool/main/f/freetype/libfreetype6_2.5.2-3+deb8u1_amd64.deb",
@@ -831,7 +832,7 @@ def libfreetype_amd64_deb():
)
def libpng_amd64_deb():
- native.http_file(
+ http_file(
name = "libpng_amd64_deb",
urls = [
"https://mirror.bazel.build/http.us.debian.org/debian/pool/main/libp/libpng/libpng12-0_1.2.50-2+deb8u2_amd64.deb",
--
2.16.3

View File

@ -369,36 +369,27 @@ To consume the JGit dependency from the development tree, edit
`lib/jgit/jgit.bzl` setting LOCAL_JGIT_REPO to a directory holding a
JGit repository.
[[local-action-cache]]
[[bazel-local-caches]]
To accelerate builds, local action cache can be activated.
To activate the local action cache add these lines to your `~/.bazelrc` file:
To accelerate builds, several caches are activated by default:
* ~/.gerritcodereview/bazel-cache/downloaded-artifacts
* ~/.gerritcodereview/bazel-cache/repository
* ~/.gerritcodereview/bazel-cache/cas
Currently none of these caches have a maximum size limit. See
link:https://github.com/bazelbuild/bazel/issues/5139[this bazel issue] for
details. Users should watch the cache sizes and clean them manually if
necessary.
Due to the `--experimental_strict_action_env` option used in `bazelrc`
it is possible that some commands required by the build are not found
on the PATH, causing the build to fail. In this case the PATH used in
the build can be overridden with the `--action_env=PATH` directive in
the user's `~/.bazelrc` file, for example:
----
build --disk_cache=~/.gerritcodereview/bazel-cache/cas
build --experimental_strict_action_env
build --action_env=PATH
----
[[repository_cache]]
To accelerate fetches, a local repository cache is activated by default in Bazel.
This cache is only used for the rules_closure external repository and its transitive
dependencies, because rules_closure uses the standard Bazel download facility.
For all other Gerrit dependencies, the download_artifacts repository cache is
already used.
To change the default local repository cache directory, create an accessible cache
directory:
----
mkdir -p ~/.gerritcodereview/bazel-cache/repository
----
and add this line to your `~/.bazelrc` file:
----
build --repository_cache=/home/<user>/.gerritcodereview/bazel-cache/repository
build --action_env=PATH=/usr/local/opt/coreutils/libexec/gnubin/:/usr/local/bin/:/usr/bin/
----
GERRIT
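
As a rough illustration of the workflow described in the documentation above, here is a minimal sketch of a user-level setup, assuming a Unix shell: the `--action_env=PATH` value is the example path from the documentation, and the disk and repository caches are already provided by the checked-in `tools/bazel.rc` shown later in this change.
----
# Sketch only: the disk and repository caches come from tools/bazel.rc;
# a user ~/.bazelrc is only needed to widen PATH when
# --experimental_strict_action_env hides required tools.
$ echo 'build --action_env=PATH=/usr/local/opt/coreutils/libexec/gnubin/:/usr/local/bin/:/usr/bin/' >> ~/.bazelrc
$ bazel test //javatests/...
----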

View File

@ -165,7 +165,8 @@ Guide].
To format Java source code, Gerrit uses the
link:https://github.com/google/google-java-format[`google-java-format`]
tool (version 1.5), and to format Bazel BUILD and WORKSPACE files the
link:https://github.com/bazelbuild/buildifier[`buildifier`] tool (version 0.6.0).
link:https://github.com/bazelbuild/buildtools/tree/master/buildifier[`buildifier`]
tool (version 0.11.1).
These tools automatically apply format according to the style guides; this
streamlines code review by reducing the need for time-consuming, tedious,
and contentious discussions about trivial issues like whitespace.
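
For readers who want to run these formatters by hand, a minimal sketch of the invocations, assuming the google-java-format 1.6 all-deps jar and a buildifier 0.11.1 binary have already been downloaded; the file arguments are only illustrative, not prescribed by this change.
----
# Sketch only: format one Java file in place, then a Bazel workspace file.
$ java -jar google-java-format-1.6-all-deps.jar --replace \
    java/com/google/gerrit/elasticsearch/ElasticQueryAdapter.java
$ buildifier WORKSPACE
----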

View File

@ -1,5 +1,6 @@
workspace(name = "gerrit")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive", "http_file")
load("//tools/bzl:maven_jar.bzl", "maven_jar", "GERRIT", "MAVEN_LOCAL")
load("//lib/codemirror:cm.bzl", "CM_VERSION", "DIFF_MATCH_PATCH_VERSION")
load("//plugins:external_plugin_deps.bzl", "external_plugin_deps")
@ -13,6 +14,8 @@ http_archive(
http_archive(
name = "io_bazel_rules_closure",
build_file_content = "exports_files([\"0001-Replace-native-http-git-_archive-with-Skylark-rules.patch\"])",
patches = ["//:0001-Replace-native-http-git-_archive-with-Skylark-rules.patch"],
sha256 = "a80acb69c63d5f6437b099c111480a4493bad4592015af2127a2f49fb7512d8d",
strip_prefix = "rules_closure-0.7.0",
url = "https://github.com/bazelbuild/rules_closure/archive/0.7.0.tar.gz",
@ -24,12 +27,12 @@ http_archive(
http_file(
name = "polymer_closure",
sha256 = "5a589bdba674e1fec7188e9251c8624ebf2d4d969beb6635f9148f420d1e08b1",
url = "https://raw.githubusercontent.com/google/closure-compiler/775609aad61e14aef289ebec4bfc09ad88877f9e/contrib/externs/polymer-1.0.js",
urls = ["https://raw.githubusercontent.com/google/closure-compiler/775609aad61e14aef289ebec4bfc09ad88877f9e/contrib/externs/polymer-1.0.js"],
)
load("@bazel_skylib//:lib.bzl", "versions")
versions.check(minimum_bazel_version = "0.7.0")
versions.check(minimum_bazel_version = "0.14.0")
load("@io_bazel_rules_closure//closure:defs.bzl", "closure_repositories")

View File

@ -1,143 +0,0 @@
// Copyright (C) 2018 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.elasticsearch;
import com.google.gerrit.extensions.events.LifecycleListener;
import com.google.gerrit.lifecycle.LifecycleModule;
import com.google.gson.JsonParser;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
import java.io.IOException;
import org.apache.http.HttpHost;
import org.apache.http.HttpStatus;
import org.apache.http.StatusLine;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Singleton
class ElasticRestClientProvider implements Provider<RestClient>, LifecycleListener {
private static final Logger log = LoggerFactory.getLogger(ElasticRestClientProvider.class);
private final HttpHost[] hosts;
private final String username;
private final String password;
private RestClient client;
@Inject
ElasticRestClientProvider(ElasticConfiguration cfg) {
hosts = cfg.urls.toArray(new HttpHost[cfg.urls.size()]);
username = cfg.username;
password = cfg.password;
}
public static LifecycleModule module() {
return new LifecycleModule() {
@Override
protected void configure() {
listener().to(ElasticRestClientProvider.class);
}
};
}
@Override
public RestClient get() {
if (client == null) {
synchronized (this) {
if (client == null) {
client = build();
ElasticVersion version = getVersion();
log.info("Elasticsearch integration version {}", version);
}
}
}
return client;
}
@Override
public void start() {}
@Override
public void stop() {
if (client != null) {
try {
client.close();
} catch (IOException e) {
// Ignore. We can't do anything about it.
}
}
}
public static class FailedToGetVersion extends ElasticException {
private static final long serialVersionUID = 1L;
private static final String MESSAGE = "Failed to get Elasticsearch version";
FailedToGetVersion(StatusLine status) {
super(String.format("%s: %d %s", MESSAGE, status.getStatusCode(), status.getReasonPhrase()));
}
FailedToGetVersion(Throwable cause) {
super(MESSAGE, cause);
}
}
private ElasticVersion getVersion() throws ElasticException {
try {
Response response = client.performRequest("GET", "");
StatusLine statusLine = response.getStatusLine();
if (statusLine.getStatusCode() != HttpStatus.SC_OK) {
throw new FailedToGetVersion(statusLine);
}
String version =
new JsonParser()
.parse(AbstractElasticIndex.getContent(response))
.getAsJsonObject()
.get("version")
.getAsJsonObject()
.get("number")
.getAsString();
log.info("Connected to Elasticsearch version {}", version);
return ElasticVersion.forVersion(version);
} catch (IOException e) {
throw new FailedToGetVersion(e);
}
}
private RestClient build() {
RestClientBuilder builder = RestClient.builder(hosts);
setConfiguredCredentialsIfAny(builder);
return builder.build();
}
private void setConfiguredCredentialsIfAny(RestClientBuilder builder) {
if (username != null && password != null) {
CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
credentialsProvider.setCredentials(
AuthScope.ANY, new UsernamePasswordCredentials(username, password));
builder.setHttpClientConfigCallback(
(HttpAsyncClientBuilder httpClientBuilder) ->
httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider));
}
}
}

View File

@ -75,7 +75,6 @@ abstract class AbstractElasticIndex<K, V> implements Index<K, V> {
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
protected static final String BULK = "_bulk";
protected static final String IGNORE_UNMAPPED = "ignore_unmapped";
protected static final String ORDER = "order";
protected static final String SEARCH = "_search";
@ -105,8 +104,8 @@ abstract class AbstractElasticIndex<K, V> implements Index<K, V> {
private final Schema<V> schema;
private final SitePaths sitePaths;
private final String indexNameRaw;
private final ElasticRestClientProvider client;
protected final ElasticRestClientProvider client;
protected final String indexName;
protected final Gson gson;
protected final ElasticQueryBuilder queryBuilder;
@ -142,20 +141,21 @@ abstract class AbstractElasticIndex<K, V> implements Index<K, V> {
}
@Override
public void delete(K c) throws IOException {
public void delete(K id) throws IOException {
String uri = getURI(indexNameRaw, BULK);
Response response = postRequest(addActions(c), uri, getRefreshParam());
Response response = postRequest(getDeleteActions(id), uri, getRefreshParam());
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode != HttpStatus.SC_OK) {
throw new IOException(
String.format("Failed to delete %s from index %s: %s", c, indexName, statusCode));
String.format("Failed to delete %s from index %s: %s", id, indexName, statusCode));
}
}
@Override
public void deleteAll() throws IOException {
// Delete the index, if it exists.
Response response = client.get().performRequest("HEAD", indexName);
String endpoint = indexName + client.adapter().indicesExistParam();
Response response = client.get().performRequest("HEAD", endpoint);
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode == HttpStatus.SC_OK) {
response = client.get().performRequest("DELETE", indexName);
@ -175,15 +175,14 @@ abstract class AbstractElasticIndex<K, V> implements Index<K, V> {
}
}
protected abstract String addActions(K c);
protected abstract String getDeleteActions(K id);
protected abstract String getMappings();
protected abstract String getId(V v);
protected String delete(String type, K c) {
String id = c.toString();
return new DeleteRequest(id, indexNameRaw, type).toString();
protected String delete(String type, K id) {
return new DeleteRequest(id.toString(), indexNameRaw, type).toString();
}
protected abstract V fromDocument(JsonObject doc, Set<String> fields);
@ -250,7 +249,7 @@ abstract class AbstractElasticIndex<K, V> implements Index<K, V> {
protected JsonArray getSortArray(String idFieldName) {
JsonObject properties = new JsonObject();
properties.addProperty(ORDER, "asc");
properties.addProperty(IGNORE_UNMAPPED, true);
client.adapter().setIgnoreUnmapped(properties);
JsonArray sortArray = new JsonArray();
addNamedElement(idFieldName, properties, sortArray);
@ -286,7 +285,7 @@ abstract class AbstractElasticIndex<K, V> implements Index<K, V> {
this.index = index;
QueryBuilder qb = queryBuilder.toQueryBuilder(p);
SearchSourceBuilder searchSource =
new SearchSourceBuilder()
new SearchSourceBuilder(client.adapter())
.query(qb)
.from(opts.start())
.size(opts.limit())

View File

@ -49,8 +49,8 @@ public class ElasticAccountIndex extends AbstractElasticIndex<Account.Id, Accoun
static class AccountMapping {
MappingProperties accounts;
AccountMapping(Schema<AccountState> schema) {
this.accounts = ElasticMapping.createMapping(schema);
AccountMapping(Schema<AccountState> schema, ElasticQueryAdapter adapter) {
this.accounts = ElasticMapping.createMapping(schema, adapter);
}
}
@ -69,7 +69,7 @@ public class ElasticAccountIndex extends AbstractElasticIndex<Account.Id, Accoun
@Assisted Schema<AccountState> schema) {
super(cfg, sitePaths, schema, client, ACCOUNTS);
this.accountCache = accountCache;
this.mapping = new AccountMapping(schema);
this.mapping = new AccountMapping(schema, client.adapter());
this.schema = schema;
}
@ -98,8 +98,8 @@ public class ElasticAccountIndex extends AbstractElasticIndex<Account.Id, Accoun
}
@Override
protected String addActions(Account.Id c) {
return delete(ACCOUNTS, c);
protected String getDeleteActions(Account.Id a) {
return delete(ACCOUNTS, a);
}
@Override

View File

@ -79,8 +79,8 @@ class ElasticChangeIndex extends AbstractElasticIndex<Change.Id, ChangeData>
MappingProperties openChanges;
MappingProperties closedChanges;
ChangeMapping(Schema<ChangeData> schema) {
MappingProperties mapping = ElasticMapping.createMapping(schema);
ChangeMapping(Schema<ChangeData> schema, ElasticQueryAdapter adapter) {
MappingProperties mapping = ElasticMapping.createMapping(schema, adapter);
this.openChanges = mapping;
this.closedChanges = mapping;
}
@ -107,7 +107,7 @@ class ElasticChangeIndex extends AbstractElasticIndex<Change.Id, ChangeData>
this.db = db;
this.changeDataFactory = changeDataFactory;
this.schema = schema;
mapping = new ChangeMapping(schema);
mapping = new ChangeMapping(schema, client.adapter());
}
@Override
@ -161,7 +161,7 @@ class ElasticChangeIndex extends AbstractElasticIndex<Change.Id, ChangeData>
private JsonArray getSortArray() {
JsonObject properties = new JsonObject();
properties.addProperty(ORDER, "desc");
properties.addProperty(IGNORE_UNMAPPED, true);
client.adapter().setIgnoreUnmapped(properties);
JsonArray sortArray = new JsonArray();
addNamedElement(ChangeField.UPDATED.getName(), properties, sortArray);
@ -174,7 +174,7 @@ class ElasticChangeIndex extends AbstractElasticIndex<Change.Id, ChangeData>
}
@Override
protected String addActions(Id c) {
protected String getDeleteActions(Id c) {
return delete(OPEN_CHANGES, c) + delete(CLOSED_CHANGES, c);
}

View File

@ -47,8 +47,8 @@ public class ElasticGroupIndex extends AbstractElasticIndex<AccountGroup.UUID, I
static class GroupMapping {
MappingProperties groups;
GroupMapping(Schema<InternalGroup> schema) {
this.groups = ElasticMapping.createMapping(schema);
GroupMapping(Schema<InternalGroup> schema, ElasticQueryAdapter adapter) {
this.groups = ElasticMapping.createMapping(schema, adapter);
}
}
@ -67,7 +67,7 @@ public class ElasticGroupIndex extends AbstractElasticIndex<AccountGroup.UUID, I
@Assisted Schema<InternalGroup> schema) {
super(cfg, sitePaths, schema, client, GROUPS);
this.groupCache = groupCache;
this.mapping = new GroupMapping(schema);
this.mapping = new GroupMapping(schema, client.adapter());
this.schema = schema;
}
@ -95,8 +95,8 @@ public class ElasticGroupIndex extends AbstractElasticIndex<AccountGroup.UUID, I
}
@Override
protected String addActions(AccountGroup.UUID c) {
return delete(GROUPS, c);
protected String getDeleteActions(AccountGroup.UUID g) {
return delete(GROUPS, g);
}
@Override

View File

@ -21,12 +21,12 @@ import com.google.gerrit.index.Schema;
import java.util.Map;
class ElasticMapping {
static MappingProperties createMapping(Schema<?> schema) {
ElasticMapping.Builder mapping = new ElasticMapping.Builder();
static MappingProperties createMapping(Schema<?> schema, ElasticQueryAdapter adapter) {
ElasticMapping.Builder mapping = new ElasticMapping.Builder(adapter);
for (FieldDef<?, ?> field : schema.getFields().values()) {
String name = field.getName();
FieldType<?> fieldType = field.getType();
if (fieldType == FieldType.EXACT || fieldType == FieldType.KEYWORD) {
if (fieldType == FieldType.EXACT) {
mapping.addExactField(name);
} else if (fieldType == FieldType.TIMESTAMP) {
mapping.addTimestamp(name);
@ -46,9 +46,14 @@ class ElasticMapping {
}
static class Builder {
private final ElasticQueryAdapter adapter;
private final ImmutableMap.Builder<String, FieldProperties> fields =
new ImmutableMap.Builder<>();
Builder(ElasticQueryAdapter adapter) {
this.adapter = adapter;
}
MappingProperties build() {
MappingProperties properties = new MappingProperties();
properties.properties = fields.build();
@ -56,9 +61,10 @@ class ElasticMapping {
}
Builder addExactField(String name) {
FieldProperties key = new FieldProperties("string");
key.index = "not_analyzed";
FieldProperties properties = new FieldProperties("string");
FieldProperties key = new FieldProperties(adapter.exactFieldType());
key.index = adapter.indexProperty();
FieldProperties properties;
properties = new FieldProperties(adapter.exactFieldType());
properties.fields = ImmutableMap.of("key", key);
fields.put(name, properties);
return this;
@ -78,7 +84,7 @@ class ElasticMapping {
}
Builder addString(String name) {
fields.put(name, new FieldProperties("string"));
fields.put(name, new FieldProperties(adapter.stringFieldType()));
return this;
}

View File

@ -47,8 +47,8 @@ public class ElasticProjectIndex extends AbstractElasticIndex<Project.NameKey, P
static class ProjectMapping {
MappingProperties projects;
ProjectMapping(Schema<ProjectData> schema) {
this.projects = ElasticMapping.createMapping(schema);
ProjectMapping(Schema<ProjectData> schema, ElasticQueryAdapter adapter) {
this.projects = ElasticMapping.createMapping(schema, adapter);
}
}
@ -63,12 +63,12 @@ public class ElasticProjectIndex extends AbstractElasticIndex<Project.NameKey, P
ElasticConfiguration cfg,
SitePaths sitePaths,
Provider<ProjectCache> projectCache,
ElasticRestClientProvider clientBuilder,
ElasticRestClientProvider client,
@Assisted Schema<ProjectData> schema) {
super(cfg, sitePaths, schema, clientBuilder, PROJECTS);
super(cfg, sitePaths, schema, client, PROJECTS);
this.projectCache = projectCache;
this.schema = schema;
this.mapping = new ProjectMapping(schema);
this.mapping = new ProjectMapping(schema, client.adapter());
}
@Override
@ -97,7 +97,7 @@ public class ElasticProjectIndex extends AbstractElasticIndex<Project.NameKey, P
}
@Override
protected String addActions(Project.NameKey nameKey) {
protected String getDeleteActions(Project.NameKey nameKey) {
return delete(PROJECTS, nameKey);
}

View File

@ -0,0 +1,74 @@
// Copyright (C) 2018 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.elasticsearch;
import com.google.gson.JsonObject;
public class ElasticQueryAdapter {
private final boolean ignoreUnmapped;
private final String searchFilteringName;
private final String indicesExistParam;
private final String exactFieldType;
private final String stringFieldType;
private final String indexProperty;
ElasticQueryAdapter(ElasticVersion version) {
this.ignoreUnmapped = version == ElasticVersion.V2_4;
switch (version) {
case V5_6:
case V6_2:
this.searchFilteringName = "_source";
this.indicesExistParam = "?allow_no_indices=false";
this.exactFieldType = "keyword";
this.stringFieldType = "text";
this.indexProperty = "true";
break;
case V2_4:
default:
this.searchFilteringName = "fields";
this.indicesExistParam = "";
this.exactFieldType = "string";
this.stringFieldType = "string";
this.indexProperty = "not_analyzed";
break;
}
}
void setIgnoreUnmapped(JsonObject properties) {
if (ignoreUnmapped) {
properties.addProperty("ignore_unmapped", true);
}
}
public String searchFilteringName() {
return searchFilteringName;
}
String indicesExistParam() {
return indicesExistParam;
}
String exactFieldType() {
return exactFieldType;
}
String stringFieldType() {
return stringFieldType;
}
String indexProperty() {
return indexProperty;
}
}

View File

@ -94,7 +94,7 @@ public class ElasticQueryBuilder {
return intRangeQuery(p);
} else if (type == FieldType.TIMESTAMP) {
return timestampQuery(p);
} else if (type == FieldType.EXACT || type == FieldType.KEYWORD) {
} else if (type == FieldType.EXACT) {
return exactQuery(p);
} else if (type == FieldType.PREFIX) {
return QueryBuilders.matchPhrasePrefixQuery(name, value);

View File

@ -43,6 +43,7 @@ class ElasticRestClientProvider implements Provider<RestClient>, LifecycleListen
private final String password;
private RestClient client;
private ElasticQueryAdapter adapter;
@Inject
ElasticRestClientProvider(ElasticConfiguration cfg) {
@ -68,6 +69,7 @@ class ElasticRestClientProvider implements Provider<RestClient>, LifecycleListen
client = build();
ElasticVersion version = getVersion();
logger.atInfo().log("Elasticsearch integration version %s", version);
adapter = new ElasticQueryAdapter(version);
}
}
}
@ -88,6 +90,11 @@ class ElasticRestClientProvider implements Provider<RestClient>, LifecycleListen
}
}
ElasticQueryAdapter adapter() {
get(); // Make sure we're connected
return adapter;
}
public static class FailedToGetVersion extends ElasticException {
private static final long serialVersionUID = 1L;
private static final String MESSAGE = "Failed to get Elasticsearch version";

View File

@ -27,9 +27,14 @@ class MatchQueryBuilder extends QueryBuilder {
enum Type {
/** The text is analyzed and used as a phrase query. */
PHRASE,
MATCH_PHRASE,
/** The text is analyzed and used in a phrase query, with the last term acting as a prefix. */
PHRASE_PREFIX
MATCH_PHRASE_PREFIX;
@Override
public String toString() {
return name().toLowerCase(Locale.US);
}
}
private final String name;
@ -52,14 +57,6 @@ class MatchQueryBuilder extends QueryBuilder {
@Override
protected void doXContent(XContentBuilder builder) throws IOException {
builder.startObject("match");
builder.startObject(name);
builder.field("query", text);
if (type != null) {
builder.field("type", type.toString().toLowerCase(Locale.ENGLISH));
}
builder.endObject();
builder.endObject();
builder.startObject(type.toString()).field(name, text).endObject();
}
}

View File

@ -33,7 +33,7 @@ public abstract class QueryBuilders {
* @param text The query text (to be analyzed).
*/
public static MatchQueryBuilder matchPhraseQuery(String name, Object text) {
return new MatchQueryBuilder(name, text).type(MatchQueryBuilder.Type.PHRASE);
return new MatchQueryBuilder(name, text).type(MatchQueryBuilder.Type.MATCH_PHRASE);
}
/**
@ -43,7 +43,7 @@ public abstract class QueryBuilders {
* @param text The query text (to be analyzed).
*/
public static MatchQueryBuilder matchPhrasePrefixQuery(String name, Object text) {
return new MatchQueryBuilder(name, text).type(MatchQueryBuilder.Type.PHRASE_PREFIX);
return new MatchQueryBuilder(name, text).type(MatchQueryBuilder.Type.MATCH_PHRASE_PREFIX);
}
/**

View File

@ -14,6 +14,7 @@
package com.google.gerrit.elasticsearch.builders;
import com.google.gerrit.elasticsearch.ElasticQueryAdapter;
import java.io.IOException;
import java.util.List;
@ -23,6 +24,7 @@ import java.util.List;
* <p>A trimmed down and modified version of org.elasticsearch.search.builder.SearchSourceBuilder.
*/
public class SearchSourceBuilder {
private final ElasticQueryAdapter adapter;
private QuerySourceBuilder querySourceBuilder;
@ -33,7 +35,9 @@ public class SearchSourceBuilder {
private List<String> fieldNames;
/** Constructs a new search source builder. */
public SearchSourceBuilder() {}
public SearchSourceBuilder(ElasticQueryAdapter adapter) {
this.adapter = adapter;
}
/** Constructs a new search source builder with a search query. */
public SearchSourceBuilder query(QueryBuilder query) {
@ -95,9 +99,9 @@ public class SearchSourceBuilder {
if (fieldNames != null) {
if (fieldNames.size() == 1) {
builder.field("fields", fieldNames.get(0));
builder.field(adapter.searchFilteringName(), fieldNames.get(0));
} else {
builder.startArray("fields");
builder.startArray(adapter.searchFilteringName());
for (String fieldName : fieldNames) {
builder.value(fieldName);
}

View File

@ -34,10 +34,6 @@ public final class FieldDef<I, T> {
return new FieldDef.Builder<>(FieldType.EXACT, name);
}
public static FieldDef.Builder<String> keyword(String name) {
return new FieldDef.Builder<>(FieldType.KEYWORD, name);
}
public static FieldDef.Builder<String> fullText(String name) {
return new FieldDef.Builder<>(FieldType.FULL_TEXT, name);
}

View File

@ -33,9 +33,6 @@ public class FieldType<T> {
/** A string field searched using exact-match semantics. */
public static final FieldType<String> EXACT = new FieldType<>("EXACT");
/** A Keyword field searched using non-analyzed-match semantics. */
public static final FieldType<String> KEYWORD = new FieldType<>("KEYWORD");
/** A string field searched using prefix. */
public static final FieldType<String> PREFIX = new FieldType<>("PREFIX");

View File

@ -330,7 +330,7 @@ public abstract class AbstractLuceneIndex<K, V> implements Index<K, V> {
for (Object value : values.getValues()) {
doc.add(new LongField(name, ((Timestamp) value).getTime(), store));
}
} else if (type == FieldType.KEYWORD || type == FieldType.EXACT || type == FieldType.PREFIX) {
} else if (type == FieldType.EXACT || type == FieldType.PREFIX) {
for (Object value : values.getValues()) {
doc.add(new StringField(name, (String) value, store));
}
@ -353,10 +353,7 @@ public abstract class AbstractLuceneIndex<K, V> implements Index<K, V> {
for (IndexableField field : doc.getFields()) {
checkArgument(allFields.containsKey(field.name()), "Unrecognized field " + field.name());
FieldType<?> type = allFields.get(field.name()).getType();
if (type == FieldType.EXACT
|| type == FieldType.FULL_TEXT
|| type == FieldType.PREFIX
|| type == FieldType.KEYWORD) {
if (type == FieldType.EXACT || type == FieldType.FULL_TEXT || type == FieldType.PREFIX) {
rawFields.put(field.name(), field.stringValue());
} else if (type == FieldType.INTEGER || type == FieldType.INTEGER_RANGE) {
rawFields.put(field.name(), field.numericValue().intValue());

View File

@ -148,7 +148,7 @@ public class QueryBuilder<V> {
return intRangeQuery(p);
} else if (type == FieldType.TIMESTAMP) {
return timestampQuery(p);
} else if (type == FieldType.EXACT || type == FieldType.KEYWORD) {
} else if (type == FieldType.EXACT) {
return exactQuery(p);
} else if (type == FieldType.PREFIX) {
return prefixQuery(p);

View File

@ -18,7 +18,6 @@ import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.gerrit.index.FieldDef.exact;
import static com.google.gerrit.index.FieldDef.fullText;
import static com.google.gerrit.index.FieldDef.integer;
import static com.google.gerrit.index.FieldDef.keyword;
import static com.google.gerrit.index.FieldDef.prefix;
import static com.google.gerrit.index.FieldDef.storedOnly;
import static com.google.gerrit.index.FieldDef.timestamp;
@ -41,11 +40,11 @@ public class GroupField {
/** Group UUID. */
public static final FieldDef<InternalGroup, String> UUID =
keyword("uuid").stored().build(g -> g.getGroupUUID().get());
exact("uuid").stored().build(g -> g.getGroupUUID().get());
/** Group owner UUID. */
public static final FieldDef<InternalGroup, String> OWNER_UUID =
keyword("owner_uuid").build(g -> g.getOwnerGroupUUID().get());
exact("owner_uuid").build(g -> g.getOwnerGroupUUID().get());
/** Timestamp indicating when this group was created. */
public static final FieldDef<InternalGroup, Timestamp> CREATED_ON =

View File

@ -42,11 +42,9 @@ import java.util.function.Consumer;
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.storage.file.FileBasedConfig;
import org.eclipse.jgit.util.FS;
import org.junit.Ignore;
import org.junit.Test;
@NoHttpd
@Ignore
public abstract class AbstractReindexTests extends StandaloneSiteTest {
private static final String CHANGES = ChangeSchemaDefinitions.NAME;

View File

@ -14,9 +14,7 @@
package com.google.gerrit.acceptance.pgm;
import com.google.gerrit.acceptance.NoHttpd;
import org.junit.Ignore;
@NoHttpd
@Ignore
public class ElasticReindexIT extends AbstractReindexTests {}

View File

@ -14,7 +14,4 @@
package com.google.gerrit.acceptance.pgm;
import com.google.gerrit.acceptance.NoHttpd;
@NoHttpd
public class ReindexIT extends AbstractReindexTests {}

View File

@ -18,15 +18,12 @@ import com.google.gerrit.elasticsearch.ElasticTestUtils.ElasticNodeInfo;
import com.google.gerrit.server.query.change.AbstractQueryChangesTest;
import com.google.gerrit.testing.ConfigSuite;
import com.google.gerrit.testing.InMemoryModule;
import com.google.gerrit.testing.InMemoryRepositoryManager.Repo;
import com.google.gerrit.testing.IndexConfig;
import com.google.inject.Guice;
import com.google.inject.Injector;
import org.eclipse.jgit.junit.TestRepository;
import org.eclipse.jgit.lib.Config;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
public class ElasticQueryChangesTest extends AbstractQueryChangesTest {
@ConfigSuite.Default
@ -73,12 +70,4 @@ public class ElasticQueryChangesTest extends AbstractQueryChangesTest {
ElasticTestUtils.configure(elasticsearchConfig, nodeInfo.port, indicesPrefix);
return Guice.createInjector(new InMemoryModule(elasticsearchConfig, notesMigration));
}
@Test
public void byOwnerInvalidQuery() throws Exception {
TestRepository<Repo> repo = createProject("repo");
insert(repo, newChange(repo), userId);
String nameEmail = user.asIdentifiedUser().getNameEmail();
assertQuery("owner: \"" + nameEmail + "\"\\");
}
}

View File

@ -0,0 +1,73 @@
// Copyright (C) 2018 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.elasticsearch;
import com.google.gerrit.elasticsearch.ElasticTestUtils.ElasticNodeInfo;
import com.google.gerrit.server.query.account.AbstractQueryAccountsTest;
import com.google.gerrit.testing.ConfigSuite;
import com.google.gerrit.testing.InMemoryModule;
import com.google.gerrit.testing.IndexConfig;
import com.google.inject.Guice;
import com.google.inject.Injector;
import org.eclipse.jgit.lib.Config;
import org.junit.AfterClass;
import org.junit.BeforeClass;
public class ElasticV5QueryAccountsTest extends AbstractQueryAccountsTest {
@ConfigSuite.Default
public static Config defaultConfig() {
return IndexConfig.createForElasticsearch();
}
private static ElasticNodeInfo nodeInfo;
private static ElasticContainer<?> container;
@BeforeClass
public static void startIndexService() {
if (nodeInfo != null) {
// do not start Elasticsearch twice
return;
}
container = ElasticContainer.createAndStart(ElasticVersion.V5_6);
nodeInfo = new ElasticNodeInfo(container.getHttpHost().getPort());
}
@AfterClass
public static void stopElasticsearchServer() {
if (container != null) {
container.stop();
}
}
private String testName() {
return testName.getMethodName().toLowerCase() + "_";
}
@Override
protected void initAfterLifecycleStart() throws Exception {
super.initAfterLifecycleStart();
ElasticTestUtils.createAllIndexes(injector);
}
@Override
protected Injector createInjector() {
Config elasticsearchConfig = new Config(config);
InMemoryModule.setDefaults(elasticsearchConfig);
String indicesPrefix = testName();
ElasticTestUtils.configure(elasticsearchConfig, nodeInfo.port, indicesPrefix);
return Guice.createInjector(new InMemoryModule(elasticsearchConfig, notesMigration));
}
}

View File

@ -0,0 +1,73 @@
// Copyright (C) 2018 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.elasticsearch;
import com.google.gerrit.elasticsearch.ElasticTestUtils.ElasticNodeInfo;
import com.google.gerrit.server.query.change.AbstractQueryChangesTest;
import com.google.gerrit.testing.ConfigSuite;
import com.google.gerrit.testing.InMemoryModule;
import com.google.gerrit.testing.IndexConfig;
import com.google.inject.Guice;
import com.google.inject.Injector;
import org.eclipse.jgit.lib.Config;
import org.junit.AfterClass;
import org.junit.BeforeClass;
public class ElasticV5QueryChangesTest extends AbstractQueryChangesTest {
@ConfigSuite.Default
public static Config defaultConfig() {
return IndexConfig.createForElasticsearch();
}
private static ElasticNodeInfo nodeInfo;
private static ElasticContainer<?> container;
@BeforeClass
public static void startIndexService() {
if (nodeInfo != null) {
// do not start Elasticsearch twice
return;
}
container = ElasticContainer.createAndStart(ElasticVersion.V5_6);
nodeInfo = new ElasticNodeInfo(container.getHttpHost().getPort());
}
@AfterClass
public static void stopElasticsearchServer() {
if (container != null) {
container.stop();
}
}
private String testName() {
return testName.getMethodName().toLowerCase() + "_";
}
@Override
protected void initAfterLifecycleStart() throws Exception {
super.initAfterLifecycleStart();
ElasticTestUtils.createAllIndexes(injector);
}
@Override
protected Injector createInjector() {
Config elasticsearchConfig = new Config(config);
InMemoryModule.setDefaults(elasticsearchConfig);
String indicesPrefix = testName();
ElasticTestUtils.configure(elasticsearchConfig, nodeInfo.port, indicesPrefix);
return Guice.createInjector(new InMemoryModule(elasticsearchConfig, notesMigration));
}
}

View File

@ -0,0 +1,73 @@
// Copyright (C) 2018 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.elasticsearch;
import com.google.gerrit.elasticsearch.ElasticTestUtils.ElasticNodeInfo;
import com.google.gerrit.server.query.group.AbstractQueryGroupsTest;
import com.google.gerrit.testing.ConfigSuite;
import com.google.gerrit.testing.InMemoryModule;
import com.google.gerrit.testing.IndexConfig;
import com.google.inject.Guice;
import com.google.inject.Injector;
import org.eclipse.jgit.lib.Config;
import org.junit.AfterClass;
import org.junit.BeforeClass;
public class ElasticV5QueryGroupsTest extends AbstractQueryGroupsTest {
@ConfigSuite.Default
public static Config defaultConfig() {
return IndexConfig.createForElasticsearch();
}
private static ElasticNodeInfo nodeInfo;
private static ElasticContainer<?> container;
@BeforeClass
public static void startIndexService() {
if (nodeInfo != null) {
// do not start Elasticsearch twice
return;
}
container = ElasticContainer.createAndStart(ElasticVersion.V5_6);
nodeInfo = new ElasticNodeInfo(container.getHttpHost().getPort());
}
@AfterClass
public static void stopElasticsearchServer() {
if (container != null) {
container.stop();
}
}
private String testName() {
return testName.getMethodName().toLowerCase() + "_";
}
@Override
protected void initAfterLifecycleStart() throws Exception {
super.initAfterLifecycleStart();
ElasticTestUtils.createAllIndexes(injector);
}
@Override
protected Injector createInjector() {
Config elasticsearchConfig = new Config(config);
InMemoryModule.setDefaults(elasticsearchConfig);
String indicesPrefix = testName();
ElasticTestUtils.configure(elasticsearchConfig, nodeInfo.port, indicesPrefix);
return Guice.createInjector(new InMemoryModule(elasticsearchConfig, notesMigration));
}
}

View File

@ -2901,6 +2901,14 @@ public abstract class AbstractQueryChangesTest extends GerritServerTests {
assertQuery("query:query4");
}
@Test
public void byOwnerInvalidQuery() throws Exception {
TestRepository<Repo> repo = createProject("repo");
insert(repo, newChange(repo), userId);
String nameEmail = user.asIdentifiedUser().getNameEmail();
assertQuery("owner: \"" + nameEmail + "\"\\");
}
protected ChangeInserter newChange(TestRepository<Repo> repo) throws Exception {
return newChange(repo, null, null, null, null, false);
}

View File

@ -70,6 +70,7 @@ public class LuceneQueryChangesTest extends AbstractQueryChangesTest {
}
@Test
@Override
public void byOwnerInvalidQuery() throws Exception {
TestRepository<Repo> repo = createProject("repo");
Change change1 = insert(repo, newChange(repo), userId);

View File

@ -18,9 +18,16 @@ package(
load("@io_bazel_rules_closure//closure:defs.bzl", "closure_js_library")
genrule(
name = "polymer_closure_renamed",
srcs = ["@polymer_closure//file"],
outs = ["polymer_closure_renamed.js"],
cmd = "cp $< $@",
)
closure_js_library(
name = "polymer_closure",
srcs = ["@polymer_closure//file"],
srcs = [":polymer_closure_renamed"],
data = ["//lib:LICENSE-Apache2.0"],
no_closure_library = True,
)

View File

@ -1,2 +1,5 @@
build --workspace_status_command=./tools/workspace-status.sh --strategy=Closure=worker
build --disk_cache=~/.gerritcodereview/bazel-cache/cas
build --repository_cache=~/.gerritcodereview/bazel-cache/repository
build --experimental_strict_action_env
test --build_tests_only