Bump Lucene version to 6.6.5
We cannot skip a major Lucene version and upgrade directly from 5 to 7, because the backward-codecs compatibility layer only supports one major release. To support online reindexing we have to do the upgrade to Lucene 7 in two steps and wait for one Gerrit release between the Lucene version upgrades. There are some removals and deprecations compared to the current 5.5 version: * IntField and LongField are deprecated and are supposed to be replaced with IntPoint and LongPoint. We use the deprecated Legacy<Foo> field types. These types were moved to the Solr distribution and can still be used in the next major version, 7. * TrackingIndexWriter functionality was folded into IndexWriter in 6.2. Its APIs now return a long sequence number which can be passed to ControlledRealTimeReopenThread directly, without TrackingIndexWriter acting as a mediator. Feature: Issue 7856 Change-Id: I25e8454e2b541eb5d8aac1e98d011c65b5e12240
This commit is contained in:
12
WORKSPACE
12
WORKSPACE
@@ -599,36 +599,36 @@ maven_jar(
|
||||
sha1 = "05b6f921f1810bdf90e25471968f741f87168b64",
|
||||
)
|
||||
|
||||
LUCENE_VERS = "5.5.4"
|
||||
LUCENE_VERS = "6.6.5"
|
||||
|
||||
maven_jar(
|
||||
name = "lucene-core",
|
||||
artifact = "org.apache.lucene:lucene-core:" + LUCENE_VERS,
|
||||
sha1 = "ab9c77e75cf142aa6e284b310c8395617bd9b19b",
|
||||
sha1 = "2983f80b1037e098209657b0ca9176827892d0c0",
|
||||
)
|
||||
|
||||
maven_jar(
|
||||
name = "lucene-analyzers-common",
|
||||
artifact = "org.apache.lucene:lucene-analyzers-common:" + LUCENE_VERS,
|
||||
sha1 = "08ce9d34c8124c80e176e8332ee947480bbb9576",
|
||||
sha1 = "6094f91071d90570b7f5f8ce481d5de7d2d2e9d5",
|
||||
)
|
||||
|
||||
maven_jar(
|
||||
name = "backward-codecs",
|
||||
artifact = "org.apache.lucene:lucene-backward-codecs:" + LUCENE_VERS,
|
||||
sha1 = "a933f42e758c54c43083398127ea7342b54d8212",
|
||||
sha1 = "460a19e8d1aa7d31e9614cf528a6cb508c9e823d",
|
||||
)
|
||||
|
||||
maven_jar(
|
||||
name = "lucene-misc",
|
||||
artifact = "org.apache.lucene:lucene-misc:" + LUCENE_VERS,
|
||||
sha1 = "a74388857f73614e528ae44d742c60187cb55a5a",
|
||||
sha1 = "ce3a1b7b6a92b9af30791356a4bd46d1cea6cc1e",
|
||||
)
|
||||
|
||||
maven_jar(
|
||||
name = "lucene-queryparser",
|
||||
artifact = "org.apache.lucene:lucene-queryparser:" + LUCENE_VERS,
|
||||
sha1 = "8a06fad4675473d98d93b61fea529e3f464bf69e",
|
||||
sha1 = "2db9ca0086a4b8e0b9bc9f08a9b420303168e37c",
|
||||
)
|
||||
|
||||
maven_jar(
|
||||
|
||||
@@ -33,8 +33,8 @@ import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import java.util.zip.ZipEntry;
|
||||
import java.util.zip.ZipOutputStream;
|
||||
import org.apache.lucene.analysis.CharArraySet;
|
||||
import org.apache.lucene.analysis.standard.StandardAnalyzer;
|
||||
import org.apache.lucene.analysis.util.CharArraySet;
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.document.StringField;
|
||||
|
||||
@@ -64,15 +64,14 @@ import java.util.function.Function;
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.document.Field.Store;
|
||||
import org.apache.lucene.document.IntField;
|
||||
import org.apache.lucene.document.LongField;
|
||||
import org.apache.lucene.document.LegacyIntField;
|
||||
import org.apache.lucene.document.LegacyLongField;
|
||||
import org.apache.lucene.document.StoredField;
|
||||
import org.apache.lucene.document.StringField;
|
||||
import org.apache.lucene.document.TextField;
|
||||
import org.apache.lucene.index.IndexWriter;
|
||||
import org.apache.lucene.index.IndexableField;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.index.TrackingIndexWriter;
|
||||
import org.apache.lucene.search.ControlledRealTimeReopenThread;
|
||||
import org.apache.lucene.search.IndexSearcher;
|
||||
import org.apache.lucene.search.Query;
|
||||
@@ -86,6 +85,7 @@ import org.apache.lucene.store.AlreadyClosedException;
|
||||
import org.apache.lucene.store.Directory;
|
||||
|
||||
/** Basic Lucene index implementation. */
|
||||
@SuppressWarnings("deprecation")
|
||||
public abstract class AbstractLuceneIndex<K, V> implements Index<K, V> {
|
||||
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
|
||||
|
||||
@@ -98,7 +98,7 @@ public abstract class AbstractLuceneIndex<K, V> implements Index<K, V> {
|
||||
private final Directory dir;
|
||||
private final String name;
|
||||
private final ListeningExecutorService writerThread;
|
||||
private final TrackingIndexWriter writer;
|
||||
private final IndexWriter writer;
|
||||
private final ReferenceManager<IndexSearcher> searcherManager;
|
||||
private final ControlledRealTimeReopenThread<IndexSearcher> reopenThread;
|
||||
private final Set<NrtFuture> notDoneNrtFutures;
|
||||
@@ -118,17 +118,16 @@ public abstract class AbstractLuceneIndex<K, V> implements Index<K, V> {
|
||||
this.dir = dir;
|
||||
this.name = name;
|
||||
String index = Joiner.on('_').skipNulls().join(name, subIndex);
|
||||
IndexWriter delegateWriter;
|
||||
long commitPeriod = writerConfig.getCommitWithinMs();
|
||||
|
||||
if (commitPeriod < 0) {
|
||||
delegateWriter = new AutoCommitWriter(dir, writerConfig.getLuceneConfig());
|
||||
writer = new AutoCommitWriter(dir, writerConfig.getLuceneConfig());
|
||||
} else if (commitPeriod == 0) {
|
||||
delegateWriter = new AutoCommitWriter(dir, writerConfig.getLuceneConfig(), true);
|
||||
writer = new AutoCommitWriter(dir, writerConfig.getLuceneConfig(), true);
|
||||
} else {
|
||||
final AutoCommitWriter autoCommitWriter =
|
||||
new AutoCommitWriter(dir, writerConfig.getLuceneConfig());
|
||||
delegateWriter = autoCommitWriter;
|
||||
writer = autoCommitWriter;
|
||||
|
||||
autoCommitExecutor =
|
||||
new LoggingContextAwareScheduledExecutorService(
|
||||
@@ -165,8 +164,7 @@ public abstract class AbstractLuceneIndex<K, V> implements Index<K, V> {
|
||||
commitPeriod,
|
||||
MILLISECONDS);
|
||||
}
|
||||
writer = new TrackingIndexWriter(delegateWriter);
|
||||
searcherManager = new WrappableSearcherManager(writer.getIndexWriter(), true, searcherFactory);
|
||||
searcherManager = new WrappableSearcherManager(writer, true, searcherFactory);
|
||||
|
||||
notDoneNrtFutures = Sets.newConcurrentHashSet();
|
||||
|
||||
@@ -251,7 +249,7 @@ public abstract class AbstractLuceneIndex<K, V> implements Index<K, V> {
|
||||
}
|
||||
|
||||
try {
|
||||
writer.getIndexWriter().close();
|
||||
writer.close();
|
||||
} catch (AlreadyClosedException e) {
|
||||
// Ignore.
|
||||
} catch (IOException e) {
|
||||
@@ -294,7 +292,7 @@ public abstract class AbstractLuceneIndex<K, V> implements Index<K, V> {
|
||||
writer.deleteAll();
|
||||
}
|
||||
|
||||
public TrackingIndexWriter getWriter() {
|
||||
public IndexWriter getWriter() {
|
||||
return writer;
|
||||
}
|
||||
|
||||
@@ -325,15 +323,15 @@ public abstract class AbstractLuceneIndex<K, V> implements Index<K, V> {
|
||||
|
||||
if (type == FieldType.INTEGER || type == FieldType.INTEGER_RANGE) {
|
||||
for (Object value : values.getValues()) {
|
||||
doc.add(new IntField(name, (Integer) value, store));
|
||||
doc.add(new LegacyIntField(name, (Integer) value, store));
|
||||
}
|
||||
} else if (type == FieldType.LONG) {
|
||||
for (Object value : values.getValues()) {
|
||||
doc.add(new LongField(name, (Long) value, store));
|
||||
doc.add(new LegacyLongField(name, (Long) value, store));
|
||||
}
|
||||
} else if (type == FieldType.TIMESTAMP) {
|
||||
for (Object value : values.getValues()) {
|
||||
doc.add(new LongField(name, ((Timestamp) value).getTime(), store));
|
||||
doc.add(new LegacyLongField(name, ((Timestamp) value).getTime(), store));
|
||||
}
|
||||
} else if (type == FieldType.EXACT || type == FieldType.PREFIX) {
|
||||
for (Object value : values.getValues()) {
|
||||
|
||||
@@ -47,58 +47,64 @@ public class AutoCommitWriter extends IndexWriter {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addDocument(Iterable<? extends IndexableField> doc) throws IOException {
|
||||
super.addDocument(doc);
|
||||
public long addDocument(Iterable<? extends IndexableField> doc) throws IOException {
|
||||
long ret = super.addDocument(doc);
|
||||
autoFlush();
|
||||
return ret;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addDocuments(Iterable<? extends Iterable<? extends IndexableField>> docs)
|
||||
public long addDocuments(Iterable<? extends Iterable<? extends IndexableField>> docs)
|
||||
throws IOException {
|
||||
super.addDocuments(docs);
|
||||
long ret = super.addDocuments(docs);
|
||||
autoFlush();
|
||||
return ret;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void updateDocuments(
|
||||
public long updateDocuments(
|
||||
Term delTerm, Iterable<? extends Iterable<? extends IndexableField>> docs)
|
||||
throws IOException {
|
||||
super.updateDocuments(delTerm, docs);
|
||||
long ret = super.updateDocuments(delTerm, docs);
|
||||
autoFlush();
|
||||
return ret;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deleteDocuments(Term... term) throws IOException {
|
||||
super.deleteDocuments(term);
|
||||
public long deleteDocuments(Term... term) throws IOException {
|
||||
long ret = super.deleteDocuments(term);
|
||||
autoFlush();
|
||||
return ret;
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized boolean tryDeleteDocument(IndexReader readerIn, int docID)
|
||||
throws IOException {
|
||||
boolean ret = super.tryDeleteDocument(readerIn, docID);
|
||||
if (ret) {
|
||||
public synchronized long tryDeleteDocument(IndexReader readerIn, int docID) throws IOException {
|
||||
long ret = super.tryDeleteDocument(readerIn, docID);
|
||||
if (ret != -1) {
|
||||
autoFlush();
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deleteDocuments(Query... queries) throws IOException {
|
||||
super.deleteDocuments(queries);
|
||||
public long deleteDocuments(Query... queries) throws IOException {
|
||||
long ret = super.deleteDocuments(queries);
|
||||
autoFlush();
|
||||
return ret;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void updateDocument(Term term, Iterable<? extends IndexableField> doc) throws IOException {
|
||||
super.updateDocument(term, doc);
|
||||
public long updateDocument(Term term, Iterable<? extends IndexableField> doc) throws IOException {
|
||||
long ret = super.updateDocument(term, doc);
|
||||
autoFlush();
|
||||
return ret;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deleteAll() throws IOException {
|
||||
super.deleteAll();
|
||||
public long deleteAll() throws IOException {
|
||||
long ret = super.deleteAll();
|
||||
autoFlush();
|
||||
return ret;
|
||||
}
|
||||
|
||||
void manualFlush() throws IOException {
|
||||
|
||||
@@ -19,8 +19,8 @@ import static java.util.concurrent.TimeUnit.MINUTES;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import com.google.gerrit.server.config.ConfigUtil;
|
||||
import org.apache.lucene.analysis.CharArraySet;
|
||||
import org.apache.lucene.analysis.standard.StandardAnalyzer;
|
||||
import org.apache.lucene.analysis.util.CharArraySet;
|
||||
import org.apache.lucene.index.IndexWriterConfig;
|
||||
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
|
||||
import org.eclipse.jgit.lib.Config;
|
||||
|
||||
@@ -38,19 +38,20 @@ import java.util.List;
|
||||
import org.apache.lucene.analysis.Analyzer;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.search.BooleanQuery;
|
||||
import org.apache.lucene.search.LegacyNumericRangeQuery;
|
||||
import org.apache.lucene.search.MatchAllDocsQuery;
|
||||
import org.apache.lucene.search.NumericRangeQuery;
|
||||
import org.apache.lucene.search.PrefixQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.RegexpQuery;
|
||||
import org.apache.lucene.search.TermQuery;
|
||||
import org.apache.lucene.util.BytesRefBuilder;
|
||||
import org.apache.lucene.util.NumericUtils;
|
||||
import org.apache.lucene.util.LegacyNumericUtils;
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
public class QueryBuilder<V> {
|
||||
static Term intTerm(String name, int value) {
|
||||
BytesRefBuilder builder = new BytesRefBuilder();
|
||||
NumericUtils.intToPrefixCoded(value, 0, builder);
|
||||
LegacyNumericUtils.intToPrefixCoded(value, 0, builder);
|
||||
return new Term(name, builder.get());
|
||||
}
|
||||
|
||||
@@ -180,7 +181,8 @@ public class QueryBuilder<V> {
|
||||
// Just fall back to a standard integer query.
|
||||
return new TermQuery(intTerm(p.getField().getName(), minimum));
|
||||
}
|
||||
return NumericRangeQuery.newIntRange(r.getField().getName(), minimum, maximum, true, true);
|
||||
return LegacyNumericRangeQuery.newIntRange(
|
||||
r.getField().getName(), minimum, maximum, true, true);
|
||||
}
|
||||
throw new QueryParseException("not an integer range: " + p);
|
||||
}
|
||||
@@ -188,7 +190,7 @@ public class QueryBuilder<V> {
|
||||
private Query timestampQuery(IndexPredicate<V> p) throws QueryParseException {
|
||||
if (p instanceof TimestampRangePredicate) {
|
||||
TimestampRangePredicate<V> r = (TimestampRangePredicate<V>) p;
|
||||
return NumericRangeQuery.newLongRange(
|
||||
return LegacyNumericRangeQuery.newLongRange(
|
||||
r.getField().getName(),
|
||||
r.getMinTimestamp().getTime(),
|
||||
r.getMaxTimestamp().getTime(),
|
||||
@@ -200,7 +202,7 @@ public class QueryBuilder<V> {
|
||||
|
||||
private Query notTimestamp(TimestampRangePredicate<V> r) throws QueryParseException {
|
||||
if (r.getMinTimestamp().getTime() == 0) {
|
||||
return NumericRangeQuery.newLongRange(
|
||||
return LegacyNumericRangeQuery.newLongRange(
|
||||
r.getField().getName(), r.getMaxTimestamp().getTime(), null, true, true);
|
||||
}
|
||||
throw new QueryParseException("cannot negate: " + r);
|
||||
|
||||
@@ -81,11 +81,17 @@ final class WrappableSearcherManager extends ReferenceManager<IndexSearcher> {
|
||||
WrappableSearcherManager(
|
||||
IndexWriter writer, boolean applyAllDeletes, SearcherFactory searcherFactory)
|
||||
throws IOException {
|
||||
// TODO(davido): Make it configurable
|
||||
// If true, new deletes will be written down to index files instead of carried over from writer
|
||||
// to reader directly in heap
|
||||
boolean writeAllDeletes = false;
|
||||
if (searcherFactory == null) {
|
||||
searcherFactory = new SearcherFactory();
|
||||
}
|
||||
this.searcherFactory = searcherFactory;
|
||||
current = getSearcher(searcherFactory, DirectoryReader.open(writer, applyAllDeletes));
|
||||
current =
|
||||
getSearcher(
|
||||
searcherFactory, DirectoryReader.open(writer, applyAllDeletes, writeAllDeletes));
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -15,12 +15,12 @@
|
||||
package com.google.gerrit.server.documentation;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import com.google.common.collect.Lists;
|
||||
import com.google.common.flogger.FluentLogger;
|
||||
import com.google.inject.Inject;
|
||||
import com.google.inject.Singleton;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.zip.ZipEntry;
|
||||
@@ -85,9 +85,9 @@ public class QueryDocumentationExecutor {
|
||||
// and skipped paging. Maybe add paging later.
|
||||
TopDocs results = searcher.search(query, Integer.MAX_VALUE);
|
||||
ScoreDoc[] hits = results.scoreDocs;
|
||||
int totalHits = results.totalHits;
|
||||
long totalHits = results.totalHits;
|
||||
|
||||
List<DocResult> out = Lists.newArrayListWithCapacity(totalHits);
|
||||
List<DocResult> out = new ArrayList<>();
|
||||
for (int i = 0; i < totalHits; i++) {
|
||||
DocResult result = new DocResult();
|
||||
Document doc = searcher.doc(hits[i].doc);
|
||||
|
||||
@@ -42,8 +42,12 @@ public class AccountSchemaDefinitions extends SchemaDefinitions<AccountState> {
|
||||
|
||||
@Deprecated static final Schema<AccountState> V7 = schema(V6, AccountField.PREFERRED_EMAIL_EXACT);
|
||||
|
||||
@Deprecated
|
||||
static final Schema<AccountState> V8 = schema(V7, AccountField.NAME_PART_NO_SECONDARY_EMAIL);
|
||||
|
||||
// Bump Lucene version requires reindexing
|
||||
static final Schema<AccountState> V9 = schema(V8);
|
||||
|
||||
public static final String NAME = "accounts";
|
||||
public static final AccountSchemaDefinitions INSTANCE = new AccountSchemaDefinitions();
|
||||
|
||||
|
||||
@@ -96,7 +96,10 @@ public class ChangeSchemaDefinitions extends SchemaDefinitions<ChangeData> {
|
||||
// Rename of star label 'mute' to 'reviewed' requires reindexing
|
||||
@Deprecated static final Schema<ChangeData> V48 = schema(V47);
|
||||
|
||||
static final Schema<ChangeData> V49 = schema(V48);
|
||||
@Deprecated static final Schema<ChangeData> V49 = schema(V48);
|
||||
|
||||
// Bump Lucene version requires reindexing
|
||||
static final Schema<ChangeData> V50 = schema(V49);
|
||||
|
||||
public static final String NAME = "changes";
|
||||
public static final ChangeSchemaDefinitions INSTANCE = new ChangeSchemaDefinitions();
|
||||
|
||||
@@ -37,7 +37,10 @@ public class GroupSchemaDefinitions extends SchemaDefinitions<InternalGroup> {
|
||||
@Deprecated
|
||||
static final Schema<InternalGroup> V4 = schema(V3, GroupField.MEMBER, GroupField.SUBGROUP);
|
||||
|
||||
static final Schema<InternalGroup> V5 = schema(V4, GroupField.REF_STATE);
|
||||
@Deprecated static final Schema<InternalGroup> V5 = schema(V4, GroupField.REF_STATE);
|
||||
|
||||
// Bump Lucene version requires reindexing
|
||||
static final Schema<InternalGroup> V6 = schema(V5);
|
||||
|
||||
public static final GroupSchemaDefinitions INSTANCE = new GroupSchemaDefinitions();
|
||||
|
||||
|
||||
Reference in New Issue
Block a user