Format all Java files with google-java-format
Having a standard tool for formatting saves reviewers' valuable time.
google-java-format is Google's standard formatter and is somewhat
inspired by gofmt[1]. This commit formats everything using
google-java-format version 1.2.

The downside of this one-off formatting is breaking blame. This can be
somewhat hacked around with a tool like git-hyper-blame[2], but it's
definitely not optimal until/unless this kind of feature makes its way
to git core.

Not in this change:
* Tool support, e.g. Eclipse. The command must be run manually [3].
* Documentation of best practice, e.g. new 100-column default.

[1] https://talks.golang.org/2015/gofmt-en.slide#3
[2] https://commondatastorage.googleapis.com/chrome-infra-docs/flat/depot_tools/docs/html/git-hyper-blame.html
[3] git ls-files | grep java$ | xargs google-java-format -i

Change-Id: Id5f3c6de95ce0b68b41f0a478b5c99a93675aaa3
Signed-off-by: David Pursehouse <dpursehouse@collab.net>
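A minimal sketch of the workflow described above: the one-off reformat
from [3], followed by the blame workaround from [2]. It assumes a
google-java-format 1.2 launcher is available on PATH (the tool itself
ships as a jar), and that depot_tools' git-hyper-blame reads its default
ignore list, .git-blame-ignore-revs, from the repository root; the file
name and the example path are assumptions for illustration, not part of
this change.

  # One-off reformat of all tracked Java files, in place [3]
  git ls-files | grep java$ | xargs google-java-format -i
  git commit -a -m "Format all Java files with google-java-format"

  # Blame workaround [2]: list the formatting commit so git-hyper-blame
  # skips it (assumes .git-blame-ignore-revs is its default ignore file)
  git rev-parse HEAD >> .git-blame-ignore-revs
  git hyper-blame path/to/SomeFile.java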
committed by David Pursehouse
parent 6723b6d0fa
commit 292fa154c1
@@ -33,7 +33,16 @@ import com.google.gerrit.server.index.Index;
|
||||
import com.google.gerrit.server.index.IndexUtils;
|
||||
import com.google.gerrit.server.index.Schema;
|
||||
import com.google.gerrit.server.index.Schema.Values;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.sql.Timestamp;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.Callable;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.Executor;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.ScheduledThreadPoolExecutor;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.document.Field.Store;
|
||||
@@ -55,21 +64,9 @@ import org.apache.lucene.store.Directory;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.sql.Timestamp;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.Callable;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.Executor;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.ScheduledThreadPoolExecutor;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
|
||||
/** Basic Lucene index implementation. */
|
||||
public abstract class AbstractLuceneIndex<K, V> implements Index<K, V> {
|
||||
private static final Logger log =
|
||||
LoggerFactory.getLogger(AbstractLuceneIndex.class);
|
||||
private static final Logger log = LoggerFactory.getLogger(AbstractLuceneIndex.class);
|
||||
|
||||
static String sortFieldName(FieldDef<?, ?> f) {
|
||||
return f.getName() + "_SORT";
|
||||
@@ -93,7 +90,8 @@ public abstract class AbstractLuceneIndex<K, V> implements Index<K, V> {
|
||||
String name,
|
||||
String subIndex,
|
||||
GerritIndexWriterConfig writerConfig,
|
||||
SearcherFactory searcherFactory) throws IOException {
|
||||
SearcherFactory searcherFactory)
|
||||
throws IOException {
|
||||
this.schema = schema;
|
||||
this.sitePaths = sitePaths;
|
||||
this.dir = dir;
|
||||
@@ -105,18 +103,21 @@ public abstract class AbstractLuceneIndex<K, V> implements Index<K, V> {
|
||||
if (commitPeriod < 0) {
|
||||
delegateWriter = new AutoCommitWriter(dir, writerConfig.getLuceneConfig());
|
||||
} else if (commitPeriod == 0) {
|
||||
delegateWriter =
|
||||
new AutoCommitWriter(dir, writerConfig.getLuceneConfig(), true);
|
||||
delegateWriter = new AutoCommitWriter(dir, writerConfig.getLuceneConfig(), true);
|
||||
} else {
|
||||
final AutoCommitWriter autoCommitWriter =
|
||||
new AutoCommitWriter(dir, writerConfig.getLuceneConfig());
|
||||
delegateWriter = autoCommitWriter;
|
||||
|
||||
autoCommitExecutor = new ScheduledThreadPoolExecutor(1, new ThreadFactoryBuilder()
|
||||
.setNameFormat(index + " Commit-%d")
|
||||
.setDaemon(true)
|
||||
.build());
|
||||
autoCommitExecutor.scheduleAtFixedRate(new Runnable() {
|
||||
autoCommitExecutor =
|
||||
new ScheduledThreadPoolExecutor(
|
||||
1,
|
||||
new ThreadFactoryBuilder()
|
||||
.setNameFormat(index + " Commit-%d")
|
||||
.setDaemon(true)
|
||||
.build());
|
||||
autoCommitExecutor.scheduleAtFixedRate(
|
||||
new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
@@ -131,34 +132,42 @@ public abstract class AbstractLuceneIndex<K, V> implements Index<K, V> {
|
||||
try {
|
||||
autoCommitWriter.close();
|
||||
} catch (IOException e2) {
|
||||
log.error("SEVERE: Error closing " + index
|
||||
+ " Lucene index after OOM; index may be corrupted.", e);
|
||||
log.error(
|
||||
"SEVERE: Error closing "
|
||||
+ index
|
||||
+ " Lucene index after OOM; index may be corrupted.",
|
||||
e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}, commitPeriod, commitPeriod, MILLISECONDS);
|
||||
},
|
||||
commitPeriod,
|
||||
commitPeriod,
|
||||
MILLISECONDS);
|
||||
}
|
||||
writer = new TrackingIndexWriter(delegateWriter);
|
||||
searcherManager = new WrappableSearcherManager(
|
||||
writer.getIndexWriter(), true, searcherFactory);
|
||||
searcherManager = new WrappableSearcherManager(writer.getIndexWriter(), true, searcherFactory);
|
||||
|
||||
notDoneNrtFutures = Sets.newConcurrentHashSet();
|
||||
|
||||
writerThread = MoreExecutors.listeningDecorator(
|
||||
Executors.newFixedThreadPool(1,
|
||||
new ThreadFactoryBuilder()
|
||||
.setNameFormat(index + " Write-%d")
|
||||
.setDaemon(true)
|
||||
.build()));
|
||||
writerThread =
|
||||
MoreExecutors.listeningDecorator(
|
||||
Executors.newFixedThreadPool(
|
||||
1,
|
||||
new ThreadFactoryBuilder()
|
||||
.setNameFormat(index + " Write-%d")
|
||||
.setDaemon(true)
|
||||
.build()));
|
||||
|
||||
reopenThread = new ControlledRealTimeReopenThread<>(
|
||||
writer, searcherManager,
|
||||
0.500 /* maximum stale age (seconds) */,
|
||||
0.010 /* minimum stale age (seconds) */);
|
||||
reopenThread =
|
||||
new ControlledRealTimeReopenThread<>(
|
||||
writer,
|
||||
searcherManager,
|
||||
0.500 /* maximum stale age (seconds) */,
|
||||
0.010 /* minimum stale age (seconds) */);
|
||||
reopenThread.setName(index + " NRT");
|
||||
reopenThread.setPriority(Math.min(
|
||||
Thread.currentThread().getPriority() + 2,
|
||||
Thread.MAX_PRIORITY));
|
||||
reopenThread.setPriority(
|
||||
Math.min(Thread.currentThread().getPriority() + 2, Thread.MAX_PRIORITY));
|
||||
reopenThread.setDaemon(true);
|
||||
|
||||
// This must be added after the reopen thread is created. The reopen thread
|
||||
@@ -169,18 +178,18 @@ public abstract class AbstractLuceneIndex<K, V> implements Index<K, V> {
|
||||
// internal listener needs to be called first.
|
||||
// TODO(dborowitz): This may have been fixed by
|
||||
// http://issues.apache.org/jira/browse/LUCENE-5461
|
||||
searcherManager.addListener(new RefreshListener() {
|
||||
@Override
|
||||
public void beforeRefresh() throws IOException {
|
||||
}
|
||||
searcherManager.addListener(
|
||||
new RefreshListener() {
|
||||
@Override
|
||||
public void beforeRefresh() throws IOException {}
|
||||
|
||||
@Override
|
||||
public void afterRefresh(boolean didRefresh) throws IOException {
|
||||
for (NrtFuture f : notDoneNrtFutures) {
|
||||
f.removeIfDone();
|
||||
}
|
||||
}
|
||||
});
|
||||
@Override
|
||||
public void afterRefresh(boolean didRefresh) throws IOException {
|
||||
for (NrtFuture f : notDoneNrtFutures) {
|
||||
f.removeIfDone();
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
reopenThread.start();
|
||||
}
|
||||
@@ -202,8 +211,7 @@ public abstract class AbstractLuceneIndex<K, V> implements Index<K, V> {
|
||||
log.warn("shutting down " + name + " index with pending Lucene writes");
|
||||
}
|
||||
} catch (InterruptedException e) {
|
||||
log.warn("interrupted waiting for pending Lucene writes of " + name +
|
||||
" index", e);
|
||||
log.warn("interrupted waiting for pending Lucene writes of " + name + " index", e);
|
||||
}
|
||||
reopenThread.close();
|
||||
|
||||
@@ -235,44 +243,48 @@ public abstract class AbstractLuceneIndex<K, V> implements Index<K, V> {
|
||||
}
|
||||
|
||||
ListenableFuture<?> insert(final Document doc) {
|
||||
return submit(new Callable<Long>() {
|
||||
@Override
|
||||
public Long call() throws IOException, InterruptedException {
|
||||
return writer.addDocument(doc);
|
||||
}
|
||||
});
|
||||
return submit(
|
||||
new Callable<Long>() {
|
||||
@Override
|
||||
public Long call() throws IOException, InterruptedException {
|
||||
return writer.addDocument(doc);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
ListenableFuture<?> replace(final Term term, final Document doc) {
|
||||
return submit(new Callable<Long>() {
|
||||
@Override
|
||||
public Long call() throws IOException, InterruptedException {
|
||||
return writer.updateDocument(term, doc);
|
||||
}
|
||||
});
|
||||
return submit(
|
||||
new Callable<Long>() {
|
||||
@Override
|
||||
public Long call() throws IOException, InterruptedException {
|
||||
return writer.updateDocument(term, doc);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
ListenableFuture<?> delete(final Term term) {
|
||||
return submit(new Callable<Long>() {
|
||||
@Override
|
||||
public Long call() throws IOException, InterruptedException {
|
||||
return writer.deleteDocuments(term);
|
||||
}
|
||||
});
|
||||
return submit(
|
||||
new Callable<Long>() {
|
||||
@Override
|
||||
public Long call() throws IOException, InterruptedException {
|
||||
return writer.deleteDocuments(term);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private ListenableFuture<?> submit(Callable<Long> task) {
|
||||
ListenableFuture<Long> future =
|
||||
Futures.nonCancellationPropagating(writerThread.submit(task));
|
||||
return Futures.transformAsync(future, new AsyncFunction<Long, Void>() {
|
||||
@Override
|
||||
public ListenableFuture<Void> apply(Long gen) throws InterruptedException {
|
||||
// Tell the reopen thread a future is waiting on this
|
||||
// generation so it uses the min stale time when refreshing.
|
||||
reopenThread.waitForGeneration(gen, 0);
|
||||
return new NrtFuture(gen);
|
||||
}
|
||||
});
|
||||
ListenableFuture<Long> future = Futures.nonCancellationPropagating(writerThread.submit(task));
|
||||
return Futures.transformAsync(
|
||||
future,
|
||||
new AsyncFunction<Long, Void>() {
|
||||
@Override
|
||||
public ListenableFuture<Void> apply(Long gen) throws InterruptedException {
|
||||
// Tell the reopen thread a future is waiting on this
|
||||
// generation so it uses the min stale time when refreshing.
|
||||
reopenThread.waitForGeneration(gen, 0);
|
||||
return new NrtFuture(gen);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -319,8 +331,7 @@ public abstract class AbstractLuceneIndex<K, V> implements Index<K, V> {
|
||||
for (Object value : values.getValues()) {
|
||||
doc.add(new LongField(name, ((Timestamp) value).getTime(), store));
|
||||
}
|
||||
} else if (type == FieldType.EXACT
|
||||
|| type == FieldType.PREFIX) {
|
||||
} else if (type == FieldType.EXACT || type == FieldType.PREFIX) {
|
||||
for (Object value : values.getValues()) {
|
||||
doc.add(new StringField(name, (String) value, store));
|
||||
}
|
||||
@@ -358,8 +369,8 @@ public abstract class AbstractLuceneIndex<K, V> implements Index<K, V> {
|
||||
}
|
||||
|
||||
@Override
|
||||
public Void get(long timeout, TimeUnit unit) throws InterruptedException,
|
||||
TimeoutException, ExecutionException {
|
||||
public Void get(long timeout, TimeUnit unit)
|
||||
throws InterruptedException, TimeoutException, ExecutionException {
|
||||
if (!isDone()) {
|
||||
if (!reopenThread.waitForGeneration(gen, (int) unit.toMillis(timeout))) {
|
||||
throw new TimeoutException();
|
||||
|
@@ -14,6 +14,7 @@
|
||||
|
||||
package com.google.gerrit.lucene;
|
||||
|
||||
import java.io.IOException;
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.index.IndexWriter;
|
||||
import org.apache.lucene.index.IndexWriterConfig;
|
||||
@@ -22,26 +23,22 @@ import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.store.Directory;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/** Writer that optionally flushes/commits after every write. */
|
||||
public class AutoCommitWriter extends IndexWriter {
|
||||
private boolean autoCommit;
|
||||
|
||||
AutoCommitWriter(Directory dir, IndexWriterConfig config)
|
||||
throws IOException {
|
||||
AutoCommitWriter(Directory dir, IndexWriterConfig config) throws IOException {
|
||||
this(dir, config, false);
|
||||
}
|
||||
|
||||
AutoCommitWriter(Directory dir, IndexWriterConfig config, boolean autoCommit)
|
||||
throws IOException {
|
||||
AutoCommitWriter(Directory dir, IndexWriterConfig config, boolean autoCommit) throws IOException {
|
||||
super(dir, config);
|
||||
setAutoCommit(autoCommit);
|
||||
}
|
||||
|
||||
/**
|
||||
* This method will override Gerrit configuration index.name.commitWithin
|
||||
* until next Gerrit restart (or reconfiguration through this method).
|
||||
* This method will override Gerrit configuration index.name.commitWithin until next Gerrit
|
||||
* restart (or reconfiguration through this method).
|
||||
*
|
||||
* @param enable auto commit
|
||||
*/
|
||||
@@ -50,23 +47,21 @@ public class AutoCommitWriter extends IndexWriter {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addDocument(Iterable<? extends IndexableField> doc)
|
||||
throws IOException {
|
||||
public void addDocument(Iterable<? extends IndexableField> doc) throws IOException {
|
||||
super.addDocument(doc);
|
||||
autoFlush();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addDocuments(
|
||||
Iterable<? extends Iterable<? extends IndexableField>> docs)
|
||||
public void addDocuments(Iterable<? extends Iterable<? extends IndexableField>> docs)
|
||||
throws IOException {
|
||||
super.addDocuments(docs);
|
||||
autoFlush();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void updateDocuments(Term delTerm,
|
||||
Iterable<? extends Iterable<? extends IndexableField>> docs)
|
||||
public void updateDocuments(
|
||||
Term delTerm, Iterable<? extends Iterable<? extends IndexableField>> docs)
|
||||
throws IOException {
|
||||
super.updateDocuments(delTerm, docs);
|
||||
autoFlush();
|
||||
@@ -95,8 +90,7 @@ public class AutoCommitWriter extends IndexWriter {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void updateDocument(Term term, Iterable<? extends IndexableField> doc)
|
||||
throws IOException {
|
||||
public void updateDocument(Term term, Iterable<? extends IndexableField> doc) throws IOException {
|
||||
super.updateDocument(term, doc);
|
||||
autoFlush();
|
||||
}
|
||||
|
@@ -31,17 +31,15 @@ import com.google.gerrit.server.query.DataSource;
|
||||
import com.google.gerrit.server.query.Predicate;
|
||||
import com.google.gerrit.server.query.QueryParseException;
|
||||
import com.google.gerrit.server.query.change.ChangeData;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.sql.Timestamp;
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.document.NumericDocValuesField;
|
||||
import org.apache.lucene.search.SearcherFactory;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.store.FSDirectory;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.sql.Timestamp;
|
||||
|
||||
public class ChangeSubIndex extends AbstractLuceneIndex<Change.Id, ChangeData>
|
||||
implements ChangeIndex {
|
||||
ChangeSubIndex(
|
||||
@@ -49,9 +47,15 @@ public class ChangeSubIndex extends AbstractLuceneIndex<Change.Id, ChangeData>
|
||||
SitePaths sitePaths,
|
||||
Path path,
|
||||
GerritIndexWriterConfig writerConfig,
|
||||
SearcherFactory searcherFactory) throws IOException {
|
||||
this(schema, sitePaths, FSDirectory.open(path),
|
||||
path.getFileName().toString(), writerConfig, searcherFactory);
|
||||
SearcherFactory searcherFactory)
|
||||
throws IOException {
|
||||
this(
|
||||
schema,
|
||||
sitePaths,
|
||||
FSDirectory.open(path),
|
||||
path.getFileName().toString(),
|
||||
writerConfig,
|
||||
searcherFactory);
|
||||
}
|
||||
|
||||
ChangeSubIndex(
|
||||
@@ -60,28 +64,25 @@ public class ChangeSubIndex extends AbstractLuceneIndex<Change.Id, ChangeData>
|
||||
Directory dir,
|
||||
String subIndex,
|
||||
GerritIndexWriterConfig writerConfig,
|
||||
SearcherFactory searcherFactory) throws IOException {
|
||||
super(schema, sitePaths, dir, NAME, subIndex, writerConfig,
|
||||
searcherFactory);
|
||||
SearcherFactory searcherFactory)
|
||||
throws IOException {
|
||||
super(schema, sitePaths, dir, NAME, subIndex, writerConfig, searcherFactory);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void replace(ChangeData obj) throws IOException {
|
||||
throw new UnsupportedOperationException(
|
||||
"don't use ChangeSubIndex directly");
|
||||
throw new UnsupportedOperationException("don't use ChangeSubIndex directly");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void delete(Change.Id key) throws IOException {
|
||||
throw new UnsupportedOperationException(
|
||||
"don't use ChangeSubIndex directly");
|
||||
throw new UnsupportedOperationException("don't use ChangeSubIndex directly");
|
||||
}
|
||||
|
||||
@Override
|
||||
public DataSource<ChangeData> getSource(Predicate<ChangeData> p,
|
||||
QueryOptions opts) throws QueryParseException {
|
||||
throw new UnsupportedOperationException(
|
||||
"don't use ChangeSubIndex directly");
|
||||
public DataSource<ChangeData> getSource(Predicate<ChangeData> p, QueryOptions opts)
|
||||
throws QueryParseException {
|
||||
throw new UnsupportedOperationException("don't use ChangeSubIndex directly");
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@@ -14,36 +14,32 @@
|
||||
|
||||
package com.google.gerrit.lucene;
|
||||
|
||||
import java.io.Reader;
|
||||
import java.util.Map;
|
||||
import org.apache.lucene.analysis.Analyzer;
|
||||
import org.apache.lucene.analysis.AnalyzerWrapper;
|
||||
import org.apache.lucene.analysis.charfilter.MappingCharFilter;
|
||||
import org.apache.lucene.analysis.charfilter.NormalizeCharMap;
|
||||
|
||||
import java.io.Reader;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* This analyzer can be used to provide custom char mappings.
|
||||
*
|
||||
* <p>Example usage:
|
||||
*
|
||||
* <pre class="prettyprint">
|
||||
* {@code
|
||||
* <pre class="prettyprint">{@code
|
||||
* Map<String,String> customMapping = new HashMap<>();
|
||||
* customMapping.put("_", " ");
|
||||
* customMapping.put(".", " ");
|
||||
*
|
||||
* CustomMappingAnalyzer analyzer =
|
||||
* new CustomMappingAnalyzer(new StandardAnalyzer(version), customMapping);
|
||||
* }
|
||||
* </pre>
|
||||
* }</pre>
|
||||
*/
|
||||
public class CustomMappingAnalyzer extends AnalyzerWrapper {
|
||||
private Analyzer delegate;
|
||||
private Map<String, String> customMappings;
|
||||
|
||||
public CustomMappingAnalyzer(Analyzer delegate,
|
||||
Map<String, String> customMappings) {
|
||||
public CustomMappingAnalyzer(Analyzer delegate, Map<String, String> customMappings) {
|
||||
super(delegate.getReuseStrategy());
|
||||
this.delegate = delegate;
|
||||
this.customMappings = customMappings;
|
||||
|
@@ -19,19 +19,14 @@ import static java.util.concurrent.TimeUnit.MINUTES;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import com.google.gerrit.server.config.ConfigUtil;
|
||||
|
||||
import java.util.Map;
|
||||
import org.apache.lucene.analysis.standard.StandardAnalyzer;
|
||||
import org.apache.lucene.analysis.util.CharArraySet;
|
||||
import org.apache.lucene.index.IndexWriterConfig;
|
||||
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
|
||||
import org.eclipse.jgit.lib.Config;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Combination of Lucene {@link IndexWriterConfig} with additional
|
||||
* Gerrit-specific options.
|
||||
*/
|
||||
/** Combination of Lucene {@link IndexWriterConfig} with additional Gerrit-specific options. */
|
||||
class GerritIndexWriterConfig {
|
||||
private static final Map<String, String> CUSTOM_CHAR_MAPPING =
|
||||
ImmutableMap.of("_", " ", ".", " ");
|
||||
@@ -42,22 +37,26 @@ class GerritIndexWriterConfig {
|
||||
|
||||
GerritIndexWriterConfig(Config cfg, String name) {
|
||||
analyzer =
|
||||
new CustomMappingAnalyzer(new StandardAnalyzer(
|
||||
CharArraySet.EMPTY_SET), CUSTOM_CHAR_MAPPING);
|
||||
luceneConfig = new IndexWriterConfig(analyzer)
|
||||
.setOpenMode(OpenMode.CREATE_OR_APPEND)
|
||||
.setCommitOnClose(true);
|
||||
new CustomMappingAnalyzer(
|
||||
new StandardAnalyzer(CharArraySet.EMPTY_SET), CUSTOM_CHAR_MAPPING);
|
||||
luceneConfig =
|
||||
new IndexWriterConfig(analyzer)
|
||||
.setOpenMode(OpenMode.CREATE_OR_APPEND)
|
||||
.setCommitOnClose(true);
|
||||
double m = 1 << 20;
|
||||
luceneConfig.setRAMBufferSizeMB(cfg.getLong(
|
||||
"index", name, "ramBufferSize",
|
||||
(long) (IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB * m)) / m);
|
||||
luceneConfig.setMaxBufferedDocs(cfg.getInt(
|
||||
"index", name, "maxBufferedDocs",
|
||||
IndexWriterConfig.DEFAULT_MAX_BUFFERED_DOCS));
|
||||
luceneConfig.setRAMBufferSizeMB(
|
||||
cfg.getLong(
|
||||
"index",
|
||||
name,
|
||||
"ramBufferSize",
|
||||
(long) (IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB * m))
|
||||
/ m);
|
||||
luceneConfig.setMaxBufferedDocs(
|
||||
cfg.getInt("index", name, "maxBufferedDocs", IndexWriterConfig.DEFAULT_MAX_BUFFERED_DOCS));
|
||||
try {
|
||||
commitWithinMs =
|
||||
ConfigUtil.getTimeUnit(cfg, "index", name, "commitWithin",
|
||||
MILLISECONDS.convert(5, MINUTES), MILLISECONDS);
|
||||
ConfigUtil.getTimeUnit(
|
||||
cfg, "index", name, "commitWithin", MILLISECONDS.convert(5, MINUTES), MILLISECONDS);
|
||||
} catch (IllegalArgumentException e) {
|
||||
commitWithinMs = cfg.getLong("index", name, "commitWithin", 0);
|
||||
}
|
||||
|
@@ -33,7 +33,13 @@ import com.google.gwtorm.server.ResultSet;
|
||||
import com.google.inject.Inject;
|
||||
import com.google.inject.Provider;
|
||||
import com.google.inject.assistedinject.Assisted;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.search.IndexSearcher;
|
||||
@@ -50,19 +56,9 @@ import org.eclipse.jgit.lib.Config;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
|
||||
public class LuceneAccountIndex
|
||||
extends AbstractLuceneIndex<Account.Id, AccountState>
|
||||
public class LuceneAccountIndex extends AbstractLuceneIndex<Account.Id, AccountState>
|
||||
implements AccountIndex {
|
||||
private static final Logger log =
|
||||
LoggerFactory.getLogger(LuceneAccountIndex.class);
|
||||
private static final Logger log = LoggerFactory.getLogger(LuceneAccountIndex.class);
|
||||
|
||||
private static final String ACCOUNTS = "accounts";
|
||||
|
||||
@@ -80,13 +76,12 @@ public class LuceneAccountIndex
|
||||
private final QueryBuilder<AccountState> queryBuilder;
|
||||
private final Provider<AccountCache> accountCache;
|
||||
|
||||
private static Directory dir(Schema<AccountState> schema, Config cfg,
|
||||
SitePaths sitePaths) throws IOException {
|
||||
private static Directory dir(Schema<AccountState> schema, Config cfg, SitePaths sitePaths)
|
||||
throws IOException {
|
||||
if (LuceneIndexModule.isInMemoryTest(cfg)) {
|
||||
return new RAMDirectory();
|
||||
}
|
||||
Path indexDir =
|
||||
LuceneVersionManager.getDir(sitePaths, ACCOUNTS + "_", schema);
|
||||
Path indexDir = LuceneVersionManager.getDir(sitePaths, ACCOUNTS + "_", schema);
|
||||
return FSDirectory.open(indexDir);
|
||||
}
|
||||
|
||||
@@ -95,13 +90,19 @@ public class LuceneAccountIndex
|
||||
@GerritServerConfig Config cfg,
|
||||
SitePaths sitePaths,
|
||||
Provider<AccountCache> accountCache,
|
||||
@Assisted Schema<AccountState> schema) throws IOException {
|
||||
super(schema, sitePaths, dir(schema, cfg, sitePaths), ACCOUNTS, null,
|
||||
new GerritIndexWriterConfig(cfg, ACCOUNTS), new SearcherFactory());
|
||||
@Assisted Schema<AccountState> schema)
|
||||
throws IOException {
|
||||
super(
|
||||
schema,
|
||||
sitePaths,
|
||||
dir(schema, cfg, sitePaths),
|
||||
ACCOUNTS,
|
||||
null,
|
||||
new GerritIndexWriterConfig(cfg, ACCOUNTS),
|
||||
new SearcherFactory());
|
||||
this.accountCache = accountCache;
|
||||
|
||||
indexWriterConfig =
|
||||
new GerritIndexWriterConfig(cfg, ACCOUNTS);
|
||||
indexWriterConfig = new GerritIndexWriterConfig(cfg, ACCOUNTS);
|
||||
queryBuilder = new QueryBuilder<>(schema, indexWriterConfig.getAnalyzer());
|
||||
}
|
||||
|
||||
@@ -125,13 +126,12 @@ public class LuceneAccountIndex
|
||||
}
|
||||
|
||||
@Override
|
||||
public DataSource<AccountState> getSource(Predicate<AccountState> p,
|
||||
QueryOptions opts) throws QueryParseException {
|
||||
public DataSource<AccountState> getSource(Predicate<AccountState> p, QueryOptions opts)
|
||||
throws QueryParseException {
|
||||
return new QuerySource(
|
||||
opts,
|
||||
queryBuilder.toQuery(p),
|
||||
new Sort(
|
||||
new SortField(ID_SORT_FIELD, SortField.Type.LONG, true)));
|
||||
new Sort(new SortField(ID_SORT_FIELD, SortField.Type.LONG, true)));
|
||||
}
|
||||
|
||||
private class QuerySource implements DataSource<AccountState> {
|
||||
@@ -198,8 +198,7 @@ public class LuceneAccountIndex
|
||||
}
|
||||
|
||||
private AccountState toAccountState(Document doc) {
|
||||
Account.Id id =
|
||||
new Account.Id(doc.getField(ID.getName()).numericValue().intValue());
|
||||
Account.Id id = new Account.Id(doc.getField(ID.getName()).numericValue().intValue());
|
||||
// Use the AccountCache rather than depending on any stored fields in the
|
||||
// document (of which there shouldn't be any). The most expensive part to
|
||||
// compute anyway is the effective group IDs, and we don't have a good way
|
||||
|
@@ -64,7 +64,18 @@ import com.google.gwtorm.server.ResultSet;
|
||||
import com.google.inject.Provider;
|
||||
import com.google.inject.assistedinject.Assisted;
|
||||
import com.google.inject.assistedinject.AssistedInject;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.Callable;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.Future;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.index.IndexWriter;
|
||||
import org.apache.lucene.index.IndexableField;
|
||||
@@ -84,30 +95,15 @@ import org.eclipse.jgit.lib.Config;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.Callable;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.Future;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
/**
|
||||
* Secondary index implementation using Apache Lucene.
|
||||
* <p>
|
||||
* Writes are managed using a single {@link IndexWriter} per process, committed
|
||||
* aggressively. Reads use {@link SearcherManager} and periodically refresh,
|
||||
* though there may be some lag between a committed write and it showing up to
|
||||
* other threads' searchers.
|
||||
*
|
||||
* <p>Writes are managed using a single {@link IndexWriter} per process, committed aggressively.
|
||||
* Reads use {@link SearcherManager} and periodically refresh, though there may be some lag between
|
||||
* a committed write and it showing up to other threads' searchers.
|
||||
*/
|
||||
public class LuceneChangeIndex implements ChangeIndex {
|
||||
private static final Logger log =
|
||||
LoggerFactory.getLogger(LuceneChangeIndex.class);
|
||||
private static final Logger log = LoggerFactory.getLogger(LuceneChangeIndex.class);
|
||||
|
||||
static final String UPDATED_SORT_FIELD = sortFieldName(ChangeField.UPDATED);
|
||||
static final String ID_SORT_FIELD = sortFieldName(ChangeField.LEGACY_ID);
|
||||
@@ -122,13 +118,10 @@ public class LuceneChangeIndex implements ChangeIndex {
|
||||
private static final String MERGEABLE_FIELD = ChangeField.MERGEABLE.getName();
|
||||
private static final String PATCH_SET_FIELD = ChangeField.PATCH_SET.getName();
|
||||
private static final String REF_STATE_FIELD = ChangeField.REF_STATE.getName();
|
||||
private static final String REF_STATE_PATTERN_FIELD =
|
||||
ChangeField.REF_STATE_PATTERN.getName();
|
||||
private static final String REVIEWEDBY_FIELD =
|
||||
ChangeField.REVIEWEDBY.getName();
|
||||
private static final String REF_STATE_PATTERN_FIELD = ChangeField.REF_STATE_PATTERN.getName();
|
||||
private static final String REVIEWEDBY_FIELD = ChangeField.REVIEWEDBY.getName();
|
||||
private static final String REVIEWER_FIELD = ChangeField.REVIEWER.getName();
|
||||
private static final String HASHTAG_FIELD =
|
||||
ChangeField.HASHTAG_CASE_AWARE.getName();
|
||||
private static final String HASHTAG_FIELD = ChangeField.HASHTAG_CASE_AWARE.getName();
|
||||
private static final String STAR_FIELD = ChangeField.STAR.getName();
|
||||
private static final String SUBMIT_RECORD_LENIENT_FIELD =
|
||||
ChangeField.STORED_SUBMIT_RECORD_LENIENT.getName();
|
||||
@@ -156,43 +149,45 @@ public class LuceneChangeIndex implements ChangeIndex {
|
||||
LuceneChangeIndex(
|
||||
@GerritServerConfig Config cfg,
|
||||
SitePaths sitePaths,
|
||||
@IndexExecutor(INTERACTIVE) ListeningExecutorService executor,
|
||||
@IndexExecutor(INTERACTIVE) ListeningExecutorService executor,
|
||||
Provider<ReviewDb> db,
|
||||
ChangeData.Factory changeDataFactory,
|
||||
FillArgs fillArgs,
|
||||
@Assisted Schema<ChangeData> schema) throws IOException {
|
||||
@Assisted Schema<ChangeData> schema)
|
||||
throws IOException {
|
||||
this.fillArgs = fillArgs;
|
||||
this.executor = executor;
|
||||
this.db = db;
|
||||
this.changeDataFactory = changeDataFactory;
|
||||
this.schema = schema;
|
||||
|
||||
GerritIndexWriterConfig openConfig =
|
||||
new GerritIndexWriterConfig(cfg, "changes_open");
|
||||
GerritIndexWriterConfig closedConfig =
|
||||
new GerritIndexWriterConfig(cfg, "changes_closed");
|
||||
GerritIndexWriterConfig openConfig = new GerritIndexWriterConfig(cfg, "changes_open");
|
||||
GerritIndexWriterConfig closedConfig = new GerritIndexWriterConfig(cfg, "changes_closed");
|
||||
|
||||
queryBuilder = new QueryBuilder<>(schema, openConfig.getAnalyzer());
|
||||
|
||||
SearcherFactory searcherFactory = new SearcherFactory();
|
||||
if (LuceneIndexModule.isInMemoryTest(cfg)) {
|
||||
openIndex = new ChangeSubIndex(schema, sitePaths, new RAMDirectory(),
|
||||
"ramOpen", openConfig, searcherFactory);
|
||||
closedIndex = new ChangeSubIndex(schema, sitePaths, new RAMDirectory(),
|
||||
"ramClosed", closedConfig, searcherFactory);
|
||||
openIndex =
|
||||
new ChangeSubIndex(
|
||||
schema, sitePaths, new RAMDirectory(), "ramOpen", openConfig, searcherFactory);
|
||||
closedIndex =
|
||||
new ChangeSubIndex(
|
||||
schema, sitePaths, new RAMDirectory(), "ramClosed", closedConfig, searcherFactory);
|
||||
} else {
|
||||
Path dir = LuceneVersionManager.getDir(sitePaths, CHANGES_PREFIX, schema);
|
||||
openIndex = new ChangeSubIndex(schema, sitePaths,
|
||||
dir.resolve(CHANGES_OPEN), openConfig, searcherFactory);
|
||||
closedIndex = new ChangeSubIndex(schema, sitePaths,
|
||||
dir.resolve(CHANGES_CLOSED), closedConfig, searcherFactory);
|
||||
openIndex =
|
||||
new ChangeSubIndex(
|
||||
schema, sitePaths, dir.resolve(CHANGES_OPEN), openConfig, searcherFactory);
|
||||
closedIndex =
|
||||
new ChangeSubIndex(
|
||||
schema, sitePaths, dir.resolve(CHANGES_CLOSED), closedConfig, searcherFactory);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void stop() {
|
||||
MoreExecutors.shutdownAndAwaitTermination(
|
||||
executor, Long.MAX_VALUE, TimeUnit.SECONDS);
|
||||
MoreExecutors.shutdownAndAwaitTermination(executor, Long.MAX_VALUE, TimeUnit.SECONDS);
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -217,13 +212,9 @@ public class LuceneChangeIndex implements ChangeIndex {
|
||||
Document doc = openIndex.toDocument(cd, fillArgs);
|
||||
try {
|
||||
if (cd.change().getStatus().isOpen()) {
|
||||
Futures.allAsList(
|
||||
closedIndex.delete(id),
|
||||
openIndex.replace(id, doc)).get();
|
||||
Futures.allAsList(closedIndex.delete(id), openIndex.replace(id, doc)).get();
|
||||
} else {
|
||||
Futures.allAsList(
|
||||
openIndex.delete(id),
|
||||
closedIndex.replace(id, doc)).get();
|
||||
Futures.allAsList(openIndex.delete(id), closedIndex.replace(id, doc)).get();
|
||||
}
|
||||
} catch (OrmException | ExecutionException | InterruptedException e) {
|
||||
throw new IOException(e);
|
||||
@@ -234,9 +225,7 @@ public class LuceneChangeIndex implements ChangeIndex {
|
||||
public void delete(Change.Id id) throws IOException {
|
||||
Term idTerm = LuceneChangeIndex.idTerm(id);
|
||||
try {
|
||||
Futures.allAsList(
|
||||
openIndex.delete(idTerm),
|
||||
closedIndex.delete(idTerm)).get();
|
||||
Futures.allAsList(openIndex.delete(idTerm), closedIndex.delete(idTerm)).get();
|
||||
} catch (ExecutionException | InterruptedException e) {
|
||||
throw new IOException(e);
|
||||
}
|
||||
@@ -286,14 +275,12 @@ public class LuceneChangeIndex implements ChangeIndex {
|
||||
private final QueryOptions opts;
|
||||
private final Sort sort;
|
||||
|
||||
|
||||
private QuerySource(List<ChangeSubIndex> indexes,
|
||||
Predicate<ChangeData> predicate, QueryOptions opts, Sort sort)
|
||||
private QuerySource(
|
||||
List<ChangeSubIndex> indexes, Predicate<ChangeData> predicate, QueryOptions opts, Sort sort)
|
||||
throws QueryParseException {
|
||||
this.indexes = indexes;
|
||||
this.predicate = predicate;
|
||||
this.query = checkNotNull(queryBuilder.toQuery(predicate),
|
||||
"null query from Lucene");
|
||||
this.query = checkNotNull(queryBuilder.toQuery(predicate), "null query from Lucene");
|
||||
this.opts = opts;
|
||||
this.sort = sort;
|
||||
}
|
||||
@@ -322,17 +309,19 @@ public class LuceneChangeIndex implements ChangeIndex {
|
||||
|
||||
final Set<String> fields = IndexUtils.changeFields(opts);
|
||||
return new ChangeDataResults(
|
||||
executor.submit(new Callable<List<Document>>() {
|
||||
@Override
|
||||
public List<Document> call() throws IOException {
|
||||
return doRead(fields);
|
||||
}
|
||||
executor.submit(
|
||||
new Callable<List<Document>>() {
|
||||
@Override
|
||||
public List<Document> call() throws IOException {
|
||||
return doRead(fields);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return predicate.toString();
|
||||
}
|
||||
}), fields);
|
||||
@Override
|
||||
public String toString() {
|
||||
return predicate.toString();
|
||||
}
|
||||
}),
|
||||
fields);
|
||||
}
|
||||
|
||||
private List<Document> doRead(Set<String> fields) throws IOException {
|
||||
@@ -408,8 +397,7 @@ public class LuceneChangeIndex implements ChangeIndex {
|
||||
}
|
||||
}
|
||||
|
||||
private static ListMultimap<String, IndexableField> fields(Document doc,
|
||||
Set<String> fields) {
|
||||
private static ListMultimap<String, IndexableField> fields(Document doc, Set<String> fields) {
|
||||
ListMultimap<String, IndexableField> stored =
|
||||
MultimapBuilder.hashKeys(fields.size()).arrayListValues(4).build();
|
||||
for (IndexableField f : doc) {
|
||||
@@ -421,16 +409,17 @@ public class LuceneChangeIndex implements ChangeIndex {
|
||||
return stored;
|
||||
}
|
||||
|
||||
private ChangeData toChangeData(ListMultimap<String, IndexableField> doc,
|
||||
Set<String> fields, String idFieldName) {
|
||||
private ChangeData toChangeData(
|
||||
ListMultimap<String, IndexableField> doc, Set<String> fields, String idFieldName) {
|
||||
ChangeData cd;
|
||||
// Either change or the ID field was guaranteed to be included in the call
|
||||
// to fields() above.
|
||||
IndexableField cb = Iterables.getFirst(doc.get(CHANGE_FIELD), null);
|
||||
if (cb != null) {
|
||||
BytesRef proto = cb.binaryValue();
|
||||
cd = changeDataFactory.create(db.get(),
|
||||
ChangeProtoField.CODEC.decode(proto.bytes, proto.offset, proto.length));
|
||||
cd =
|
||||
changeDataFactory.create(
|
||||
db.get(), ChangeProtoField.CODEC.decode(proto.bytes, proto.offset, proto.length));
|
||||
} else {
|
||||
IndexableField f = Iterables.getFirst(doc.get(idFieldName), null);
|
||||
Change.Id id = new Change.Id(f.numericValue().intValue());
|
||||
@@ -440,8 +429,7 @@ public class LuceneChangeIndex implements ChangeIndex {
|
||||
// disabled.
|
||||
cd = changeDataFactory.createOnlyWhenNoteDbDisabled(db.get(), id);
|
||||
} else {
|
||||
cd = changeDataFactory.create(
|
||||
db.get(), new Project.NameKey(project.stringValue()), id);
|
||||
cd = changeDataFactory.create(db.get(), new Project.NameKey(project.stringValue()), id);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -469,10 +457,10 @@ public class LuceneChangeIndex implements ChangeIndex {
|
||||
if (fields.contains(REVIEWER_FIELD)) {
|
||||
decodeReviewers(doc, cd);
|
||||
}
|
||||
decodeSubmitRecords(doc, SUBMIT_RECORD_STRICT_FIELD,
|
||||
ChangeField.SUBMIT_RULE_OPTIONS_STRICT, cd);
|
||||
decodeSubmitRecords(doc, SUBMIT_RECORD_LENIENT_FIELD,
|
||||
ChangeField.SUBMIT_RULE_OPTIONS_LENIENT, cd);
|
||||
decodeSubmitRecords(
|
||||
doc, SUBMIT_RECORD_STRICT_FIELD, ChangeField.SUBMIT_RULE_OPTIONS_STRICT, cd);
|
||||
decodeSubmitRecords(
|
||||
doc, SUBMIT_RECORD_LENIENT_FIELD, ChangeField.SUBMIT_RULE_OPTIONS_LENIENT, cd);
|
||||
if (fields.contains(REF_STATE_FIELD)) {
|
||||
decodeRefStates(doc, cd);
|
||||
}
|
||||
@@ -482,10 +470,8 @@ public class LuceneChangeIndex implements ChangeIndex {
|
||||
return cd;
|
||||
}
|
||||
|
||||
private void decodePatchSets(ListMultimap<String, IndexableField> doc,
|
||||
ChangeData cd) {
|
||||
List<PatchSet> patchSets =
|
||||
decodeProtos(doc, PATCH_SET_FIELD, PatchSetProtoField.CODEC);
|
||||
private void decodePatchSets(ListMultimap<String, IndexableField> doc, ChangeData cd) {
|
||||
List<PatchSet> patchSets = decodeProtos(doc, PATCH_SET_FIELD, PatchSetProtoField.CODEC);
|
||||
if (!patchSets.isEmpty()) {
|
||||
// Will be an empty list for schemas prior to when this field was stored;
|
||||
// this cannot be valid since a change needs at least one patch set.
|
||||
@@ -493,20 +479,15 @@ public class LuceneChangeIndex implements ChangeIndex {
|
||||
}
|
||||
}
|
||||
|
||||
private void decodeApprovals(ListMultimap<String, IndexableField> doc,
|
||||
ChangeData cd) {
|
||||
cd.setCurrentApprovals(
|
||||
decodeProtos(doc, APPROVAL_FIELD, PatchSetApprovalProtoField.CODEC));
|
||||
private void decodeApprovals(ListMultimap<String, IndexableField> doc, ChangeData cd) {
|
||||
cd.setCurrentApprovals(decodeProtos(doc, APPROVAL_FIELD, PatchSetApprovalProtoField.CODEC));
|
||||
}
|
||||
|
||||
private void decodeChangedLines(ListMultimap<String, IndexableField> doc,
|
||||
ChangeData cd) {
|
||||
private void decodeChangedLines(ListMultimap<String, IndexableField> doc, ChangeData cd) {
|
||||
IndexableField added = Iterables.getFirst(doc.get(ADDED_FIELD), null);
|
||||
IndexableField deleted = Iterables.getFirst(doc.get(DELETED_FIELD), null);
|
||||
if (added != null && deleted != null) {
|
||||
cd.setChangedLines(
|
||||
added.numericValue().intValue(),
|
||||
deleted.numericValue().intValue());
|
||||
cd.setChangedLines(added.numericValue().intValue(), deleted.numericValue().intValue());
|
||||
} else {
|
||||
// No ChangedLines stored, likely due to failure during reindexing, for
|
||||
// example due to LargeObjectException. But we know the field was
|
||||
@@ -516,8 +497,7 @@ public class LuceneChangeIndex implements ChangeIndex {
|
||||
}
|
||||
}
|
||||
|
||||
private void decodeMergeable(ListMultimap<String, IndexableField> doc,
|
||||
ChangeData cd) {
|
||||
private void decodeMergeable(ListMultimap<String, IndexableField> doc, ChangeData cd) {
|
||||
IndexableField f = Iterables.getFirst(doc.get(MERGEABLE_FIELD), null);
|
||||
if (f != null) {
|
||||
String mergeable = f.stringValue();
|
||||
@@ -529,12 +509,10 @@ public class LuceneChangeIndex implements ChangeIndex {
|
||||
}
|
||||
}
|
||||
|
||||
private void decodeReviewedBy(ListMultimap<String, IndexableField> doc,
|
||||
ChangeData cd) {
|
||||
private void decodeReviewedBy(ListMultimap<String, IndexableField> doc, ChangeData cd) {
|
||||
Collection<IndexableField> reviewedBy = doc.get(REVIEWEDBY_FIELD);
|
||||
if (reviewedBy.size() > 0) {
|
||||
Set<Account.Id> accounts =
|
||||
Sets.newHashSetWithExpectedSize(reviewedBy.size());
|
||||
Set<Account.Id> accounts = Sets.newHashSetWithExpectedSize(reviewedBy.size());
|
||||
for (IndexableField r : reviewedBy) {
|
||||
int id = r.numericValue().intValue();
|
||||
if (reviewedBy.size() == 1 && id == ChangeField.NOT_REVIEWED) {
|
||||
@@ -546,8 +524,7 @@ public class LuceneChangeIndex implements ChangeIndex {
|
||||
}
|
||||
}
|
||||
|
||||
private void decodeHashtags(ListMultimap<String, IndexableField> doc,
|
||||
ChangeData cd) {
|
||||
private void decodeHashtags(ListMultimap<String, IndexableField> doc, ChangeData cd) {
|
||||
Collection<IndexableField> hashtag = doc.get(HASHTAG_FIELD);
|
||||
Set<String> hashtags = Sets.newHashSetWithExpectedSize(hashtag.size());
|
||||
for (IndexableField r : hashtag) {
|
||||
@@ -556,14 +533,11 @@ public class LuceneChangeIndex implements ChangeIndex {
|
||||
cd.setHashtags(hashtags);
|
||||
}
|
||||
|
||||
private void decodeStar(ListMultimap<String, IndexableField> doc,
|
||||
ChangeData cd) {
|
||||
private void decodeStar(ListMultimap<String, IndexableField> doc, ChangeData cd) {
|
||||
Collection<IndexableField> star = doc.get(STAR_FIELD);
|
||||
ListMultimap<Account.Id, String> stars =
|
||||
MultimapBuilder.hashKeys().arrayListValues().build();
|
||||
ListMultimap<Account.Id, String> stars = MultimapBuilder.hashKeys().arrayListValues().build();
|
||||
for (IndexableField r : star) {
|
||||
StarredChangesUtil.StarField starField =
|
||||
StarredChangesUtil.StarField.parse(r.stringValue());
|
||||
StarredChangesUtil.StarField starField = StarredChangesUtil.StarField.parse(r.stringValue());
|
||||
if (starField != null) {
|
||||
stars.put(starField.accountId(), starField.label());
|
||||
}
|
||||
@@ -571,35 +545,31 @@ public class LuceneChangeIndex implements ChangeIndex {
|
||||
cd.setStars(stars);
|
||||
}
|
||||
|
||||
private void decodeReviewers(ListMultimap<String, IndexableField> doc,
|
||||
ChangeData cd) {
|
||||
private void decodeReviewers(ListMultimap<String, IndexableField> doc, ChangeData cd) {
|
||||
cd.setReviewers(
|
||||
ChangeField.parseReviewerFieldValues(
|
||||
FluentIterable.from(doc.get(REVIEWER_FIELD))
|
||||
.transform(IndexableField::stringValue)));
|
||||
FluentIterable.from(doc.get(REVIEWER_FIELD)).transform(IndexableField::stringValue)));
|
||||
}
|
||||
|
||||
private void decodeSubmitRecords(ListMultimap<String, IndexableField> doc,
|
||||
String field, SubmitRuleOptions opts, ChangeData cd) {
|
||||
ChangeField.parseSubmitRecords(
|
||||
Collections2.transform(
|
||||
doc.get(field), f -> f.binaryValue().utf8ToString()),
|
||||
opts, cd);
|
||||
}
|
||||
|
||||
private void decodeRefStates(ListMultimap<String, IndexableField> doc,
|
||||
private void decodeSubmitRecords(
|
||||
ListMultimap<String, IndexableField> doc,
|
||||
String field,
|
||||
SubmitRuleOptions opts,
|
||||
ChangeData cd) {
|
||||
ChangeField.parseSubmitRecords(
|
||||
Collections2.transform(doc.get(field), f -> f.binaryValue().utf8ToString()), opts, cd);
|
||||
}
|
||||
|
||||
private void decodeRefStates(ListMultimap<String, IndexableField> doc, ChangeData cd) {
|
||||
cd.setRefStates(copyAsBytes(doc.get(REF_STATE_FIELD)));
|
||||
}
|
||||
|
||||
private void decodeRefStatePatterns(ListMultimap<String, IndexableField> doc,
|
||||
ChangeData cd) {
|
||||
private void decodeRefStatePatterns(ListMultimap<String, IndexableField> doc, ChangeData cd) {
|
||||
cd.setRefStatePatterns(copyAsBytes(doc.get(REF_STATE_PATTERN_FIELD)));
|
||||
}
|
||||
|
||||
private static <T> List<T> decodeProtos(
|
||||
ListMultimap<String, IndexableField> doc,
|
||||
String fieldName, ProtobufCodec<T> codec) {
|
||||
ListMultimap<String, IndexableField> doc, String fieldName, ProtobufCodec<T> codec) {
|
||||
Collection<IndexableField> fields = doc.get(fieldName);
|
||||
if (fields.isEmpty()) {
|
||||
return Collections.emptyList();
|
||||
@@ -614,7 +584,8 @@ public class LuceneChangeIndex implements ChangeIndex {
|
||||
}
|
||||
|
||||
private static List<byte[]> copyAsBytes(Collection<IndexableField> fields) {
|
||||
return fields.stream()
|
||||
return fields
|
||||
.stream()
|
||||
.map(
|
||||
f -> {
|
||||
BytesRef ref = f.binaryValue();
|
||||
|
@@ -32,7 +32,13 @@ import com.google.gwtorm.server.ResultSet;
|
||||
import com.google.inject.Inject;
|
||||
import com.google.inject.Provider;
|
||||
import com.google.inject.assistedinject.Assisted;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.search.IndexSearcher;
|
||||
@@ -49,18 +55,9 @@ import org.eclipse.jgit.lib.Config;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
|
||||
public class LuceneGroupIndex extends
|
||||
AbstractLuceneIndex<AccountGroup.UUID, AccountGroup> implements GroupIndex {
|
||||
private static final Logger log =
|
||||
LoggerFactory.getLogger(LuceneGroupIndex.class);
|
||||
public class LuceneGroupIndex extends AbstractLuceneIndex<AccountGroup.UUID, AccountGroup>
|
||||
implements GroupIndex {
|
||||
private static final Logger log = LoggerFactory.getLogger(LuceneGroupIndex.class);
|
||||
|
||||
private static final String GROUPS = "groups";
|
||||
|
||||
@@ -78,13 +75,12 @@ public class LuceneGroupIndex extends
|
||||
private final QueryBuilder<AccountGroup> queryBuilder;
|
||||
private final Provider<GroupCache> groupCache;
|
||||
|
||||
private static Directory dir(Schema<AccountGroup> schema, Config cfg,
|
||||
SitePaths sitePaths) throws IOException {
|
||||
private static Directory dir(Schema<AccountGroup> schema, Config cfg, SitePaths sitePaths)
|
||||
throws IOException {
|
||||
if (LuceneIndexModule.isInMemoryTest(cfg)) {
|
||||
return new RAMDirectory();
|
||||
}
|
||||
Path indexDir =
|
||||
LuceneVersionManager.getDir(sitePaths, GROUPS + "_", schema);
|
||||
Path indexDir = LuceneVersionManager.getDir(sitePaths, GROUPS + "_", schema);
|
||||
return FSDirectory.open(indexDir);
|
||||
}
|
||||
|
||||
@@ -93,13 +89,19 @@ public class LuceneGroupIndex extends
|
||||
@GerritServerConfig Config cfg,
|
||||
SitePaths sitePaths,
|
||||
Provider<GroupCache> groupCache,
|
||||
@Assisted Schema<AccountGroup> schema) throws IOException {
|
||||
super(schema, sitePaths, dir(schema, cfg, sitePaths), GROUPS, null,
|
||||
new GerritIndexWriterConfig(cfg, GROUPS), new SearcherFactory());
|
||||
@Assisted Schema<AccountGroup> schema)
|
||||
throws IOException {
|
||||
super(
|
||||
schema,
|
||||
sitePaths,
|
||||
dir(schema, cfg, sitePaths),
|
||||
GROUPS,
|
||||
null,
|
||||
new GerritIndexWriterConfig(cfg, GROUPS),
|
||||
new SearcherFactory());
|
||||
this.groupCache = groupCache;
|
||||
|
||||
indexWriterConfig =
|
||||
new GerritIndexWriterConfig(cfg, GROUPS);
|
||||
indexWriterConfig = new GerritIndexWriterConfig(cfg, GROUPS);
|
||||
queryBuilder = new QueryBuilder<>(schema, indexWriterConfig.getAnalyzer());
|
||||
}
|
||||
|
||||
@@ -123,9 +125,11 @@ public class LuceneGroupIndex extends
|
||||
}
|
||||
|
||||
@Override
|
||||
public DataSource<AccountGroup> getSource(Predicate<AccountGroup> p,
|
||||
QueryOptions opts) throws QueryParseException {
|
||||
return new QuerySource(opts, queryBuilder.toQuery(p),
|
||||
public DataSource<AccountGroup> getSource(Predicate<AccountGroup> p, QueryOptions opts)
|
||||
throws QueryParseException {
|
||||
return new QuerySource(
|
||||
opts,
|
||||
queryBuilder.toQuery(p),
|
||||
new Sort(new SortField(UUID_SORT_FIELD, SortField.Type.STRING, false)));
|
||||
}
|
||||
|
||||
@@ -190,8 +194,7 @@ public class LuceneGroupIndex extends
|
||||
}
|
||||
|
||||
private AccountGroup toAccountGroup(Document doc) {
|
||||
AccountGroup.UUID uuid =
|
||||
new AccountGroup.UUID(doc.getField(UUID.getName()).stringValue());
|
||||
AccountGroup.UUID uuid = new AccountGroup.UUID(doc.getField(UUID.getName()).stringValue());
|
||||
// Use the GroupCache rather than depending on any stored fields in the
|
||||
// document (of which there shouldn't be any).
|
||||
return groupCache.get().get(uuid);
|
||||
|
@@ -26,15 +26,13 @@ import com.google.gerrit.server.index.group.GroupIndex;
|
||||
import com.google.inject.Provides;
|
||||
import com.google.inject.Singleton;
|
||||
import com.google.inject.assistedinject.FactoryModuleBuilder;
|
||||
|
||||
import java.util.Map;
|
||||
import org.apache.lucene.search.BooleanQuery;
|
||||
import org.eclipse.jgit.lib.Config;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
public class LuceneIndexModule extends LifecycleModule {
|
||||
public static LuceneIndexModule singleVersionAllLatest(int threads) {
|
||||
return new LuceneIndexModule(ImmutableMap.<String, Integer> of(), threads);
|
||||
return new LuceneIndexModule(ImmutableMap.<String, Integer>of(), threads);
|
||||
}
|
||||
|
||||
public static LuceneIndexModule singleVersionWithExplicitVersions(
|
||||
@@ -84,8 +82,8 @@ public class LuceneIndexModule extends LifecycleModule {
|
||||
@Provides
|
||||
@Singleton
|
||||
IndexConfig getIndexConfig(@GerritServerConfig Config cfg) {
|
||||
BooleanQuery.setMaxClauseCount(cfg.getInt("index", "maxTerms",
|
||||
BooleanQuery.getMaxClauseCount()));
|
||||
BooleanQuery.setMaxClauseCount(
|
||||
cfg.getInt("index", "maxTerms", BooleanQuery.getMaxClauseCount()));
|
||||
return IndexConfig.fromConfig(cfg);
|
||||
}
|
||||
|
||||
|
@@ -32,12 +32,6 @@ import com.google.gerrit.server.index.Schema;
|
||||
import com.google.inject.Inject;
|
||||
import com.google.inject.ProvisionException;
|
||||
import com.google.inject.Singleton;
|
||||
|
||||
import org.eclipse.jgit.errors.ConfigInvalidException;
|
||||
import org.eclipse.jgit.lib.Config;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.DirectoryStream;
|
||||
import java.nio.file.Files;
|
||||
@@ -46,11 +40,14 @@ import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.TreeMap;
|
||||
import org.eclipse.jgit.errors.ConfigInvalidException;
|
||||
import org.eclipse.jgit.lib.Config;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
@Singleton
|
||||
public class LuceneVersionManager implements LifecycleListener {
|
||||
private static final Logger log = LoggerFactory
|
||||
.getLogger(LuceneVersionManager.class);
|
||||
private static final Logger log = LoggerFactory.getLogger(LuceneVersionManager.class);
|
||||
|
||||
private static class Version<V> {
|
||||
private final Schema<V> schema;
|
||||
@@ -58,8 +55,7 @@ public class LuceneVersionManager implements LifecycleListener {
|
||||
private final boolean exists;
|
||||
private final boolean ready;
|
||||
|
||||
private Version(Schema<V> schema, int version, boolean exists,
|
||||
boolean ready) {
|
||||
private Version(Schema<V> schema, int version, boolean exists, boolean ready) {
|
||||
checkArgument(schema == null || schema.getVersion() == version);
|
||||
this.schema = schema;
|
||||
this.version = version;
|
||||
@@ -69,8 +65,7 @@ public class LuceneVersionManager implements LifecycleListener {
|
||||
}
|
||||
|
||||
static Path getDir(SitePaths sitePaths, String prefix, Schema<?> schema) {
|
||||
return sitePaths.index_dir.resolve(String.format("%s%04d",
prefix, schema.getVersion()));
return sitePaths.index_dir.resolve(String.format("%s%04d", prefix, schema.getVersion()));
}

private final SitePaths sitePaths;
@@ -93,9 +88,9 @@ public class LuceneVersionManager implements LifecycleListener {
reindexers = Maps.newHashMapWithExpectedSize(defs.size());
onlineUpgrade = cfg.getBoolean("index", null, "onlineUpgrade", true);
runReindexMsg =
"No index versions ready; run java -jar " +
sitePaths.gerrit_war.toAbsolutePath() +
" reindex";
"No index versions ready; run java -jar "
+ sitePaths.gerrit_war.toAbsolutePath()
+ " reindex";
}

@Override
@@ -176,7 +171,7 @@ public class LuceneVersionManager implements LifecycleListener {
/**
* Start the online reindexer if the current index is not already the latest.
*
* @param force start re-index
* @param force start re-index
* @return true if started, otherwise false.
* @throws ReindexerAlreadyRunningException
*/
@@ -208,24 +203,20 @@ public class LuceneVersionManager implements LifecycleListener {
return false;
}

private boolean isCurrentIndexVersionLatest(
String name, OnlineReindexer<?, ?, ?> reindexer) {
int readVersion = defs.get(name).getIndexCollection().getSearchIndex()
.getSchema().getVersion();
return reindexer == null
|| reindexer.getVersion() == readVersion;
private boolean isCurrentIndexVersionLatest(String name, OnlineReindexer<?, ?, ?> reindexer) {
int readVersion = defs.get(name).getIndexCollection().getSearchIndex().getSchema().getVersion();
return reindexer == null || reindexer.getVersion() == readVersion;
}

private static void validateReindexerNotRunning(
OnlineReindexer<?, ?, ?> reindexer)
private static void validateReindexerNotRunning(OnlineReindexer<?, ?, ?> reindexer)
throws ReindexerAlreadyRunningException {
if (reindexer != null && reindexer.isRunning()) {
throw new ReindexerAlreadyRunningException();
}
}

private <K, V, I extends Index<K, V>> TreeMap<Integer, Version<V>>
scanVersions(IndexDefinition<K, V, I> def, GerritIndexStatus cfg) {
private <K, V, I extends Index<K, V>> TreeMap<Integer, Version<V>> scanVersions(
IndexDefinition<K, V, I> def, GerritIndexStatus cfg) {
TreeMap<Integer, Version<V>> versions = new TreeMap<>();
for (Schema<V> schema : def.getSchemas().values()) {
// This part is Lucene-specific.
@@ -235,13 +226,11 @@ public class LuceneVersionManager implements LifecycleListener {
log.warn("Not a directory: %s", p.toAbsolutePath());
}
int v = schema.getVersion();
versions.put(v, new Version<>(
schema, v, isDir, cfg.getReady(def.getName(), v)));
versions.put(v, new Version<>(schema, v, isDir, cfg.getReady(def.getName(), v)));
}

String prefix = def.getName() + "_";
try (DirectoryStream<Path> paths =
Files.newDirectoryStream(sitePaths.index_dir)) {
try (DirectoryStream<Path> paths = Files.newDirectoryStream(sitePaths.index_dir)) {
for (Path p : paths) {
String n = p.getFileName().toString();
if (!n.startsWith(prefix)) {
@@ -250,13 +239,11 @@ public class LuceneVersionManager implements LifecycleListener {
String versionStr = n.substring(prefix.length());
Integer v = Ints.tryParse(versionStr);
if (v == null || versionStr.length() != 4) {
log.warn("Unrecognized version in index directory: {}",
p.toAbsolutePath());
log.warn("Unrecognized version in index directory: {}", p.toAbsolutePath());
continue;
}
if (!versions.containsKey(v)) {
versions.put(v, new Version<V>(
null, v, true, cfg.getReady(def.getName(), v)));
versions.put(v, new Version<V>(null, v, true, cfg.getReady(def.getName(), v)));
}
}
} catch (IOException e) {
@@ -265,8 +252,11 @@ public class LuceneVersionManager implements LifecycleListener {
return versions;
}

private <V> void markNotReady(GerritIndexStatus cfg, String name,
Iterable<Version<V>> versions, Collection<Version<V>> inUse) {
private <V> void markNotReady(
GerritIndexStatus cfg,
String name,
Iterable<Version<V>> versions,
Collection<Version<V>> inUse) {
boolean dirty = false;
for (Version<V> v : versions) {
if (!inUse.contains(v) && v.exists) {
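
The LuceneVersionManager hunks above show two recurring google-java-format rules: string-concatenation operators move to the start of the continuation line, and an expression that fits within the 100-column limit is joined onto a single line. A minimal sketch of both patterns follows; WrapStyleExample, WAR, RUN_REINDEX_MSG, and isLatest are placeholder names for illustration, not Gerrit code.

    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class WrapStyleExample {
      // Placeholder for the real site path; the commit builds this message from sitePaths.gerrit_war.
      private static final Path WAR = Paths.get("gerrit.war");

      // Concatenation operators lead the continuation lines, as in the runReindexMsg change above.
      static final String RUN_REINDEX_MSG =
          "No index versions ready; run java -jar "
              + WAR.toAbsolutePath()
              + " reindex";

      // A short wrapped boolean expression now fits on one line, as in isCurrentIndexVersionLatest.
      static boolean isLatest(Integer reindexerVersion, int readVersion) {
        return reindexerVersion == null || reindexerVersion == readVersion;
      }
    }
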
@@ -32,7 +32,8 @@ import com.google.gerrit.server.query.NotPredicate;
import com.google.gerrit.server.query.OrPredicate;
import com.google.gerrit.server.query.Predicate;
import com.google.gerrit.server.query.QueryParseException;

import java.util.Date;
import java.util.List;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanQuery;
@@ -45,9 +46,6 @@ import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.NumericUtils;

import java.util.Date;
import java.util.List;

public class QueryBuilder<V> {
static Term intTerm(String name, int value) {
BytesRefBuilder builder = new BytesRefBuilder();
@@ -83,8 +81,7 @@ public class QueryBuilder<V> {
}
}

private Query or(Predicate<V> p)
throws QueryParseException {
private Query or(Predicate<V> p) throws QueryParseException {
try {
BooleanQuery.Builder q = new BooleanQuery.Builder();
for (int i = 0; i < p.getChildCount(); i++) {
@@ -96,8 +93,7 @@ public class QueryBuilder<V> {
}
}

private Query and(Predicate<V> p)
throws QueryParseException {
private Query and(Predicate<V> p) throws QueryParseException {
try {
BooleanQuery.Builder b = new BooleanQuery.Builder();
List<Query> not = Lists.newArrayListWithCapacity(p.getChildCount());
@@ -123,8 +119,7 @@ public class QueryBuilder<V> {
}
}

private Query not(Predicate<V> p)
throws QueryParseException {
private Query not(Predicate<V> p) throws QueryParseException {
Predicate<V> n = p.getChild(0);
if (n instanceof TimestampRangePredicate) {
return notTimestamp((TimestampRangePredicate<V>) n);
@@ -132,15 +127,16 @@ public class QueryBuilder<V> {

// Lucene does not support negation, start with all and subtract.
return new BooleanQuery.Builder()
.add(new MatchAllDocsQuery(), MUST)
.add(toQuery(n), MUST_NOT)
.build();
.add(new MatchAllDocsQuery(), MUST)
.add(toQuery(n), MUST_NOT)
.build();
}

private Query fieldQuery(IndexPredicate<V> p)
throws QueryParseException {
checkArgument(schema.hasField(p.getField()),
"field not in schema v%s: %s", schema.getVersion(),
private Query fieldQuery(IndexPredicate<V> p) throws QueryParseException {
checkArgument(
schema.hasField(p.getField()),
"field not in schema v%s: %s",
schema.getVersion(),
p.getField().getName());
if (p.getType() == FieldType.INTEGER) {
return intQuery(p);
@@ -159,8 +155,7 @@ public class QueryBuilder<V> {
}
}

private Query intQuery(IndexPredicate<V> p)
throws QueryParseException {
private Query intQuery(IndexPredicate<V> p) throws QueryParseException {
int value;
try {
// Can't use IntPredicate because it and IndexPredicate are different
@@ -172,49 +167,37 @@ public class QueryBuilder<V> {
return new TermQuery(intTerm(p.getField().getName(), value));
}

private Query intRangeQuery(IndexPredicate<V> p)
throws QueryParseException {
private Query intRangeQuery(IndexPredicate<V> p) throws QueryParseException {
if (p instanceof IntegerRangePredicate) {
IntegerRangePredicate<V> r =
(IntegerRangePredicate<V>) p;
IntegerRangePredicate<V> r = (IntegerRangePredicate<V>) p;
int minimum = r.getMinimumValue();
int maximum = r.getMaximumValue();
if (minimum == maximum) {
// Just fall back to a standard integer query.
return new TermQuery(intTerm(p.getField().getName(), minimum));
}
return NumericRangeQuery.newIntRange(
r.getField().getName(),
minimum,
maximum,
true,
true);
return NumericRangeQuery.newIntRange(r.getField().getName(), minimum, maximum, true, true);
}
throw new QueryParseException("not an integer range: " + p);
}

private Query timestampQuery(IndexPredicate<V> p)
throws QueryParseException {
private Query timestampQuery(IndexPredicate<V> p) throws QueryParseException {
if (p instanceof TimestampRangePredicate) {
TimestampRangePredicate<V> r =
(TimestampRangePredicate<V>) p;
TimestampRangePredicate<V> r = (TimestampRangePredicate<V>) p;
return NumericRangeQuery.newLongRange(
r.getField().getName(),
r.getMinTimestamp().getTime(),
r.getMaxTimestamp().getTime(),
true, true);
true,
true);
}
throw new QueryParseException("not a timestamp: " + p);
}

private Query notTimestamp(TimestampRangePredicate<V> r)
throws QueryParseException {
private Query notTimestamp(TimestampRangePredicate<V> r) throws QueryParseException {
if (r.getMinTimestamp().getTime() == 0) {
return NumericRangeQuery.newLongRange(
r.getField().getName(),
r.getMaxTimestamp().getTime(),
null,
true, true);
r.getField().getName(), r.getMaxTimestamp().getTime(), null, true, true);
}
throw new QueryParseException("cannot negate: " + r);
}
@@ -241,17 +224,14 @@ public class QueryBuilder<V> {
return new PrefixQuery(new Term(p.getField().getName(), p.getValue()));
}

private Query fullTextQuery(IndexPredicate<V> p)
throws QueryParseException {
private Query fullTextQuery(IndexPredicate<V> p) throws QueryParseException {
String value = p.getValue();
if (value == null) {
throw new QueryParseException(
"Full-text search over empty string not supported");
throw new QueryParseException("Full-text search over empty string not supported");
}
Query query = queryBuilder.createPhraseQuery(p.getField().getName(), value);
if (query == null) {
throw new QueryParseException(
"Cannot create full-text query with value: " + value);
throw new QueryParseException("Cannot create full-text query with value: " + value);
}
return query;
}
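
The QueryBuilder hunks apply the same column-driven logic to argument lists: a checkArgument call that overflows 100 columns is broken up with its arguments on continuation lines, while a call such as NumericRangeQuery.newIntRange that fits is collapsed onto a single line. A rough sketch of the long-call case follows; ArgumentWrapExample, validateField, and its parameters are invented names, not QueryBuilder code.

    import static com.google.common.base.Preconditions.checkArgument;

    public class ArgumentWrapExample {
      // Hypothetical check; the names below are placeholders, not the real QueryBuilder fields.
      static void validateField(
          boolean hasField, int schemaVersion, String fieldName, String predicateDescription) {
        // A call that would overflow 100 columns gets one argument per continuation line.
        checkArgument(
            hasField,
            "field not in schema v%s: %s (seen while converting predicate %s to a Lucene query)",
            schemaVersion,
            fieldName,
            predicateDescription);
      }
    }
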
@@ -17,6 +17,7 @@ package com.google.gerrit.lucene;
* limitations under the License.
*/

import java.io.IOException;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FilterDirectoryReader;
import org.apache.lucene.index.FilterLeafReader;
@@ -27,16 +28,13 @@ import org.apache.lucene.search.ReferenceManager;
import org.apache.lucene.search.SearcherFactory;
import org.apache.lucene.store.Directory;

import java.io.IOException;

/**
* Utility class to safely share {@link IndexSearcher} instances across multiple
* threads, while periodically reopening. This class ensures each searcher is
* closed only once all threads have finished using it.
* Utility class to safely share {@link IndexSearcher} instances across multiple threads, while
* periodically reopening. This class ensures each searcher is closed only once all threads have
* finished using it.
*
* <p>
* Use {@link #acquire} to obtain the current searcher, and {@link #release} to
* release it, like this:
* <p>Use {@link #acquire} to obtain the current searcher, and {@link #release} to release it, like
* this:
*
* <pre class="prettyprint">
* IndexSearcher s = manager.acquire();
@@ -49,15 +47,12 @@ import java.io.IOException;
* s = null;
* </pre>
*
* <p>
* In addition you should periodically call {@link #maybeRefresh}. While it's
* possible to call this just before running each query, this is discouraged
* since it penalizes the unlucky queries that need to refresh. It's better to use
* a separate background thread, that periodically calls {@link #maybeRefresh}. Finally,
* be sure to call {@link #close} once you are done.
* <p>In addition you should periodically call {@link #maybeRefresh}. While it's possible to call
* this just before running each query, this is discouraged since it penalizes the unlucky queries
* that need to refresh. It's better to use a separate background thread, that periodically calls
* {@link #maybeRefresh}. Finally, be sure to call {@link #close} once you are done.
*
* @see SearcherFactory
*
* @lucene.experimental
*/
// This file was copied from:
@@ -70,28 +65,22 @@ final class WrappableSearcherManager extends ReferenceManager<IndexSearcher> {
private final SearcherFactory searcherFactory;

/**
* Creates and returns a new SearcherManager from the given
* {@link IndexWriter}.
*
* @param writer
* the IndexWriter to open the IndexReader from.
* @param applyAllDeletes
* If <code>true</code>, all buffered deletes will be applied (made
* visible) in the {@link IndexSearcher} / {@link DirectoryReader}.
* If <code>false</code>, the deletes may or may not be applied, but
* remain buffered (in IndexWriter) so that they will be applied in
* the future. Applying deletes can be costly, so if your app can
* tolerate deleted documents being returned you might gain some
* performance by passing <code>false</code>. See
* {@link DirectoryReader#openIfChanged(DirectoryReader, IndexWriter, boolean)}.
* @param searcherFactory
* An optional {@link SearcherFactory}. Pass <code>null</code> if you
* don't require the searcher to be warmed before going live or other
* custom behavior.
* Creates and returns a new SearcherManager from the given {@link IndexWriter}.
*
* @param writer the IndexWriter to open the IndexReader from.
* @param applyAllDeletes If <code>true</code>, all buffered deletes will be applied (made
* visible) in the {@link IndexSearcher} / {@link DirectoryReader}. If <code>false</code>, the
* deletes may or may not be applied, but remain buffered (in IndexWriter) so that they will
* be applied in the future. Applying deletes can be costly, so if your app can tolerate
* deleted documents being returned you might gain some performance by passing <code>false
* </code>. See {@link DirectoryReader#openIfChanged(DirectoryReader, IndexWriter, boolean)}.
* @param searcherFactory An optional {@link SearcherFactory}. Pass <code>null</code> if you don't
* require the searcher to be warmed before going live or other custom behavior.
* @throws IOException if there is a low-level I/O error
*/
WrappableSearcherManager(IndexWriter writer, boolean applyAllDeletes, SearcherFactory searcherFactory) throws IOException {
WrappableSearcherManager(
IndexWriter writer, boolean applyAllDeletes, SearcherFactory searcherFactory)
throws IOException {
if (searcherFactory == null) {
searcherFactory = new SearcherFactory();
}
@@ -101,11 +90,10 @@ final class WrappableSearcherManager extends ReferenceManager<IndexSearcher> {

/**
* Creates and returns a new SearcherManager from the given {@link Directory}.
* @param dir the directory to open the DirectoryReader on.
* @param searcherFactory An optional {@link SearcherFactory}. Pass
* <code>null</code> if you don't require the searcher to be warmed
* before going live or other custom behavior.
*
* @param dir the directory to open the DirectoryReader on.
* @param searcherFactory An optional {@link SearcherFactory}. Pass <code>null</code> if you don't
* require the searcher to be warmed before going live or other custom behavior.
* @throws IOException if there is a low-level I/O error
*/
WrappableSearcherManager(Directory dir, SearcherFactory searcherFactory) throws IOException {
@@ -117,17 +105,16 @@ final class WrappableSearcherManager extends ReferenceManager<IndexSearcher> {
}

/**
* Creates and returns a new SearcherManager from an existing {@link DirectoryReader}. Note that
* Creates and returns a new SearcherManager from an existing {@link DirectoryReader}. Note that
* this steals the incoming reference.
*
* @param reader the DirectoryReader.
* @param searcherFactory An optional {@link SearcherFactory}. Pass
* <code>null</code> if you don't require the searcher to be warmed
* before going live or other custom behavior.
*
* @param searcherFactory An optional {@link SearcherFactory}. Pass <code>null</code> if you don't
* require the searcher to be warmed before going live or other custom behavior.
* @throws IOException if there is a low-level I/O error
*/
WrappableSearcherManager(DirectoryReader reader, SearcherFactory searcherFactory) throws IOException {
WrappableSearcherManager(DirectoryReader reader, SearcherFactory searcherFactory)
throws IOException {
if (searcherFactory == null) {
searcherFactory = new SearcherFactory();
}
@@ -143,8 +130,8 @@ final class WrappableSearcherManager extends ReferenceManager<IndexSearcher> {
@Override
protected IndexSearcher refreshIfNeeded(IndexSearcher referenceToRefresh) throws IOException {
final IndexReader r = referenceToRefresh.getIndexReader();
assert r instanceof DirectoryReader :
"searcher's IndexReader should be a DirectoryReader, but got " + r;
assert r instanceof DirectoryReader
: "searcher's IndexReader should be a DirectoryReader, but got " + r;
final IndexReader newReader = DirectoryReader.openIfChanged((DirectoryReader) r);
if (newReader == null) {
return null;
@@ -163,28 +150,30 @@ final class WrappableSearcherManager extends ReferenceManager<IndexSearcher> {
}

/**
* Returns <code>true</code> if no changes have occured since this searcher
* ie. reader was opened, otherwise <code>false</code>.
* Returns <code>true</code> if no changes have occured since this searcher ie. reader was opened,
* otherwise <code>false</code>.
*
* @see DirectoryReader#isCurrent()
*/
public boolean isSearcherCurrent() throws IOException {
final IndexSearcher searcher = acquire();
try {
final IndexReader r = searcher.getIndexReader();
assert r instanceof DirectoryReader :
"searcher's IndexReader should be a DirectoryReader, but got " + r;
assert r instanceof DirectoryReader
: "searcher's IndexReader should be a DirectoryReader, but got " + r;
return ((DirectoryReader) r).isCurrent();
} finally {
release(searcher);
}
}

/** Expert: creates a searcher from the provided {@link
* IndexReader} using the provided {@link
* SearcherFactory}. NOTE: this decRefs incoming reader
* on throwing an exception. */
/**
* Expert: creates a searcher from the provided {@link IndexReader} using the provided {@link
* SearcherFactory}. NOTE: this decRefs incoming reader on throwing an exception.
*/
@SuppressWarnings("resource")
public static IndexSearcher getSearcher(SearcherFactory searcherFactory, IndexReader reader) throws IOException {
public static IndexSearcher getSearcher(SearcherFactory searcherFactory, IndexReader reader)
throws IOException {
boolean success = false;
final IndexSearcher searcher;
try {
@@ -206,9 +195,11 @@ final class WrappableSearcherManager extends ReferenceManager<IndexSearcher> {

if (unwrapped != reader) {
throw new IllegalStateException(
"SearcherFactory must wrap the provided reader (got " +
searcher.getIndexReader() +
" but expected " + reader + ")");
"SearcherFactory must wrap the provided reader (got "
+ searcher.getIndexReader()
+ " but expected "
+ reader
+ ")");
}
success = true;
} finally {
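
In WrappableSearcherManager the same rules extend to assertions and Javadoc: the ':' that introduces an assert message now leads its continuation line, and comment blocks are reflowed to the 100-column limit with <p> attached to the text that follows it. A small hypothetical example of the assert style; AssertStyleExample and requireDirectoryReader are placeholder names, not part of the commit.

    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexReader;

    final class AssertStyleExample {
      /**
       * Hypothetical helper mirroring the assertion style used in refreshIfNeeded and
       * isSearcherCurrent above.
       */
      static void requireDirectoryReader(IndexReader r) {
        // The ':' that introduces the assert message leads the continuation line.
        assert r instanceof DirectoryReader
            : "searcher's IndexReader should be a DirectoryReader, but got " + r;
      }
    }
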