Implement mergedafter and mergedbefore change search operators
This implementation of the merged-date search operators was chosen to be consistent with the existing after/before operators.

Other alternatives that were discussed:
- new operators, merged:'RELATION''TIMESTAMP' and updated:'RELATION''TIMESTAMP'
- search on merged date for submitted CLs, on created date for pending CLs
- add a modifier to specify the search strategy
- add a modifier for the sorting strategy

The change includes:
- Index the changes on mergedOn date.
- Sort the index on mergedOn before changeId.
- Add mergedafter and mergedbefore operators to the change query.

As future work, results could be sorted by merged date when the merged search operators are used.

Change-Id: I68f36a8b8aed29a837aca84f8b24774f039fccd6

@@ -92,6 +92,18 @@ Changes modified after the given 'TIME', inclusive. Must be in the
format `2006-01-02[ 15:04:05[.890][ -0700]]`; omitting the time defaults
to 00:00:00 and omitting the timezone defaults to UTC.

[[mergedbefore]]
mergedbefore:'TIME'::
+
Changes merged before the given 'TIME'. The matching behaviour is consistent
with `before:'TIME'`.

[[mergedafter]]
mergedafter:'TIME'::
+
Changes merged after the given 'TIME'. The matching behaviour is consistent
with `after:'TIME'`.

[[change]]
change:'ID'::
+
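
For illustration, a few example queries that combine the documented operators and time formats (the operator syntax comes from this change; the project name and dates are made-up values):

  mergedafter:2021-01-01
  mergedbefore:"2021-02-01 00:00:00 -0800"
  project:my-project mergedafter:2021-01-01 mergedbefore:2021-02-01
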
@@ -84,6 +84,9 @@ abstract class AbstractElasticIndex<K, V> implements Index<K, V> {
protected static final String BULK = "_bulk";
protected static final String MAPPINGS = "mappings";
protected static final String ORDER = "order";
protected static final String DESC_SORT_ORDER = "desc";
protected static final String ASC_SORT_ORDER = "asc";
protected static final String UNMAPPED_TYPE = "unmapped_type";
protected static final String SEARCH = "_search";
protected static final String SETTINGS = "settings";

@@ -298,7 +301,7 @@ abstract class AbstractElasticIndex<K, V> implements Index<K, V> {

protected JsonArray getSortArray(String idFieldName) {
JsonObject properties = new JsonObject();
properties.addProperty(ORDER, "asc");
properties.addProperty(ORDER, ASC_SORT_ORDER);

JsonArray sortArray = new JsonArray();
addNamedElement(idFieldName, properties, sortArray);

@@ -61,6 +61,9 @@ import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.inject.Inject;
import com.google.inject.assistedinject.Assisted;
import java.sql.Timestamp;
import java.time.Instant;
import java.time.format.DateTimeFormatter;
import java.util.Collections;
import java.util.List;
import java.util.Optional;

@@ -158,14 +161,24 @@ class ElasticChangeIndex extends AbstractElasticIndex<Change.Id, ChangeData>

private JsonArray getSortArray() {
JsonObject properties = new JsonObject();
properties.addProperty(ORDER, "desc");
properties.addProperty(ORDER, DESC_SORT_ORDER);

JsonArray sortArray = new JsonArray();
addNamedElement(ChangeField.UPDATED.getName(), properties, sortArray);
addNamedElement(ChangeField.MERGED_ON.getName(), getMergedOnSortOptions(), sortArray);
addNamedElement(idField.getName(), properties, sortArray);
return sortArray;
}

private JsonObject getMergedOnSortOptions() {
JsonObject sortOptions = new JsonObject();
sortOptions.addProperty(ORDER, DESC_SORT_ORDER);
// Ignore the sort field if it does not exist in index. Otherwise the search would fail on open
// changes, because the corresponding documents do not have mergedOn field.
sortOptions.addProperty(UNMAPPED_TYPE, ElasticMapping.TIMESTAMP_FIELD_TYPE);
return sortOptions;
}

private String getURI(List<String> types) {
return String.join(",", types);
}
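
For reference, a minimal standalone sketch (not part of the change) of the sort clause that the getSortArray() above assembles for the Elasticsearch _search request. It assumes the field names "updated2" and "mergedon" and uses a placeholder id field name; the unmapped_type option is what keeps the sort from failing for open changes, whose documents have no mergedon value yet:

  import com.google.gson.JsonArray;
  import com.google.gson.JsonObject;

  public class SortClauseSketch {
    public static void main(String[] args) {
      // Descending sort options shared by the updated and id entries.
      JsonObject desc = new JsonObject();
      desc.addProperty("order", "desc");

      // mergedon additionally sets unmapped_type so documents without the
      // field (open changes) do not break the search.
      JsonObject mergedOnOptions = new JsonObject();
      mergedOnOptions.addProperty("order", "desc");
      mergedOnOptions.addProperty("unmapped_type", "date");

      JsonObject updatedEntry = new JsonObject();
      updatedEntry.add("updated2", desc);
      JsonObject mergedOnEntry = new JsonObject();
      mergedOnEntry.add("mergedon", mergedOnOptions);
      JsonObject idEntry = new JsonObject();
      idEntry.add("_id", desc); // placeholder; the real name comes from idField.getName()

      JsonArray sort = new JsonArray();
      sort.add(updatedEntry);
      sort.add(mergedOnEntry);
      sort.add(idEntry);

      // Prints:
      // [{"updated2":{"order":"desc"}},{"mergedon":{"order":"desc","unmapped_type":"date"}},{"_id":{"order":"desc"}}]
      System.out.println(sort);
    }
  }
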
@@ -390,6 +403,10 @@ class ElasticChangeIndex extends AbstractElasticIndex<Change.Id, ChangeData>
cd);
}

if (fields.contains(ChangeField.MERGED_ON.getName())) {
decodeMergedOn(source, cd);
}

return cd;
}

@@ -425,4 +442,18 @@ class ElasticChangeIndex extends AbstractElasticIndex<Change.Id, ChangeData>
}
out.setUnresolvedCommentCount(count.getAsInt());
}

private void decodeMergedOn(JsonObject doc, ChangeData out) {
JsonElement mergedOnField = doc.get(ChangeField.MERGED_ON.getName());

Timestamp mergedOn = null;
if (mergedOnField != null) {
// Parse from ElasticMapping.TIMESTAMP_FIELD_FORMAT.
// We currently use built-in ISO-based dateOptionalTime.
// https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-date-format.html#built-in-date-formats
DateTimeFormatter isoFormatter = DateTimeFormatter.ISO_INSTANT;
mergedOn = Timestamp.from(Instant.from(isoFormatter.parse(mergedOnField.getAsString())));
}
out.setMergedOn(mergedOn);
}
}
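
As a small, self-contained illustration of the decode path above (not code from the change; the sample value is made up), Elasticsearch hands back the stored mergedon value as an ISO-style string, which DateTimeFormatter.ISO_INSTANT converts back into a java.sql.Timestamp exactly as decodeMergedOn() does:

  import java.sql.Timestamp;
  import java.time.Instant;
  import java.time.format.DateTimeFormatter;

  public class MergedOnParseSketch {
    public static void main(String[] args) {
      // Example of a value as it might appear in the mergedon field of an ES document.
      String fromIndex = "2009-10-02T03:00:00.000Z";

      // Same steps as decodeMergedOn(): ISO instant string -> Instant -> java.sql.Timestamp.
      DateTimeFormatter isoFormatter = DateTimeFormatter.ISO_INSTANT;
      Timestamp mergedOn = Timestamp.from(Instant.from(isoFormatter.parse(fromIndex)));

      System.out.println(mergedOn.getTime()); // 1254452400000 (epoch millis)
    }
  }
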
@@ -21,6 +21,10 @@ import com.google.gerrit.index.Schema;
import java.util.Map;

class ElasticMapping {

protected static final String TIMESTAMP_FIELD_TYPE = "date";
protected static final String TIMESTAMP_FIELD_FORMAT = "dateOptionalTime";

static MappingProperties createMapping(Schema<?> schema, ElasticQueryAdapter adapter) {
ElasticMapping.Builder mapping = new ElasticMapping.Builder(adapter);
for (FieldDef<?, ?> field : schema.getFields().values()) {

@@ -71,9 +75,9 @@ class ElasticMapping {
}

Builder addTimestamp(String name) {
FieldProperties properties = new FieldProperties("date");
properties.type = "date";
properties.format = "dateOptionalTime";
FieldProperties properties = new FieldProperties(TIMESTAMP_FIELD_TYPE);
properties.type = TIMESTAMP_FIELD_TYPE;
properties.format = TIMESTAMP_FIELD_FORMAT;
fields.put(name, properties);
return this;
}

@@ -17,6 +17,7 @@ package com.google.gerrit.lucene;
import static com.google.common.collect.Iterables.getOnlyElement;
import static com.google.gerrit.lucene.LuceneChangeIndex.ID2_SORT_FIELD;
import static com.google.gerrit.lucene.LuceneChangeIndex.ID_SORT_FIELD;
import static com.google.gerrit.lucene.LuceneChangeIndex.MERGED_ON_SORT_FIELD;
import static com.google.gerrit.lucene.LuceneChangeIndex.UPDATED_SORT_FIELD;
import static com.google.gerrit.server.index.change.ChangeSchemaDefinitions.NAME;

@@ -110,6 +111,9 @@ public class ChangeSubIndex extends AbstractLuceneIndex<Change.Id, ChangeData>
} else if (f == ChangeField.UPDATED) {
long t = ((Timestamp) getOnlyElement(values.getValues())).getTime();
doc.add(new NumericDocValuesField(UPDATED_SORT_FIELD, t));
} else if (f == ChangeField.MERGED_ON) {
long t = ((Timestamp) getOnlyElement(values.getValues())).getTime();
doc.add(new NumericDocValuesField(MERGED_ON_SORT_FIELD, t));
}
super.add(doc, values);
}

@@ -72,6 +72,7 @@ import com.google.inject.assistedinject.Assisted;
import com.google.protobuf.MessageLite;
import java.io.IOException;
import java.nio.file.Path;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;

@@ -110,6 +111,7 @@ public class LuceneChangeIndex implements ChangeIndex {
private static final FluentLogger logger = FluentLogger.forEnclosingClass();

static final String UPDATED_SORT_FIELD = sortFieldName(ChangeField.UPDATED);
static final String MERGED_ON_SORT_FIELD = sortFieldName(ChangeField.MERGED_ON);
static final String ID_SORT_FIELD = sortFieldName(ChangeField.LEGACY_ID);
static final String ID2_SORT_FIELD = sortFieldName(ChangeField.LEGACY_ID_STR);

@@ -140,6 +142,7 @@ public class LuceneChangeIndex implements ChangeIndex {
private static final String UNRESOLVED_COMMENT_COUNT_FIELD =
ChangeField.UNRESOLVED_COMMENT_COUNT.getName();
private static final String ATTENTION_SET_FULL_FIELD = ChangeField.ATTENTION_SET_FULL.getName();
private static final String MERGED_ON_FIELD = ChangeField.MERGED_ON.getName();

@FunctionalInterface
static interface IdTerm {

@@ -320,6 +323,7 @@ public class LuceneChangeIndex implements ChangeIndex {
private Sort getSort() {
return new Sort(
new SortField(UPDATED_SORT_FIELD, SortField.Type.LONG, true),
new SortField(MERGED_ON_SORT_FIELD, SortField.Type.LONG, true),
new SortField(idSortFieldName, SortField.Type.LONG, true));
}

@@ -563,6 +567,9 @@ public class LuceneChangeIndex implements ChangeIndex {
if (fields.contains(REF_STATE_PATTERN_FIELD)) {
decodeRefStatePatterns(doc, cd);
}
if (fields.contains(MERGED_ON_FIELD)) {
decodeMergedOn(doc, cd);
}

decodeUnresolvedCommentCount(doc, cd);
decodeTotalCommentCount(doc, cd);

@@ -719,6 +726,16 @@ public class LuceneChangeIndex implements ChangeIndex {
}
}

private void decodeMergedOn(ListMultimap<String, IndexableField> doc, ChangeData cd) {
IndexableField mergedOnField =
Iterables.getFirst(doc.get(MERGED_ON_FIELD), /* defaultValue= */ null);
Timestamp mergedOn = null;
if (mergedOnField != null && mergedOnField.numericValue() != null) {
mergedOn = new Timestamp(mergedOnField.numericValue().longValue());
}
cd.setMergedOn(mergedOn);
}

private static <T> List<T> decodeProtos(
ListMultimap<String, IndexableField> doc, String fieldName, ProtoConverter<?, T> converter) {
return doc.get(fieldName).stream()

@@ -152,6 +152,12 @@ public class ChangeField {
public static final FieldDef<ChangeData, Timestamp> UPDATED =
timestamp("updated2").stored().build(changeGetter(Change::getLastUpdatedOn));

/** When this change was merged, time since January 1, 1970. */
public static final FieldDef<ChangeData, Timestamp> MERGED_ON =
timestamp(ChangeQueryBuilder.FIELD_MERGED_ON)
.stored()
.build(cd -> cd.getMergedOn().orElse(null));

/** List of full file paths modified in the current patch set. */
public static final FieldDef<ChangeData, Iterable<String>> PATH =
// Named for backwards compatibility.

@@ -130,9 +130,14 @@ public class ChangeSchemaDefinitions extends SchemaDefinitions<ChangeData> {
.build();

/** Added new fields {@link ChangeField#MERGE} */
@Deprecated
static final Schema<ChangeData> V60 =
new Schema.Builder<ChangeData>().add(V59).add(ChangeField.MERGE).build();

/** Added new field {@link ChangeField#MERGED_ON} */
static final Schema<ChangeData> V61 =
new Schema.Builder<ChangeData>().add(V60).add(ChangeField.MERGED_ON).build();

/**
* Name of the change index to be used when contacting index backends or loading configurations.
*/

@@ -169,6 +169,7 @@ public class ChangeQueryBuilder extends QueryBuilder<ChangeData, ChangeQueryBuil
public static final String FIELD_LIMIT = "limit";
public static final String FIELD_MERGE = "merge";
public static final String FIELD_MERGEABLE = "mergeable2";
public static final String FIELD_MERGED_ON = "mergedon";
public static final String FIELD_MESSAGE = "message";
public static final String FIELD_OWNER = "owner";
public static final String FIELD_OWNERIN = "ownerin";

@@ -205,6 +206,9 @@ public class ChangeQueryBuilder extends QueryBuilder<ChangeData, ChangeQueryBuil
public static final String ARG_ID_OWNER = "owner";
public static final Account.Id OWNER_ACCOUNT_ID = Account.id(0);

public static final String OPERATOR_MERGED_BEFORE = "mergedbefore";
public static final String OPERATOR_MERGED_AFTER = "mergedafter";

// Operators to match on the last time the change was updated. Naming for legacy reasons.
public static final String OPERATOR_BEFORE = "before";
public static final String OPERATOR_AFTER = "after";

@@ -493,6 +497,28 @@ public class ChangeQueryBuilder extends QueryBuilder<ChangeData, ChangeQueryBuil
return after(value);
}

@Operator
public Predicate<ChangeData> mergedBefore(String value) throws QueryParseException {
if (!args.index.getSchema().hasField(ChangeField.MERGED_ON)) {
throw new QueryParseException(
String.format(
"'%s' operator is not supported by change index version", OPERATOR_MERGED_BEFORE));
}
return new BeforePredicate(
ChangeField.MERGED_ON, ChangeQueryBuilder.OPERATOR_MERGED_BEFORE, value);
}

@Operator
public Predicate<ChangeData> mergedAfter(String value) throws QueryParseException {
if (!args.index.getSchema().hasField(ChangeField.MERGED_ON)) {
throw new QueryParseException(
String.format(
"'%s' operator is not supported by change index version", OPERATOR_MERGED_AFTER));
}
return new AfterPredicate(
ChangeField.MERGED_ON, ChangeQueryBuilder.OPERATOR_MERGED_AFTER, value);
}

@Operator
public Predicate<ChangeData> change(String query) throws QueryParseException {
Optional<ChangeTriplet> triplet = ChangeTriplet.parse(query);
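
Once a site runs an index schema that includes mergedOn, the new predicates are reachable through the regular query surfaces. For example (made-up dates; shown only for illustration), a query over Gerrit's changes REST endpoint could look like:

  GET /changes/?q=mergedafter:2021-01-01+mergedbefore:2021-02-01

On older index versions the operators fail with the QueryParseException shown above ("'mergedbefore' operator is not supported by change index version").
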
@@ -1672,6 +1672,186 @@ public abstract class AbstractQueryChangesTest extends GerritServerTests {
}
}

@Test
public void mergedOperatorSupportedByIndexVersion() throws Exception {
if (getSchemaVersion() < 61) {
assertMissingField(ChangeField.MERGED_ON);
assertFailingQuery(
"mergedbefore:2009-10-01",
"'mergedbefore' operator is not supported by change index version");
assertFailingQuery(
"mergedafter:2009-10-01",
"'mergedafter' operator is not supported by change index version");
} else {
assertThat(getSchema().hasField(ChangeField.MERGED_ON)).isTrue();
}
}

@Test
public void byMergedBefore() throws Exception {
assume().that(getSchema().hasField(ChangeField.MERGED_ON)).isTrue();
long thirtyHoursInMs = MILLISECONDS.convert(30, HOURS);

// Stop the clock, will set time to specific test values.
resetTimeWithClockStep(0, MILLISECONDS);
TestRepository<Repo> repo = createProject("repo");
long startMs = TestTimeUtil.START.toEpochMilli();
TestTimeUtil.setClock(new Timestamp(startMs));
Change change1 = insert(repo, newChange(repo));
Change change2 = insert(repo, newChange(repo));
Change change3 = insert(repo, newChange(repo));

TestTimeUtil.setClock(new Timestamp(startMs + thirtyHoursInMs));
submit(change3);
TestTimeUtil.setClock(new Timestamp(startMs + 2 * thirtyHoursInMs));
submit(change2);
TestTimeUtil.setClock(new Timestamp(startMs + 3 * thirtyHoursInMs));
// Put another approval on the change, just to update it.
approve(change1);
approve(change3);

assertThat(TimeUtil.nowMs()).isEqualTo(startMs + 3 * thirtyHoursInMs);
assertThat(lastUpdatedMsApi(change3)).isEqualTo(startMs + 3 * thirtyHoursInMs);
assertThat(lastUpdatedMsApi(change2)).isEqualTo(startMs + 2 * thirtyHoursInMs);
assertThat(lastUpdatedMsApi(change1)).isEqualTo(startMs + 3 * thirtyHoursInMs);

// Verify that:
// 1. Change1 was not submitted and should never be returned.
// 2. Change3 was merged on 2009-10-02 03:00:00 -0000
// 3. Change2 was merged on 2009-10-03 09:00:00 -0000
assertQuery("mergedbefore:2009-10-01");
|
||||
// Changes excluded on the date submitted.
|
||||
assertQuery("mergedbefore:2009-10-02");
|
||||
assertQuery("mergedbefore:\"2009-10-01 22:59:00 -0400\"");
|
||||
assertQuery("mergedbefore:\"2009-10-01 02:59:00\"");
|
||||
assertQuery("mergedbefore:\"2009-10-01 23:02:00 -0400\"", change3);
|
||||
assertQuery("mergedbefore:\"2009-10-02 03:02:00 -0000\"", change3);
|
||||
assertQuery("mergedbefore:\"2009-10-02 03:02:00\"", change3);
|
||||
assertQuery("mergedbefore:2009-10-03", change3);
|
||||
// Changes are sorted by lastUpdatedOn first, then by mergedOn.
|
||||
// Even though Change2 was merged after Change3, Change3 is returned first.
|
||||
assertQuery("mergedbefore:2009-10-04", change3, change2);
|
||||
|
||||
// Same test as above, but using filter code path.
|
||||
assertQuery(makeIndexedPredicateFilterQuery("mergedbefore:2009-10-01"));
|
||||
assertQuery(makeIndexedPredicateFilterQuery("mergedbefore:2009-10-02"));
|
||||
assertQuery(makeIndexedPredicateFilterQuery("mergedbefore:\"2009-10-01 22:59:00 -0400\""));
|
||||
assertQuery(makeIndexedPredicateFilterQuery("mergedbefore:\"2009-10-01 02:59:00\""));
|
||||
assertQuery(
|
||||
makeIndexedPredicateFilterQuery("mergedbefore:\"2009-10-01 23:02:00 -0400\""), change3);
|
||||
assertQuery(
|
||||
makeIndexedPredicateFilterQuery("mergedbefore:\"2009-10-02 03:02:00 -0000\""), change3);
|
||||
assertQuery(makeIndexedPredicateFilterQuery("mergedbefore:\"2009-10-02 03:02:00\""), change3);
|
||||
assertQuery(makeIndexedPredicateFilterQuery("mergedbefore:2009-10-03"), change3);
|
||||
assertQuery(makeIndexedPredicateFilterQuery("mergedbefore:2009-10-04"), change3, change2);
|
||||
}
|
||||
|
||||
@Test
public void byMergedAfter() throws Exception {
assume().that(getSchema().hasField(ChangeField.MERGED_ON)).isTrue();
long thirtyHoursInMs = MILLISECONDS.convert(30, HOURS);

// Stop the clock, will set time to specific test values.
resetTimeWithClockStep(0, MILLISECONDS);
TestRepository<Repo> repo = createProject("repo");
long startMs = TestTimeUtil.START.toEpochMilli();
TestTimeUtil.setClock(new Timestamp(startMs));
Change change1 = insert(repo, newChange(repo));
Change change2 = insert(repo, newChange(repo));
Change change3 = insert(repo, newChange(repo));
assertThat(TimeUtil.nowMs()).isEqualTo(startMs);

TestTimeUtil.setClock(new Timestamp(startMs + thirtyHoursInMs));
submit(change3);

TestTimeUtil.setClock(new Timestamp(startMs + 2 * thirtyHoursInMs));
submit(change2);

TestTimeUtil.setClock(new Timestamp(startMs + 3 * thirtyHoursInMs));
// Put another approval on the change, just to update it.
approve(change1);
approve(change3);

assertThat(TimeUtil.nowMs()).isEqualTo(startMs + 3 * thirtyHoursInMs);

assertThat(lastUpdatedMsApi(change3)).isEqualTo(startMs + 3 * thirtyHoursInMs);
assertThat(lastUpdatedMsApi(change2)).isEqualTo(startMs + 2 * thirtyHoursInMs);
assertThat(lastUpdatedMsApi(change1)).isEqualTo(startMs + 3 * thirtyHoursInMs);

// Verify that:
// 1. Change1 was not submitted and should never be returned.
// 2. Change3 was merged on 2009-10-02 03:00:00 -0000
// 3. Change2 was merged on 2009-10-03 09:00:00 -0000
assertQuery("mergedafter:2009-10-01", change3, change2);
|
||||
// Changes are sorted by lastUpdatedOn first, then by mergedOn.
|
||||
// Even though Change2 was merged after Change3, Change3 is returned first.
|
||||
assertQuery("mergedafter:\"2009-10-01 22:59:00 -0400\"", change3, change2);
|
||||
assertQuery("mergedafter:\"2009-10-02 02:59:00 -0000\"", change3, change2);
|
||||
assertQuery("mergedafter:\"2009-10-01 23:02:00 -0400\"", change2);
|
||||
assertQuery("mergedafter:\"2009-10-02 03:02:00 -0000\"", change2);
|
||||
// Changes included on the date submitted.
|
||||
assertQuery("mergedafter:2009-10-02", change3, change2);
|
||||
assertQuery("mergedafter:2009-10-03", change2);
|
||||
|
||||
// Same test as above, but using filter code path.
|
||||
|
||||
assertQuery(makeIndexedPredicateFilterQuery("mergedafter:2009-10-01"), change3, change2);
|
||||
// Changes are sorted by lastUpdatedOn first, then by mergedOn.
|
||||
// Even though Change2 was merged after Change3, Change3 is returned first.
|
||||
assertQuery(
|
||||
makeIndexedPredicateFilterQuery("mergedafter:\"2009-10-01 22:59:00 -0400\""),
|
||||
change3,
|
||||
change2);
|
||||
assertQuery(
|
||||
makeIndexedPredicateFilterQuery("mergedafter:\"2009-10-02 02:59:00 -0000\""),
|
||||
change3,
|
||||
change2);
|
||||
assertQuery(
|
||||
makeIndexedPredicateFilterQuery("mergedafter:\"2009-10-01 23:02:00 -0400\""), change2);
|
||||
assertQuery(
|
||||
makeIndexedPredicateFilterQuery("mergedafter:\"2009-10-02 03:02:00 -0000\""), change2);
|
||||
// Changes included on the date submitted.
|
||||
assertQuery(makeIndexedPredicateFilterQuery("mergedafter:2009-10-02"), change3, change2);
|
||||
assertQuery(makeIndexedPredicateFilterQuery("mergedafter:2009-10-03"), change2);
|
||||
}
|
||||
|
||||
@Test
public void updatedThenMergedOrder() throws Exception {
assume().that(getSchema().hasField(ChangeField.MERGED_ON)).isTrue();
long thirtyHoursInMs = MILLISECONDS.convert(30, HOURS);

// Stop the clock, will set time to specific test values.
resetTimeWithClockStep(0, MILLISECONDS);
TestRepository<Repo> repo = createProject("repo");
long startMs = TestTimeUtil.START.toEpochMilli();
TestTimeUtil.setClock(new Timestamp(startMs));

Change change1 = insert(repo, newChange(repo));
Change change2 = insert(repo, newChange(repo));
Change change3 = insert(repo, newChange(repo));

TestTimeUtil.setClock(new Timestamp(startMs + thirtyHoursInMs));
submit(change2);
submit(change3);
TestTimeUtil.setClock(new Timestamp(startMs + 2 * thirtyHoursInMs));
// Approve post submit just to update lastUpdatedOn
approve(change3);
approve(change2);
submit(change1);

// All Changes were last updated at the same time.
assertThat(lastUpdatedMsApi(change3)).isEqualTo(startMs + 2 * thirtyHoursInMs);
assertThat(lastUpdatedMsApi(change2)).isEqualTo(startMs + 2 * thirtyHoursInMs);
assertThat(lastUpdatedMsApi(change1)).isEqualTo(startMs + 2 * thirtyHoursInMs);

// Changes are sorted by lastUpdatedOn first, then by mergedOn, then by Id in reverse order.
// 1. Change3 and Change2 were merged at the same time, but Change3 ID > Change2 ID.
// 2. Change1 ID < Change3 ID & Change2 ID but it was merged last.
assertQuery("mergedbefore:2009-10-06", change1, change3, change2);
assertQuery("mergedafter:2009-09-30", change1, change3, change2);
assertQuery("status:merged", change1, change3, change2);
}

@Test
public void bySize() throws Exception {
TestRepository<Repo> repo = createProject("repo");

@@ -3636,6 +3816,20 @@ public abstract class AbstractQueryChangesTest extends GerritServerTests {
return c.getLastUpdatedOn().getTime();
}

// Get the last updated time from ChangeApi
protected long lastUpdatedMsApi(Change c) throws Exception {
return gApi.changes().id(c.getChangeId()).get().updated.getTime();
}

protected void approve(Change change) throws Exception {
gApi.changes().id(change.getChangeId()).current().review(ReviewInput.approve());
}

protected void submit(Change change) throws Exception {
approve(change);
gApi.changes().id(change.getChangeId()).current().submit();
}

/**
* Generates a search query to test {@link com.google.gerrit.index.query.Matchable} implementation
* of change {@link IndexPredicate}