diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/PipelineTest.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/PipelineTest.java index 28f0252a803..0aa94dccae2 100644 --- a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/PipelineTest.java +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/PipelineTest.java @@ -15,8 +15,10 @@ package com.google.firebase.firestore; import static com.google.common.truth.Truth.assertThat; +import static com.google.firebase.firestore.pipeline.AggregateFunction.avg; import static com.google.firebase.firestore.pipeline.Expr.add; import static com.google.firebase.firestore.pipeline.Expr.and; +import static com.google.firebase.firestore.pipeline.Expr.array; import static com.google.firebase.firestore.pipeline.Expr.arrayContains; import static com.google.firebase.firestore.pipeline.Expr.arrayContainsAny; import static com.google.firebase.firestore.pipeline.Expr.constant; @@ -27,8 +29,10 @@ import static com.google.firebase.firestore.pipeline.Expr.field; import static com.google.firebase.firestore.pipeline.Expr.gt; import static com.google.firebase.firestore.pipeline.Expr.logicalMaximum; +import static com.google.firebase.firestore.pipeline.Expr.logicalMinimum; import static com.google.firebase.firestore.pipeline.Expr.lt; import static com.google.firebase.firestore.pipeline.Expr.lte; +import static com.google.firebase.firestore.pipeline.Expr.map; import static com.google.firebase.firestore.pipeline.Expr.mapGet; import static com.google.firebase.firestore.pipeline.Expr.neq; import static com.google.firebase.firestore.pipeline.Expr.not; @@ -38,22 +42,37 @@ import static com.google.firebase.firestore.pipeline.Expr.subtract; import static com.google.firebase.firestore.pipeline.Expr.vector; import static com.google.firebase.firestore.pipeline.Ordering.ascending; +import static com.google.firebase.firestore.pipeline.Ordering.descending; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.isRunningAgainstEmulator; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.waitFor; +import static org.junit.Assert.assertThrows; +import static org.junit.Assume.assumeFalse; import androidx.test.ext.junit.runners.AndroidJUnit4; import com.google.android.gms.tasks.Task; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; import com.google.common.truth.Correspondence; +import com.google.firebase.Timestamp; import com.google.firebase.firestore.pipeline.AggregateFunction; +import com.google.firebase.firestore.pipeline.AggregateHints; +import com.google.firebase.firestore.pipeline.AggregateOptions; import com.google.firebase.firestore.pipeline.AggregateStage; +import com.google.firebase.firestore.pipeline.CollectionHints; +import com.google.firebase.firestore.pipeline.CollectionSourceOptions; import com.google.firebase.firestore.pipeline.Expr; import com.google.firebase.firestore.pipeline.Field; +import com.google.firebase.firestore.pipeline.FindNearestOptions; +import com.google.firebase.firestore.pipeline.FindNearestStage; +import com.google.firebase.firestore.pipeline.PipelineOptions; import com.google.firebase.firestore.pipeline.RawStage; +import com.google.firebase.firestore.pipeline.UnnestOptions; import com.google.firebase.firestore.testutil.IntegrationTestUtil; import java.util.Collections; +import java.util.Date; import java.util.LinkedHashMap; 
+import java.util.List; import java.util.Map; import java.util.Objects; import org.junit.After; @@ -109,6 +128,10 @@ public void tearDown() { entry("rating", 4.2), entry("tags", ImmutableList.of("comedy", "space", "adventure")), entry("awards", ImmutableMap.of("hugo", true, "nebula", false)), + entry( + "embedding", + FieldValue.vector( + new double[] {10.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0})), entry( "nestedField", ImmutableMap.of("level.1", ImmutableMap.of("level.2", true))))), @@ -121,6 +144,10 @@ public void tearDown() { entry("published", 1813), entry("rating", 4.5), entry("tags", ImmutableList.of("classic", "social commentary", "love")), + entry( + "embedding", + FieldValue.vector( + new double[] {1.0, 10.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0})), entry("awards", ImmutableMap.of("none", true)))), entry( "book3", @@ -131,6 +158,10 @@ public void tearDown() { entry("published", 1967), entry("rating", 4.3), entry("tags", ImmutableList.of("family", "history", "fantasy")), + entry( + "embedding", + FieldValue.vector( + new double[] {1.0, 1.0, 10.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0})), entry("awards", ImmutableMap.of("nobel", true, "nebula", false)))), entry( "book4", @@ -141,6 +172,10 @@ public void tearDown() { entry("published", 1954), entry("rating", 4.7), entry("tags", ImmutableList.of("adventure", "magic", "epic")), + entry( + "embedding", + FieldValue.vector( + new double[] {1.0, 1.0, 1.0, 10.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0})), entry("awards", ImmutableMap.of("hugo", false, "nebula", false)))), entry( "book5", @@ -151,6 +186,10 @@ public void tearDown() { entry("published", 1985), entry("rating", 4.1), entry("tags", ImmutableList.of("feminism", "totalitarianism", "resistance")), + entry( + "embedding", + FieldValue.vector( + new double[] {1.0, 1.0, 1.0, 1.0, 10.0, 1.0, 1.0, 1.0, 1.0, 1.0})), entry( "awards", ImmutableMap.of("arthur c. 
clarke", true, "booker prize", false)))), entry( @@ -162,6 +201,10 @@ public void tearDown() { entry("published", 1866), entry("rating", 4.3), entry("tags", ImmutableList.of("philosophy", "crime", "redemption")), + entry( + "embedding", + FieldValue.vector( + new double[] {1.0, 1.0, 1.0, 1.0, 1.0, 10.0, 1.0, 1.0, 1.0, 1.0})), entry("awards", ImmutableMap.of("none", true)))), entry( "book7", @@ -172,6 +215,10 @@ public void tearDown() { entry("published", 1960), entry("rating", 4.2), entry("tags", ImmutableList.of("racism", "injustice", "coming-of-age")), + entry( + "embedding", + FieldValue.vector( + new double[] {1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 10.0, 1.0, 1.0, 1.0})), entry("awards", ImmutableMap.of("pulitzer", true)))), entry( "book8", @@ -182,6 +229,10 @@ public void tearDown() { entry("published", 1949), entry("rating", 4.2), entry("tags", ImmutableList.of("surveillance", "totalitarianism", "propaganda")), + entry( + "embedding", + FieldValue.vector( + new double[] {1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 10.0, 1.0, 1.0})), entry("awards", ImmutableMap.of("prometheus", true)))), entry( "book9", @@ -192,6 +243,10 @@ public void tearDown() { entry("published", 1925), entry("rating", 4.0), entry("tags", ImmutableList.of("wealth", "american dream", "love")), + entry( + "embedding", + FieldValue.vector( + new double[] {1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 10.0, 1.0})), entry("awards", ImmutableMap.of("none", true)))), entry( "book10", @@ -202,7 +257,17 @@ public void tearDown() { entry("published", 1965), entry("rating", 4.6), entry("tags", ImmutableList.of("politics", "desert", "ecology")), - entry("awards", ImmutableMap.of("hugo", true, "nebula", true))))); + entry( + "embedding", + FieldValue.vector( + new double[] {1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 10.0})), + entry("awards", ImmutableMap.of("hugo", true, "nebula", true)))), + entry( + "book11", + mapOfEntries( + entry("title", "Timestamp Book"), + entry("author", "Timestamp Author"), + entry("timestamp", new Date())))); @Before public void setup() { @@ -220,7 +285,7 @@ public void emptyResults() { @Test public void fullResults() { Task execute = firestore.pipeline().collection(randomCol.getPath()).execute(); - assertThat(waitFor(execute).getResults()).hasSize(10); + assertThat(waitFor(execute).getResults()).hasSize(11); } @Test @@ -233,7 +298,7 @@ public void aggregateResultsCountAll() { .execute(); assertThat(waitFor(execute).getResults()) .comparingElementsUsing(DATA_CORRESPONDENCE) - .containsExactly(ImmutableMap.of("count", 10)); + .containsExactly(ImmutableMap.of("count", 11)); } @Test @@ -246,7 +311,7 @@ public void aggregateResultsMany() { .where(eq("genre", "Science Fiction")) .aggregate( AggregateFunction.countAll().alias("count"), - AggregateFunction.avg("rating").alias("avgRating"), + avg("rating").alias("avgRating"), field("rating").maximum().alias("maxRating")) .execute(); assertThat(waitFor(execute).getResults()) @@ -263,7 +328,7 @@ public void groupAndAccumulateResults() { .collection(randomCol) .where(lt(field("published"), 1984)) .aggregate( - AggregateStage.withAccumulators(AggregateFunction.avg("rating").alias("avgRating")) + AggregateStage.withAccumulators(avg("rating").alias("avgRating")) .withGroups("genre")) .where(gt("avgRating", 4.3)) .sort(field("avgRating").descending()) @@ -286,7 +351,7 @@ public void groupAndAccumulateResultsGeneric() { .rawStage( RawStage.ofName("aggregate") .withArguments( - ImmutableMap.of("avgRating", AggregateFunction.avg("rating")), + ImmutableMap.of("avgRating", avg("rating")), 
ImmutableMap.of("genre", field("genre")))) .rawStage(RawStage.ofName("where").withArguments(gt("avgRating", 4.3))) .rawStage(RawStage.ofName("sort").withArguments(field("avgRating").descending())) @@ -300,7 +365,6 @@ public void groupAndAccumulateResultsGeneric() { } @Test - @Ignore("Not supported yet") public void minAndMaxAccumulations() { Task execute = firestore @@ -311,10 +375,11 @@ public void minAndMaxAccumulations() { field("rating").maximum().alias("maxRating"), field("published").minimum().alias("minPublished")) .execute(); - assertThat(waitFor(execute).getResults()) + List results = waitFor(execute).getResults(); + assertThat(results) .comparingElementsUsing(DATA_CORRESPONDENCE) .containsExactly( - mapOfEntries(entry("count", 10), entry("maxRating", 4.7), entry("minPublished", 1813))); + mapOfEntries(entry("count", 11), entry("maxRating", 4.7), entry("minPublished", 1813))); } @Test @@ -345,7 +410,8 @@ public void canSelectFields() { mapOfEntries( entry("title", "The Lord of the Rings"), entry("author", "J.R.R. Tolkien")), mapOfEntries(entry("title", "Pride and Prejudice"), entry("author", "Jane Austen")), - mapOfEntries(entry("title", "The Handmaid's Tale"), entry("author", "Margaret Atwood"))) + mapOfEntries(entry("title", "The Handmaid's Tale"), entry("author", "Margaret Atwood")), + mapOfEntries(entry("title", "Timestamp Book"), entry("author", "Timestamp Author"))) .inOrder(); } @@ -603,9 +669,7 @@ public void testTrim() { @Test public void testLike() { - if (isRunningAgainstEmulator()) { - return; - } + assumeFalse("Regexes are not supported against the emulator.", isRunningAgainstEmulator()); Task execute = firestore @@ -621,9 +685,7 @@ public void testLike() { @Test public void testRegexContains() { - if (isRunningAgainstEmulator()) { - return; - } + assumeFalse("Regexes are not supported against the emulator.", isRunningAgainstEmulator()); Task execute = firestore @@ -636,9 +698,7 @@ public void testRegexContains() { @Test public void testRegexMatches() { - if (isRunningAgainstEmulator()) { - return; - } + assumeFalse("Regexes are not supported against the emulator.", isRunningAgainstEmulator()); Task execute = firestore @@ -736,7 +796,6 @@ public void testChecks() { } @Test - @Ignore("Not supported yet") public void testLogicalMax() { Task execute = firestore @@ -753,11 +812,18 @@ public void testLogicalMax() { } @Test - @Ignore("Not supported yet") public void testLogicalMin() { Task execute = - firestore.pipeline().collection(randomCol).sort(field("rating").ascending()).execute(); - assertThat(waitFor(execute).getResults()) + firestore + .pipeline() + .collection(randomCol) + .where(field("author").eq("Douglas Adams")) + .select( + field("rating").logicalMinimum(4.5).alias("min_rating"), + logicalMinimum(field("published"), 1900).alias("min_published")) + .execute(); + List results = waitFor(execute).getResults(); + assertThat(results) .comparingElementsUsing(DATA_CORRESPONDENCE) .containsExactly(ImmutableMap.of("min_rating", 4.2, "min_published", 1900)); } @@ -871,6 +937,720 @@ public void testMapEquals() { .containsExactly("book3"); } + @Test + public void testAllDataTypes() { + Date refDate = new Date(); + Timestamp refTimestamp = Timestamp.now(); + GeoPoint refGeoPoint = new GeoPoint(1, 2); + Blob refBytes = Blob.fromBytes(new byte[] {1, 2, 3}); + + Map refMap = + mapOfEntries( + entry("number", 1L), + entry("string", "a string"), + entry("boolean", true), + entry("null", null), + entry("geoPoint", refGeoPoint), + entry("timestamp", refTimestamp), + entry("date", 
new Timestamp(refDate)), + entry("bytes", refBytes)); + + List refArray = + Lists.newArrayList( + 1L, + "a string", + true, + null, + refTimestamp, + refGeoPoint, + new Timestamp(refDate), + refBytes); + + Task execute = + firestore + .pipeline() + .collection(randomCol.getPath()) + .limit(1) + .select( + constant(1L).alias("number"), + constant("a string").alias("string"), + constant(true).alias("boolean"), + Expr.nullValue().alias("null"), + constant(refTimestamp).alias("timestamp"), + constant(refDate).alias("date"), + constant(refGeoPoint).alias("geoPoint"), + constant(refBytes).alias("bytes"), + map(refMap).alias("map"), + array(refArray).alias("array")) + .execute(); + + Map expectedData = new LinkedHashMap<>(); + expectedData.put("number", 1L); + expectedData.put("string", "a string"); + expectedData.put("boolean", true); + expectedData.put("null", null); + expectedData.put("timestamp", refTimestamp); + expectedData.put("date", new Timestamp(refDate)); + expectedData.put("geoPoint", refGeoPoint); + expectedData.put("bytes", refBytes); + expectedData.put("map", refMap); + expectedData.put("array", refArray); + + assertThat(waitFor(execute).getResults()) + .comparingElementsUsing(DATA_CORRESPONDENCE) + .containsExactly(expectedData); + } + + @Test + public void testResultMetadata() { + Pipeline pipeline = firestore.pipeline().collection(randomCol.getPath()); + PipelineSnapshot snapshot = waitFor(pipeline.execute()); + assertThat(snapshot.getExecutionTime()).isNotNull(); + + for (PipelineResult result : snapshot.getResults()) { + assertThat(result.getCreateTime()).isAtMost(result.getUpdateTime()); + assertThat(result.getUpdateTime().compareTo(snapshot.getExecutionTime())).isLessThan(0); + } + + waitFor(randomCol.document("book1").update("rating", 5.0)); + snapshot = + waitFor(pipeline.where(eq("title", "The Hitchhiker's Guide to the Galaxy")).execute()); + for (PipelineResult result : snapshot.getResults()) { + assertThat(result.getCreateTime().compareTo(result.getUpdateTime())).isLessThan(0); + } + } + + @Test + public void testResultIsEqual() { + Pipeline pipeline = + firestore.pipeline().collection(randomCol.getPath()).sort(field("title").ascending()); + PipelineSnapshot snapshot1 = waitFor(pipeline.limit(1).execute()); + PipelineSnapshot snapshot2 = waitFor(pipeline.limit(1).execute()); + PipelineSnapshot snapshot3 = waitFor(pipeline.offset(1).limit(1).execute()); + + assertThat(snapshot1.getResults()).hasSize(1); + assertThat(snapshot2.getResults()).hasSize(1); + assertThat(snapshot3.getResults()).hasSize(1); + assertThat(snapshot1.getResults().get(0)).isEqualTo(snapshot2.getResults().get(0)); + assertThat(snapshot1.getResults().get(0)).isNotEqualTo(snapshot3.getResults().get(0)); + } + + @Test + public void testAggregateResultMetadata() { + Pipeline pipeline = + firestore + .pipeline() + .collection(randomCol) + .aggregate(AggregateFunction.countAll().alias("count")); + PipelineSnapshot snapshot = waitFor(pipeline.execute()); + assertThat(snapshot.getResults()).hasSize(1); + assertThat(snapshot.getExecutionTime()).isNotNull(); + + PipelineResult aggregateResult = snapshot.getResults().get(0); + assertThat(aggregateResult.getCreateTime()).isNull(); + assertThat(aggregateResult.getUpdateTime()).isNull(); + + // Ensure execution time is recent, within a tolerance. 
+ long now = new Date().getTime(); + long executionTime = snapshot.getExecutionTime().toDate().getTime(); + assertThat(now - executionTime).isLessThan(3000); // 3 seconds tolerance + } + + @Test + public void addAndRemoveFields() { + Task execute = + firestore + .pipeline() + .collection(randomCol) + .where(field("author").neq("Timestamp Author")) + .addFields( + strConcat(field("author"), "_", field("title")).alias("author_title"), + strConcat(field("title"), "_", field("author")).alias("title_author")) + .removeFields("title_author", "tags", "awards", "rating", "title", "embedding") + .removeFields("published", "genre", "nestedField") + .sort(field("author_title").ascending()) + .execute(); + + assertThat(waitFor(execute).getResults()) + .comparingElementsUsing(DATA_CORRESPONDENCE) + .containsExactly( + mapOfEntries( + entry("author_title", "Douglas Adams_The Hitchhiker's Guide to the Galaxy"), + entry("author", "Douglas Adams")), + mapOfEntries( + entry("author_title", "F. Scott Fitzgerald_The Great Gatsby"), + entry("author", "F. Scott Fitzgerald")), + mapOfEntries( + entry("author_title", "Frank Herbert_Dune"), entry("author", "Frank Herbert")), + mapOfEntries( + entry("author_title", "Fyodor Dostoevsky_Crime and Punishment"), + entry("author", "Fyodor Dostoevsky")), + mapOfEntries( + entry("author_title", "Gabriel García Márquez_One Hundred Years of Solitude"), + entry("author", "Gabriel García Márquez")), + mapOfEntries( + entry("author_title", "George Orwell_1984"), entry("author", "George Orwell")), + mapOfEntries( + entry("author_title", "Harper Lee_To Kill a Mockingbird"), + entry("author", "Harper Lee")), + mapOfEntries( + entry("author_title", "J.R.R. Tolkien_The Lord of the Rings"), + entry("author", "J.R.R. Tolkien")), + mapOfEntries( + entry("author_title", "Jane Austen_Pride and Prejudice"), + entry("author", "Jane Austen")), + mapOfEntries( + entry("author_title", "Margaret Atwood_The Handmaid's Tale"), + entry("author", "Margaret Atwood"))) + .inOrder(); + } + + @Test + public void testDistinct() { + Task execute = + firestore + .pipeline() + .collection(randomCol) + .where(lt("published", 1900)) + .distinct(field("genre").toLower().alias("lower_genre")) + .sort(field("lower_genre").descending()) + .execute(); + assertThat(waitFor(execute).getResults()) + .comparingElementsUsing(DATA_CORRESPONDENCE) + .containsExactly( + mapOfEntries(entry("lower_genre", "romance")), + mapOfEntries(entry("lower_genre", "psychological thriller"))); + } + + @Test + public void testReplaceWith() { + Task execute = + firestore + .pipeline() + .collection(randomCol) + .where(eq("title", "The Hitchhiker's Guide to the Galaxy")) + .replaceWith("awards") + .execute(); + assertThat(waitFor(execute).getResults()) + .comparingElementsUsing(DATA_CORRESPONDENCE) + .containsExactly(mapOfEntries(entry("hugo", true), entry("nebula", false))); + + execute = + firestore + .pipeline() + .collection(randomCol) + .where(eq("title", "The Hitchhiker's Guide to the Galaxy")) + .replaceWith( + Expr.map( + ImmutableMap.of( + "foo", "bar", "baz", Expr.map(ImmutableMap.of("title", field("title")))))) + .execute(); + assertThat(waitFor(execute).getResults()) + .comparingElementsUsing(DATA_CORRESPONDENCE) + .containsExactly( + mapOfEntries( + entry("foo", "bar"), + entry("baz", ImmutableMap.of("title", "The Hitchhiker's Guide to the Galaxy")))); + } + + @Test + public void testSampleLimit() { + Task execute = firestore.pipeline().collection(randomCol).sample(3).execute(); + 
assertThat(waitFor(execute).getResults()).hasSize(3); + } + + @Test + public void testUnion() { + Task execute = + firestore + .pipeline() + .collection(randomCol) + .union(firestore.pipeline().collection(randomCol)) + .execute(); + assertThat(waitFor(execute).getResults()).hasSize(22); + } + + @Test + public void testUnnest() { + Task execute = + firestore + .pipeline() + .collection(randomCol) + .where(eq("title", "The Hitchhiker's Guide to the Galaxy")) + .unnest("tags", "tag") + .execute(); + assertThat(waitFor(execute).getResults()).hasSize(3); + } + + @Test + public void testPaginationWithStartAfter() { + CollectionReference paginationCollection = + IntegrationTestUtil.testCollectionWithDocs( + mapOfEntries( + entry("doc1", ImmutableMap.of("order", 1)), + entry("doc2", ImmutableMap.of("order", 2)), + entry("doc3", ImmutableMap.of("order", 3)), + entry("doc4", ImmutableMap.of("order", 4)))); + + Pipeline pipeline = + firestore.pipeline().collection(paginationCollection).sort(ascending("order")).limit(2); + + PipelineSnapshot snapshot = waitFor(pipeline.execute()); + assertThat(snapshot.getResults()) + .comparingElementsUsing(DATA_CORRESPONDENCE) + .containsExactly(ImmutableMap.of("order", 1), ImmutableMap.of("order", 2)); + + PipelineResult lastResult = snapshot.getResults().get(snapshot.getResults().size() - 1); + Query startedAfter = paginationCollection.orderBy("order").startAfter(lastResult.get("order")); + snapshot = waitFor(firestore.pipeline().createFrom(startedAfter).execute()); + assertThat(snapshot.getResults()) + .comparingElementsUsing(DATA_CORRESPONDENCE) + .containsExactly(ImmutableMap.of("order", 3), ImmutableMap.of("order", 4)); + } + + @Test + public void testFindNearest() { + Task execute = + firestore + .pipeline() + .collection(randomCol) + .findNearest( + "embedding", + vector(new double[] {10.0, 1.0, 2.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0}), + FindNearestStage.DistanceMeasure.EUCLIDEAN, + new FindNearestOptions().withLimit(2).withDistanceField("computedDistance")) + .select("title", "computedDistance") + .execute(); + List results = waitFor(execute).getResults(); + assertThat(results).hasSize(2); + assertThat(results.get(0).getData().get("title")) + .isEqualTo("The Hitchhiker's Guide to the Galaxy"); + assertThat((Double) results.get(0).getData().get("computedDistance")).isWithin(0.00001).of(1.0); + assertThat(results.get(1).getData().get("title")).isEqualTo("One Hundred Years of Solitude"); + assertThat((Double) results.get(1).getData().get("computedDistance")) + .isWithin(0.00001) + .of(12.041594578792296); + } + + @Test + public void testMoreAggregates() { + Task execute = + firestore + .pipeline() + .collection(randomCol) + .aggregate( + AggregateFunction.sum("rating").alias("sum_rating"), + AggregateFunction.count("rating").alias("count_rating"), + AggregateFunction.countDistinct("genre").alias("distinct_genres")) + .execute(); + Map result = waitFor(execute).getResults().get(0).getData(); + assertThat((Double) result.get("sum_rating")).isWithin(0.00001).of(43.1); + assertThat(result.get("count_rating")).isEqualTo(10); + assertThat(result.get("distinct_genres")).isEqualTo(8); + } + + @Test + public void testCountIfAggregate() { + Task execute = + firestore + .pipeline() + .collection(randomCol) + .aggregate(AggregateFunction.countIf(gt(field("rating"), 4.3)).alias("count")) + .execute(); + assertThat(waitFor(execute).getResults()) + .comparingElementsUsing(DATA_CORRESPONDENCE) + .containsExactly(ImmutableMap.of("count", 3)); + } + + @Test + public void 
testStringFunctions() { + Task execute = + firestore + .pipeline() + .collection(randomCol) + .select(field("title").strReverse().alias("reversed_title"), field("author")) + .where(field("author").eq("Douglas Adams")) + .execute(); + assertThat(waitFor(execute).getResults().get(0).getData().get("reversed_title")) + .isEqualTo("yxalaG eht ot ediuG s'rekihhctiH ehT"); + + execute = + firestore + .pipeline() + .collection(randomCol) + .select( + field("author"), + field("title").strConcat("_银河系漫", "游指南").byteLength().alias("title_byte_length")) + .where(field("author").eq("Douglas Adams")) + .execute(); + assertThat(waitFor(execute).getResults().get(0).getData().get("title_byte_length")) + .isEqualTo(58); + } + + @Test + public void testStrContains() { + Task execute = + firestore + .pipeline() + .collection(randomCol) + .where(Expr.strContains(field("title"), "'s")) + .select("title") + .sort(field("title").ascending()) + .execute(); + assertThat(waitFor(execute).getResults()) + .comparingElementsUsing(DATA_CORRESPONDENCE) + .containsExactly( + ImmutableMap.of("title", "The Handmaid's Tale"), + ImmutableMap.of("title", "The Hitchhiker's Guide to the Galaxy")); + } + + @Test + public void testSubstring() { + Task execute = + firestore + .pipeline() + .collection(randomCol) + .where(eq("title", "The Lord of the Rings")) + .select( + Expr.substr(field("title"), constant(9), constant(2)).alias("of"), + Expr.substr("title", 16, 5).alias("Rings")) + .execute(); + assertThat(waitFor(execute).getResults()) + .comparingElementsUsing(DATA_CORRESPONDENCE) + .containsExactly(ImmutableMap.of("of", "of", "Rings", "Rings")); + } + + @Test + public void testLogicalAndComparisonOperators() { + Task execute = + firestore + .pipeline() + .collection(randomCol) + .where( + Expr.xor( + eq("genre", "Romance"), + eq("genre", "Dystopian"), + eq("genre", "Fantasy"), + eq("published", 1949))) + .select("title") + .sort(field("title").ascending()) + .execute(); + assertThat(waitFor(execute).getResults()) + .comparingElementsUsing(DATA_CORRESPONDENCE) + .containsExactly( + ImmutableMap.of("title", "Pride and Prejudice"), + ImmutableMap.of("title", "The Handmaid's Tale"), + ImmutableMap.of("title", "The Lord of the Rings")); + + execute = + firestore + .pipeline() + .collection(randomCol) + .where(Expr.eqAny("genre", ImmutableList.of("Romance", "Dystopian"))) + .select("title") + .sort(descending("title")) + .execute(); + assertThat(waitFor(execute).getResults()) + .comparingElementsUsing(DATA_CORRESPONDENCE) + .containsExactly( + ImmutableMap.of("title", "The Handmaid's Tale"), + ImmutableMap.of("title", "Pride and Prejudice"), + ImmutableMap.of("title", "1984")); + + execute = + firestore + .pipeline() + .collection(randomCol) + .where(Expr.notEqAny("genre", ImmutableList.of("Romance", "Dystopian"))) + .select("genre") + .distinct("genre") + .sort(ascending("genre")) + .execute(); + assertThat(waitFor(execute).getResults()) + .comparingElementsUsing(DATA_CORRESPONDENCE) + .containsExactly( + ImmutableMap.of("genre", "Fantasy"), + ImmutableMap.of("genre", "Magical Realism"), + ImmutableMap.of("genre", "Modernist"), + ImmutableMap.of("genre", "Psychological Thriller"), + ImmutableMap.of("genre", "Science Fiction"), + ImmutableMap.of("genre", "Southern Gothic")); + } + + @Test + public void testCondExpression() { + Task execute = + firestore + .pipeline() + .collection(randomCol) + .where(field("title").neq("Timestamp Book")) + .select( + Expr.cond(gt(field("published"), 1980), "Modern", "Classic").alias("era"), + 
field("title"), + field("published")) + .sort(field("published").ascending()) + .limit(2) + .execute(); + assertThat(waitFor(execute).getResults()) + .comparingElementsUsing(DATA_CORRESPONDENCE) + .containsExactly( + mapOfEntries( + entry("era", "Classic"), + entry("title", "Pride and Prejudice"), + entry("published", 1813)), + mapOfEntries( + entry("era", "Classic"), + entry("title", "Crime and Punishment"), + entry("published", 1866))); + } + + @Test + public void testDataManipulationExpressions() { + Task execute = + firestore + .pipeline() + .collection(randomCol) + .where(eq("title", "Timestamp Book")) + .select( + Expr.timestampAdd(field("timestamp"), "day", 1).alias("timestamp_plus_day"), + Expr.timestampSub(field("timestamp"), "hour", 1).alias("timestamp_minus_hour")) + .execute(); + List results = waitFor(execute).getResults(); + assertThat(results).hasSize(1); + Date originalTimestamp = (Date) bookDocs.get("book11").get("timestamp"); + Timestamp timestampPlusDay = (Timestamp) results.get(0).getData().get("timestamp_plus_day"); + Timestamp timestampMinusHour = (Timestamp) results.get(0).getData().get("timestamp_minus_hour"); + assertThat(timestampPlusDay.toDate().getTime() - originalTimestamp.getTime()) + .isEqualTo(24 * 60 * 60 * 1000); + assertThat(originalTimestamp.getTime() - timestampMinusHour.toDate().getTime()) + .isEqualTo(60 * 60 * 1000); + + execute = + firestore + .pipeline() + .collection(randomCol) + .where(eq("title", "The Hitchhiker's Guide to the Galaxy")) + .select( + Expr.arrayGet("tags", 1).alias("second_tag"), + field("awards") + .mapMerge(Expr.map(ImmutableMap.of("new_award", true))) + .alias("merged_awards")) + .execute(); + assertThat(waitFor(execute).getResults()) + .comparingElementsUsing(DATA_CORRESPONDENCE) + .containsExactly( + mapOfEntries( + entry("second_tag", "space"), + entry( + "merged_awards", + ImmutableMap.of("hugo", true, "nebula", false, "new_award", true)))); + + execute = + firestore + .pipeline() + .collection(randomCol) + .where(eq("title", "The Hitchhiker's Guide to the Galaxy")) + .select( + Expr.arrayReverse("tags").alias("reversed_tags"), + Expr.mapRemove(field("awards"), "nebula").alias("removed_awards")) + .execute(); + assertThat(waitFor(execute).getResults()) + .comparingElementsUsing(DATA_CORRESPONDENCE) + .containsExactly( + mapOfEntries( + entry("reversed_tags", ImmutableList.of("adventure", "space", "comedy")), + entry("removed_awards", ImmutableMap.of("hugo", true)))); + } + + @Test + public void testMathExpressions() { + Task execute = + firestore + .pipeline() + .collection(randomCol) + .where(eq("title", "The Hitchhiker's Guide to the Galaxy")) + .select( + Expr.ceil(field("rating")).alias("ceil_rating"), + Expr.floor(field("rating")).alias("floor_rating"), + Expr.pow(field("rating"), 2).alias("pow_rating"), + Expr.round(field("rating")).alias("round_rating"), + Expr.sqrt(field("rating")).alias("sqrt_rating"), + field("published").mod(10).alias("mod_published")) + .execute(); + Map result = waitFor(execute).getResults().get(0).getData(); + assertThat((Double) result.get("ceil_rating")).isEqualTo(5.0); + assertThat((Double) result.get("floor_rating")).isEqualTo(4.0); + assertThat((Double) result.get("pow_rating")).isWithin(0.00001).of(17.64); + assertThat((Double) result.get("round_rating")).isEqualTo(4.0); + assertThat((Double) result.get("sqrt_rating")).isWithin(0.00001).of(2.04939); + assertThat(result.get("mod_published")).isEqualTo(9); + } + + @Test + public void testAdvancedMathExpressions() { + Task execute = + 
firestore + .pipeline() + .collection(randomCol) + .where(eq("title", "The Lord of the Rings")) + .select( + Expr.exp(field("rating")).alias("exp_rating"), + Expr.ln(field("rating")).alias("ln_rating"), + Expr.log(field("rating"), 10).alias("log_rating")) + .execute(); + Map result = waitFor(execute).getResults().get(0).getData(); + assertThat((Double) result.get("exp_rating")).isWithin(0.00001).of(109.94717); + assertThat((Double) result.get("ln_rating")).isWithin(0.00001).of(1.54756); + assertThat((Double) result.get("log_rating")).isWithin(0.00001).of(0.67209); + } + + @Test + public void testTimestampConversions() { + Task execute = + firestore + .pipeline() + .collection(randomCol) + .limit(1) + .select( + Expr.unixSecondsToTimestamp(constant(1741380235L)).alias("unixSecondsToTimestamp"), + Expr.unixMillisToTimestamp(constant(1741380235123L)).alias("unixMillisToTimestamp"), + Expr.timestampToUnixSeconds(constant(new Timestamp(1741380235L, 123456789))) + .alias("timestampToUnixSeconds"), + Expr.timestampToUnixMillis(constant(new Timestamp(1741380235L, 123456789))) + .alias("timestampToUnixMillis")) + .execute(); + Map result = waitFor(execute).getResults().get(0).getData(); + assertThat(result.get("unixSecondsToTimestamp")).isEqualTo(new Timestamp(1741380235L, 0)); + assertThat(result.get("unixMillisToTimestamp")) + .isEqualTo(new Timestamp(1741380235L, 123000000)); + assertThat(result.get("timestampToUnixSeconds")).isEqualTo(1741380235L); + assertThat(result.get("timestampToUnixMillis")).isEqualTo(1741380235123L); + } + + @Test + public void testRand() { + Task execute = + firestore + .pipeline() + .collection(randomCol) + .limit(10) + .select(Expr.rand().alias("result")) + .execute(); + List results = waitFor(execute).getResults(); + assertThat(results).hasSize(10); + for (PipelineResult result : results) { + Double randVal = (Double) result.getData().get("result"); + assertThat(randVal).isAtLeast(0.0); + assertThat(randVal).isLessThan(1.0); + } + } + + @Test + public void testVectorLength() { + Task execute = + firestore + .pipeline() + .collection(randomCol) + .limit(1) + .select( + Expr.vectorLength(Expr.vector(new double[] {1.0, 2.0, 3.0})).alias("vectorLength")) + .execute(); + assertThat(waitFor(execute).getResults()) + .comparingElementsUsing(DATA_CORRESPONDENCE) + .containsExactly(ImmutableMap.of("vectorLength", 3)); + } + + @Test + public void testDocumentsAsSource() { + Task execute = + firestore + .pipeline() + .documents( + randomCol.document("book1"), + randomCol.document("book2"), + randomCol.document("book3")) + .execute(); + assertThat(waitFor(execute).getResults()).hasSize(3); + } + + @Test + public void testCollectionGroupAsSource() { + String subcollectionId = randomCol.document().getId(); + waitFor( + randomCol.document("book1").collection(subcollectionId).add(ImmutableMap.of("order", 1))); + waitFor( + randomCol.document("book2").collection(subcollectionId).add(ImmutableMap.of("order", 2))); + Task execute = + firestore + .pipeline() + .collectionGroup(subcollectionId) + .sort(field("order").ascending()) + .execute(); + assertThat(waitFor(execute).getResults()) + .comparingElementsUsing(DATA_CORRESPONDENCE) + .containsExactly(ImmutableMap.of("order", 1), ImmutableMap.of("order", 2)); + } + + @Test + public void testErrorHandling() { + Exception exception = + assertThrows( + Exception.class, + () -> { + waitFor( + firestore + .pipeline() + .collection(randomCol) + .rawStage(RawStage.ofName("invalidStage")) + .execute()); + }); + } + + @Test + public void 
testCrossDatabaseRejection() { + FirebaseFirestore firestore2 = IntegrationTestUtil.testAlternateFirestore(); + CollectionReference collection2 = firestore2.collection("test-collection"); + IllegalArgumentException exception = + assertThrows( + IllegalArgumentException.class, + () -> { + firestore.pipeline().collection(collection2); + }); + assertThat(exception.getMessage()).contains("Invalid CollectionReference"); + } + + @Test + public void testOptions() { + assumeFalse( + "Certain options are not supported against the emulator yet.", isRunningAgainstEmulator()); + + PipelineOptions opts = + new PipelineOptions().withIndexMode(PipelineOptions.IndexMode.RECOMMENDED); + + double[] vector = {1.0, 2.0, 3.0}; + + Pipeline pipeline = + firestore + .pipeline() + .collection( + firestore.collection("k"), + new CollectionSourceOptions() + .withHints(new CollectionHints().withForceIndex("abcdef"))) + .findNearest( + "topicVectors", + vector(vector), + FindNearestStage.DistanceMeasure.COSINE, + new FindNearestOptions().withLimit(10).withDistanceField("distance")) + .unnest(field("awards").alias("award"), new UnnestOptions().withIndexField("fgoo")) + .aggregate( + AggregateStage.withAccumulators(avg("rating").alias("avg_rating")) + .withGroups("genre"), + new AggregateOptions() + .withHints(new AggregateHints().withForceStreamableEnabled())); + + waitFor(pipeline.execute(opts)); + } + static Map.Entry entry(String key, T value) { return new Map.Entry() { private String k = key; diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/QueryToPipelineTest.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/QueryToPipelineTest.java index 8b40a1b98ea..df2ba833309 100644 --- a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/QueryToPipelineTest.java +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/QueryToPipelineTest.java @@ -67,7 +67,7 @@ public void testLimitQueries() { Query query = collection.limit(2); FirebaseFirestore db = collection.firestore; - PipelineSnapshot set = waitFor(db.pipeline().convertFrom(query).execute()); + PipelineSnapshot set = waitFor(db.pipeline().createFrom(query).execute()); List> data = pipelineSnapshotToValues(set); assertEquals(asList(map("k", "a"), map("k", "b")), data); } @@ -84,7 +84,7 @@ public void testLimitQueriesUsingDescendingSortOrder() { Query query = collection.limit(2).orderBy("sort", Direction.DESCENDING); FirebaseFirestore db = collection.firestore; - PipelineSnapshot set = waitFor(db.pipeline().convertFrom(query).execute()); + PipelineSnapshot set = waitFor(db.pipeline().createFrom(query).execute()); List> data = pipelineSnapshotToValues(set); assertEquals(asList(map("k", "d", "sort", 2L), map("k", "c", "sort", 1L)), data); @@ -97,7 +97,7 @@ public void testLimitToLastMustAlsoHaveExplicitOrderBy() { Query query = collection.limitToLast(2); expectError( - () -> waitFor(db.pipeline().convertFrom(query).execute()), + () -> waitFor(db.pipeline().createFrom(query).execute()), "limitToLast() queries require specifying at least one orderBy() clause"); } @@ -114,33 +114,33 @@ public void testLimitToLastQueriesWithCursors() { Query query = collection.limitToLast(3).orderBy("sort").endBefore(2); FirebaseFirestore db = collection.firestore; - PipelineSnapshot set = waitFor(db.pipeline().convertFrom(query).execute()); + PipelineSnapshot set = waitFor(db.pipeline().createFrom(query).execute()); List> data = pipelineSnapshotToValues(set); assertEquals( asList(map("k", "a", 
"sort", 0L), map("k", "b", "sort", 1L), map("k", "c", "sort", 1L)), data); query = collection.limitToLast(3).orderBy("sort").endAt(1); - set = waitFor(db.pipeline().convertFrom(query).execute()); + set = waitFor(db.pipeline().createFrom(query).execute()); data = pipelineSnapshotToValues(set); assertEquals( asList(map("k", "a", "sort", 0L), map("k", "b", "sort", 1L), map("k", "c", "sort", 1L)), data); query = collection.limitToLast(3).orderBy("sort").startAt(2); - set = waitFor(db.pipeline().convertFrom(query).execute()); + set = waitFor(db.pipeline().createFrom(query).execute()); data = pipelineSnapshotToValues(set); assertEquals(asList(map("k", "d", "sort", 2L)), data); query = collection.limitToLast(3).orderBy("sort").startAfter(0); - set = waitFor(db.pipeline().convertFrom(query).execute()); + set = waitFor(db.pipeline().createFrom(query).execute()); data = pipelineSnapshotToValues(set); assertEquals( asList(map("k", "b", "sort", 1L), map("k", "c", "sort", 1L), map("k", "d", "sort", 2L)), data); query = collection.limitToLast(3).orderBy("sort").startAfter(-1); - set = waitFor(db.pipeline().convertFrom(query).execute()); + set = waitFor(db.pipeline().createFrom(query).execute()); data = pipelineSnapshotToValues(set); assertEquals( asList(map("k", "b", "sort", 1L), map("k", "c", "sort", 1L), map("k", "d", "sort", 2L)), @@ -162,7 +162,7 @@ public void testKeyOrderIsDescendingForDescendingInequality() { Query query = collection.whereGreaterThan("foo", 21.0).orderBy("foo", Direction.DESCENDING); FirebaseFirestore db = collection.firestore; - PipelineSnapshot result = waitFor(db.pipeline().convertFrom(query).execute()); + PipelineSnapshot result = waitFor(db.pipeline().createFrom(query).execute()); assertEquals(asList("g", "f", "c", "b", "a"), pipelineSnapshotToIds(result)); } @@ -178,7 +178,7 @@ public void testUnaryFilterQueries() { PipelineSnapshot results = waitFor( db.pipeline() - .convertFrom(collection.whereEqualTo("null", null).whereEqualTo("nan", Double.NaN)) + .createFrom(collection.whereEqualTo("null", null).whereEqualTo("nan", Double.NaN)) .execute()); assertEquals(1, results.getResults().size()); PipelineResult result = results.getResults().get(0); @@ -198,7 +198,7 @@ public void testFilterOnInfinity() { PipelineSnapshot results = waitFor( db.pipeline() - .convertFrom(collection.whereEqualTo("inf", Double.POSITIVE_INFINITY)) + .createFrom(collection.whereEqualTo("inf", Double.POSITIVE_INFINITY)) .execute()); assertEquals(1, results.getResults().size()); assertEquals(asList(map("inf", Double.POSITIVE_INFINITY)), pipelineSnapshotToValues(results)); @@ -216,7 +216,7 @@ public void testCanExplicitlySortByDocumentId() { // Ideally this would be descending to validate it's different than // the default, but that requires an extra index PipelineSnapshot docs = - waitFor(db.pipeline().convertFrom(collection.orderBy(FieldPath.documentId())).execute()); + waitFor(db.pipeline().createFrom(collection.orderBy(FieldPath.documentId())).execute()); assertEquals( asList(testDocs.get("a"), testDocs.get("b"), testDocs.get("c")), pipelineSnapshotToValues(docs)); @@ -235,14 +235,14 @@ public void testCanQueryByDocumentId() { PipelineSnapshot docs = waitFor( db.pipeline() - .convertFrom(collection.whereEqualTo(FieldPath.documentId(), "ab")) + .createFrom(collection.whereEqualTo(FieldPath.documentId(), "ab")) .execute()); assertEquals(singletonList(testDocs.get("ab")), pipelineSnapshotToValues(docs)); docs = waitFor( db.pipeline() - .convertFrom( + .createFrom( collection 
.whereGreaterThan(FieldPath.documentId(), "aa") .whereLessThanOrEqualTo(FieldPath.documentId(), "ba")) @@ -263,7 +263,7 @@ public void testCanQueryByDocumentIdUsingRefs() { PipelineSnapshot docs = waitFor( db.pipeline() - .convertFrom( + .createFrom( collection.whereEqualTo(FieldPath.documentId(), collection.document("ab"))) .execute()); assertEquals(singletonList(testDocs.get("ab")), pipelineSnapshotToValues(docs)); @@ -271,7 +271,7 @@ public void testCanQueryByDocumentIdUsingRefs() { docs = waitFor( db.pipeline() - .convertFrom( + .createFrom( collection .whereGreaterThan(FieldPath.documentId(), collection.document("aa")) .whereLessThanOrEqualTo(FieldPath.documentId(), collection.document("ba"))) @@ -286,9 +286,9 @@ public void testCanQueryWithAndWithoutDocumentKey() { collection.add(map()); Task query1 = db.pipeline() - .convertFrom(collection.orderBy(FieldPath.documentId(), Direction.ASCENDING)) + .createFrom(collection.orderBy(FieldPath.documentId(), Direction.ASCENDING)) .execute(); - Task query2 = db.pipeline().convertFrom(collection).execute(); + Task query2 = db.pipeline().createFrom(collection).execute(); waitFor(query1); waitFor(query2); @@ -326,7 +326,7 @@ public void testQueriesCanUseNotEqualFilters() { expectedDocsMap.remove("j"); PipelineSnapshot snapshot = - waitFor(db.pipeline().convertFrom(collection.whereNotEqualTo("zip", 98101L)).execute()); + waitFor(db.pipeline().createFrom(collection.whereNotEqualTo("zip", 98101L)).execute()); assertEquals(Lists.newArrayList(expectedDocsMap.values()), pipelineSnapshotToValues(snapshot)); // With objects. @@ -337,7 +337,7 @@ public void testQueriesCanUseNotEqualFilters() { snapshot = waitFor( db.pipeline() - .convertFrom(collection.whereNotEqualTo("zip", map("code", 500))) + .createFrom(collection.whereNotEqualTo("zip", map("code", 500))) .execute()); assertEquals(Lists.newArrayList(expectedDocsMap.values()), pipelineSnapshotToValues(snapshot)); @@ -345,8 +345,7 @@ public void testQueriesCanUseNotEqualFilters() { expectedDocsMap = new LinkedHashMap<>(allDocs); expectedDocsMap.remove("i"); expectedDocsMap.remove("j"); - snapshot = - waitFor(db.pipeline().convertFrom(collection.whereNotEqualTo("zip", null)).execute()); + snapshot = waitFor(db.pipeline().createFrom(collection.whereNotEqualTo("zip", null)).execute()); assertEquals(Lists.newArrayList(expectedDocsMap.values()), pipelineSnapshotToValues(snapshot)); // With NaN. 
@@ -355,7 +354,7 @@ public void testQueriesCanUseNotEqualFilters() { expectedDocsMap.remove("i"); expectedDocsMap.remove("j"); snapshot = - waitFor(db.pipeline().convertFrom(collection.whereNotEqualTo("zip", Double.NaN)).execute()); + waitFor(db.pipeline().createFrom(collection.whereNotEqualTo("zip", Double.NaN)).execute()); assertEquals(Lists.newArrayList(expectedDocsMap.values()), pipelineSnapshotToValues(snapshot)); } @@ -376,7 +375,7 @@ public void testQueriesCanUseNotEqualFiltersWithDocIds() { PipelineSnapshot docs = waitFor( db.pipeline() - .convertFrom(collection.whereNotEqualTo(FieldPath.documentId(), "aa")) + .createFrom(collection.whereNotEqualTo(FieldPath.documentId(), "aa")) .execute()); assertEquals(asList(docB, docC, docD), pipelineSnapshotToValues(docs)); } @@ -396,16 +395,14 @@ public void testQueriesCanUseArrayContainsFilters() { // Search for "array" to contain 42 PipelineSnapshot snapshot = - waitFor(db.pipeline().convertFrom(collection.whereArrayContains("array", 42L)).execute()); + waitFor(db.pipeline().createFrom(collection.whereArrayContains("array", 42L)).execute()); assertEquals(asList(docA, docB, docD), pipelineSnapshotToValues(snapshot)); // Note: whereArrayContains() requires a non-null value parameter, so no null test is needed. // With NaN. snapshot = waitFor( - db.pipeline() - .convertFrom(collection.whereArrayContains("array", Double.NaN)) - .execute()); + db.pipeline().createFrom(collection.whereArrayContains("array", Double.NaN)).execute()); assertEquals(new ArrayList<>(), pipelineSnapshotToValues(snapshot)); } @@ -432,7 +429,7 @@ public void testQueriesCanUseInFilters() { PipelineSnapshot snapshot = waitFor( db.pipeline() - .convertFrom( + .createFrom( collection.whereIn("zip", asList(98101L, 98103L, asList(98101L, 98102L)))) .execute()); assertEquals(asList(docA, docC, docG), pipelineSnapshotToValues(snapshot)); @@ -441,30 +438,30 @@ public void testQueriesCanUseInFilters() { snapshot = waitFor( db.pipeline() - .convertFrom(collection.whereIn("zip", asList(map("code", 500L)))) + .createFrom(collection.whereIn("zip", asList(map("code", 500L)))) .execute()); assertEquals(asList(docF), pipelineSnapshotToValues(snapshot)); // With null. - snapshot = waitFor(db.pipeline().convertFrom(collection.whereIn("zip", nullList())).execute()); + snapshot = waitFor(db.pipeline().createFrom(collection.whereIn("zip", nullList())).execute()); assertEquals(new ArrayList<>(), pipelineSnapshotToValues(snapshot)); // With null and a value. List inputList = nullList(); inputList.add(98101L); - snapshot = waitFor(db.pipeline().convertFrom(collection.whereIn("zip", inputList)).execute()); + snapshot = waitFor(db.pipeline().createFrom(collection.whereIn("zip", inputList)).execute()); assertEquals(asList(docA), pipelineSnapshotToValues(snapshot)); // With NaN. snapshot = - waitFor(db.pipeline().convertFrom(collection.whereIn("zip", asList(Double.NaN))).execute()); + waitFor(db.pipeline().createFrom(collection.whereIn("zip", asList(Double.NaN))).execute()); assertEquals(new ArrayList<>(), pipelineSnapshotToValues(snapshot)); // With NaN and a value. 
snapshot = waitFor( db.pipeline() - .convertFrom(collection.whereIn("zip", asList(Double.NaN, 98101L))) + .createFrom(collection.whereIn("zip", asList(Double.NaN, 98101L))) .execute()); assertEquals(asList(docA), pipelineSnapshotToValues(snapshot)); } @@ -486,7 +483,7 @@ public void testQueriesCanUseInFiltersWithDocIds() { PipelineSnapshot docs = waitFor( db.pipeline() - .convertFrom(collection.whereIn(FieldPath.documentId(), asList("aa", "ab"))) + .createFrom(collection.whereIn(FieldPath.documentId(), asList("aa", "ab"))) .execute()); assertEquals(asList(docA, docB), pipelineSnapshotToValues(docs)); } @@ -524,7 +521,7 @@ public void testQueriesCanUseNotInFilters() { PipelineSnapshot snapshot = waitFor( db.pipeline() - .convertFrom( + .createFrom( collection.whereNotIn("zip", asList(98101L, 98103L, asList(98101L, 98102L)))) .execute()); assertEquals(Lists.newArrayList(expectedDocsMap.values()), pipelineSnapshotToValues(snapshot)); @@ -537,13 +534,13 @@ public void testQueriesCanUseNotInFilters() { snapshot = waitFor( db.pipeline() - .convertFrom(collection.whereNotIn("zip", asList(map("code", 500L)))) + .createFrom(collection.whereNotIn("zip", asList(map("code", 500L)))) .execute()); assertEquals(Lists.newArrayList(expectedDocsMap.values()), pipelineSnapshotToValues(snapshot)); // With Null. snapshot = - waitFor(db.pipeline().convertFrom(collection.whereNotIn("zip", nullList())).execute()); + waitFor(db.pipeline().createFrom(collection.whereNotIn("zip", nullList())).execute()); assertEquals(new ArrayList<>(), pipelineSnapshotToValues(snapshot)); // With NaN. @@ -553,7 +550,7 @@ public void testQueriesCanUseNotInFilters() { expectedDocsMap.remove("j"); snapshot = waitFor( - db.pipeline().convertFrom(collection.whereNotIn("zip", asList(Double.NaN))).execute()); + db.pipeline().createFrom(collection.whereNotIn("zip", asList(Double.NaN))).execute()); assertEquals(Lists.newArrayList(expectedDocsMap.values()), pipelineSnapshotToValues(snapshot)); // With NaN and a number. @@ -565,7 +562,7 @@ public void testQueriesCanUseNotInFilters() { snapshot = waitFor( db.pipeline() - .convertFrom(collection.whereNotIn("zip", asList(Float.NaN, 98101L))) + .createFrom(collection.whereNotIn("zip", asList(Float.NaN, 98101L))) .execute()); assertEquals(Lists.newArrayList(expectedDocsMap.values()), pipelineSnapshotToValues(snapshot)); } @@ -587,7 +584,7 @@ public void testQueriesCanUseNotInFiltersWithDocIds() { PipelineSnapshot docs = waitFor( db.pipeline() - .convertFrom(collection.whereNotIn(FieldPath.documentId(), asList("aa", "ab"))) + .createFrom(collection.whereNotIn(FieldPath.documentId(), asList("aa", "ab"))) .execute()); assertEquals(asList(docC, docD), pipelineSnapshotToValues(docs)); } @@ -613,38 +610,38 @@ public void testQueriesCanUseArrayContainsAnyFilters() { // Search for "array" to contain [42, 43]. Pipeline pipeline = - db.pipeline().convertFrom(collection.whereArrayContainsAny("array", asList(42L, 43L))); + db.pipeline().createFrom(collection.whereArrayContainsAny("array", asList(42L, 43L))); PipelineSnapshot snapshot = waitFor(pipeline.execute()); assertEquals(asList(docA, docB, docD, docE), pipelineSnapshotToValues(snapshot)); // With objects. pipeline = - db.pipeline().convertFrom(collection.whereArrayContainsAny("array", asList(map("a", 42L)))); + db.pipeline().createFrom(collection.whereArrayContainsAny("array", asList(map("a", 42L)))); snapshot = waitFor(pipeline.execute()); assertEquals(asList(docF), pipelineSnapshotToValues(snapshot)); // With null. 
- pipeline = db.pipeline().convertFrom(collection.whereArrayContainsAny("array", nullList())); + pipeline = db.pipeline().createFrom(collection.whereArrayContainsAny("array", nullList())); snapshot = waitFor(pipeline.execute()); assertEquals(new ArrayList<>(), pipelineSnapshotToValues(snapshot)); // With null and a value. List inputList = nullList(); inputList.add(43L); - pipeline = db.pipeline().convertFrom(collection.whereArrayContainsAny("array", inputList)); + pipeline = db.pipeline().createFrom(collection.whereArrayContainsAny("array", inputList)); snapshot = waitFor(pipeline.execute()); assertEquals(asList(docE), pipelineSnapshotToValues(snapshot)); // With NaN. pipeline = - db.pipeline().convertFrom(collection.whereArrayContainsAny("array", asList(Double.NaN))); + db.pipeline().createFrom(collection.whereArrayContainsAny("array", asList(Double.NaN))); snapshot = waitFor(pipeline.execute()); assertEquals(new ArrayList<>(), pipelineSnapshotToValues(snapshot)); // With NaN and a value. pipeline = db.pipeline() - .convertFrom(collection.whereArrayContainsAny("array", asList(Double.NaN, 43L))); + .createFrom(collection.whereArrayContainsAny("array", asList(Double.NaN, 43L))); snapshot = waitFor(pipeline.execute()); assertEquals(asList(docE), pipelineSnapshotToValues(snapshot)); } @@ -677,7 +674,7 @@ public void testCollectionGroupQueries() { waitFor(batch.commit()); PipelineSnapshot snapshot = - waitFor(db.pipeline().convertFrom(db.collectionGroup(collectionGroup)).execute()); + waitFor(db.pipeline().createFrom(db.collectionGroup(collectionGroup)).execute()); assertEquals( asList("cg-doc1", "cg-doc2", "cg-doc3", "cg-doc4", "cg-doc5"), pipelineSnapshotToIds(snapshot)); @@ -709,7 +706,7 @@ public void testCollectionGroupQueriesWithStartAtEndAtWithArbitraryDocumentIds() PipelineSnapshot snapshot = waitFor( db.pipeline() - .convertFrom( + .createFrom( db.collectionGroup(collectionGroup) .orderBy(FieldPath.documentId()) .startAt("a/b") @@ -720,7 +717,7 @@ public void testCollectionGroupQueriesWithStartAtEndAtWithArbitraryDocumentIds() snapshot = waitFor( db.pipeline() - .convertFrom( + .createFrom( db.collectionGroup(collectionGroup) .orderBy(FieldPath.documentId()) .startAfter("a/b") @@ -755,7 +752,7 @@ public void testCollectionGroupQueriesWithWhereFiltersOnArbitraryDocumentIds() { PipelineSnapshot snapshot = waitFor( db.pipeline() - .convertFrom( + .createFrom( db.collectionGroup(collectionGroup) .whereGreaterThanOrEqualTo(FieldPath.documentId(), "a/b") .whereLessThanOrEqualTo(FieldPath.documentId(), "a/b0")) @@ -765,7 +762,7 @@ public void testCollectionGroupQueriesWithWhereFiltersOnArbitraryDocumentIds() { snapshot = waitFor( db.pipeline() - .convertFrom( + .createFrom( db.collectionGroup(collectionGroup) .whereGreaterThan(FieldPath.documentId(), "a/b") .whereLessThan( diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/testutil/IntegrationTestUtil.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/testutil/IntegrationTestUtil.java index 92628527d25..722b091d31c 100644 --- a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/testutil/IntegrationTestUtil.java +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/testutil/IntegrationTestUtil.java @@ -599,7 +599,7 @@ public static void checkQueryAndPipelineResultsMatch(Query query, String... 
expe } PipelineSnapshot docsFromPipeline; try { - docsFromPipeline = waitFor(query.getFirestore().pipeline().convertFrom(query).execute()); + docsFromPipeline = waitFor(query.getFirestore().pipeline().createFrom(query).execute()); } catch (Exception e) { throw new RuntimeException("Pipeline FAILED", e); } diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/AggregateField.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/AggregateField.java index a053a8d038a..76fa908eb30 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/AggregateField.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/AggregateField.java @@ -20,7 +20,7 @@ import androidx.annotation.Nullable; import androidx.annotation.RestrictTo; import com.google.firebase.firestore.pipeline.AggregateFunction; -import com.google.firebase.firestore.pipeline.AggregateWithAlias; +import com.google.firebase.firestore.pipeline.AliasedAggregate; import java.util.Objects; /** Represents an aggregation that can be performed by Firestore. */ @@ -66,7 +66,7 @@ public String getOperator() { } @NonNull - abstract AggregateWithAlias toPipeline(); + abstract AliasedAggregate toPipeline(); /** * Returns true if the given object is equal to this object. Two `AggregateField` objects are @@ -205,7 +205,7 @@ private CountAggregateField() { @NonNull @Override - AggregateWithAlias toPipeline() { + AliasedAggregate toPipeline() { return AggregateFunction.countAll().alias(getAlias()); } } @@ -218,7 +218,7 @@ private SumAggregateField(@NonNull FieldPath fieldPath) { @NonNull @Override - AggregateWithAlias toPipeline() { + AliasedAggregate toPipeline() { return field(getFieldPath()).sum().alias(getAlias()); } } @@ -231,7 +231,7 @@ private AverageAggregateField(@NonNull FieldPath fieldPath) { @NonNull @Override - AggregateWithAlias toPipeline() { + AliasedAggregate toPipeline() { return field(getFieldPath()).avg().alias(getAlias()); } } diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/FirebaseFirestore.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/FirebaseFirestore.java index 81f2fc9323b..08d5176dac3 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/FirebaseFirestore.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/FirebaseFirestore.java @@ -885,7 +885,11 @@ static void setClientLanguage(@NonNull String languageToken) { } /** - * Build a new Pipeline + * Builds a new Pipeline from this Firestore instance. + * + * NOTE: Pipeline does not support realtime updates or SDK cache access; it relies entirely + * on the connection to the server for its results and does not augment them with the SDK + * cache. For realtime updates and SDK cache access, use {@code realTimePipeline()} instead. * * @return {@code PipelineSource} for this Firestore instance. */ @@ -896,7 +900,13 @@ public PipelineSource pipeline() { } /** - * Build a new RealtimePipeline + * Builds a new RealtimePipeline from this Firestore instance. + * + * NOTE: RealtimePipeline uses the Firestore realtime backend and the SDK cache to produce its + * final results; it is the equivalent of the classic Firestore {@link Query}, but with more + * supported features. However, its feature set is only a subset of {@code Pipeline}. If you need + * features unavailable in {@code RealtimePipeline}, and realtime updates or SDK cache access are + * not required, use {@code pipeline()} instead.
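A minimal Java sketch of the server-executed path described in the notes above; the collection name, filter, and listener are illustrative, while the builder calls mirror the integration tests earlier in this diff:

```java
import static com.google.firebase.firestore.pipeline.Expr.field;
import static com.google.firebase.firestore.pipeline.Expr.gt;

import com.google.android.gms.tasks.Task;
import com.google.firebase.firestore.FirebaseFirestore;
import com.google.firebase.firestore.PipelineResult;
import com.google.firebase.firestore.PipelineSnapshot;

public class PipelineExample {
  public static void run() {
    FirebaseFirestore firestore = FirebaseFirestore.getInstance();

    // Build the pipeline and execute it once on the server; there is no
    // realtime listener and no SDK cache involvement on this path.
    Task<PipelineSnapshot> task =
        firestore
            .pipeline()
            .collection("books")
            .where(gt("rating", 4.0))
            .sort(field("rating").descending())
            .execute();

    task.addOnSuccessListener(
        snapshot -> {
          for (PipelineResult result : snapshot.getResults()) {
            // Each result exposes the document data produced by the pipeline stages.
            System.out.println(result.getData());
          }
        });
  }
}
```

Unlike a classic Query listener, the Task completes a single time with the server's results.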
* * @return {@code RealtimePipelineSource} for this Firestore instance. */ diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/Pipeline.kt b/firebase-firestore/src/main/java/com/google/firebase/firestore/Pipeline.kt index f852b8549d7..5ad811d518f 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/Pipeline.kt +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/Pipeline.kt @@ -23,24 +23,29 @@ import com.google.firebase.firestore.model.ResourcePath import com.google.firebase.firestore.model.Values import com.google.firebase.firestore.pipeline.AddFieldsStage import com.google.firebase.firestore.pipeline.AggregateFunction +import com.google.firebase.firestore.pipeline.AggregateOptions import com.google.firebase.firestore.pipeline.AggregateStage -import com.google.firebase.firestore.pipeline.AggregateWithAlias +import com.google.firebase.firestore.pipeline.AliasedAggregate +import com.google.firebase.firestore.pipeline.AliasedExpr import com.google.firebase.firestore.pipeline.BooleanExpr +import com.google.firebase.firestore.pipeline.CollectionGroupOptions import com.google.firebase.firestore.pipeline.CollectionGroupSource import com.google.firebase.firestore.pipeline.CollectionSource +import com.google.firebase.firestore.pipeline.CollectionSourceOptions import com.google.firebase.firestore.pipeline.DatabaseSource import com.google.firebase.firestore.pipeline.DistinctStage import com.google.firebase.firestore.pipeline.DocumentsSource import com.google.firebase.firestore.pipeline.Expr import com.google.firebase.firestore.pipeline.Expr.Companion.field -import com.google.firebase.firestore.pipeline.ExprWithAlias import com.google.firebase.firestore.pipeline.Field +import com.google.firebase.firestore.pipeline.FindNearestOptions import com.google.firebase.firestore.pipeline.FindNearestStage import com.google.firebase.firestore.pipeline.FunctionExpr import com.google.firebase.firestore.pipeline.InternalOptions import com.google.firebase.firestore.pipeline.LimitStage import com.google.firebase.firestore.pipeline.OffsetStage import com.google.firebase.firestore.pipeline.Ordering +import com.google.firebase.firestore.pipeline.PipelineOptions import com.google.firebase.firestore.pipeline.RawStage import com.google.firebase.firestore.pipeline.RemoveFieldsStage import com.google.firebase.firestore.pipeline.ReplaceStage @@ -50,8 +55,11 @@ import com.google.firebase.firestore.pipeline.Selectable import com.google.firebase.firestore.pipeline.SortStage import com.google.firebase.firestore.pipeline.Stage import com.google.firebase.firestore.pipeline.UnionStage +import com.google.firebase.firestore.pipeline.UnnestOptions import com.google.firebase.firestore.pipeline.UnnestStage import com.google.firebase.firestore.pipeline.WhereStage +import com.google.firebase.firestore.remote.RemoteSerializer +import com.google.firebase.firestore.util.Logger import com.google.firestore.v1.ExecutePipelineRequest import com.google.firestore.v1.StructuredPipeline import com.google.firestore.v1.Value @@ -92,9 +100,25 @@ internal constructor( return builder.build() } - fun execute(options: InternalOptions?): Task { + /** + * Executes this pipeline and returns the results as a [Task] of [PipelineSnapshot]. + * + * @return A [Task] that will be resolved with the results of the pipeline. + */ + fun execute(): Task = execute(null) + + /** + * Executes this pipeline and returns the results as a [Task] of [PipelineSnapshot]. 
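This overload is exercised by the testOptions integration test earlier in this diff. A minimal Java sketch of that call, assuming a `firestore` instance and the static `gt` import used in the tests (the index mode shown is the one the test passes):

```java
// Sketch: a PipelineOptions value is built once and handed to execute().
PipelineOptions opts =
    new PipelineOptions().withIndexMode(PipelineOptions.IndexMode.RECOMMENDED);

Task<PipelineSnapshot> task =
    firestore
        .pipeline()
        .collection("books")
        .where(gt("rating", 4.0))
        .execute(opts);
```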
+ * + * @param options The [PipelineOptions] to use to instruct Firestore backend execution. + * @return A [Task] that will be resolved with the results of the pipeline. + */ + fun execute(options: PipelineOptions): Task = execute(options.options) + + internal fun execute(options: InternalOptions?): Task { val request = toExecutePipelineRequest(options) val observerTask = ObserverSnapshotTask() + Logger.warn("Pipeline", "Executing pipeline: $request") firestore?.callClient { call -> call!!.executePipeline(request, observerTask) } return observerTask.task } @@ -133,8 +157,6 @@ internal constructor( get() = taskCompletionSource.task } - fun execute(): Task = execute(null) - internal fun documentReference(key: DocumentKey): DocumentReference { return DocumentReference(key, firestore) } @@ -161,9 +183,18 @@ internal constructor( * The added fields are defined using [Selectable]s, which can be: * * - [Field]: References an existing document field. - * - [ExprWithAlias]: Represents the result of a expression with an assigned alias name using + * - [AliasedExpr]: Represents the result of a expression with an assigned alias name using * [Expr.alias] * + * Example: + * ``` + * firestore.pipeline().collection("books") + * .addFields( + * field("rating").as("bookRating"), // Rename 'rating' to 'bookRating' + * add(5, field("quantity")).as("totalCost") // Calculate 'totalCost' + * ); + * ``` + * * @param field The first field to add to the documents, specified as a [Selectable]. * @param additionalFields The fields to add to the documents, specified as [Selectable]s. * @return A new [Pipeline] object with this stage appended to the stage list. @@ -174,6 +205,14 @@ internal constructor( /** * Remove fields from outputs of previous stages. * + * Example: + * ``` + * firestore.pipeline().collection("books") + * .removeFields( + * field("rating"), field("cost") + * ); + * ``` + * * @param field The first [Field] to remove. * @param additionalFields Optional additional [Field]s to remove. * @return A new [Pipeline] object with this stage appended to the stage list. @@ -184,6 +223,14 @@ internal constructor( /** * Remove fields from outputs of previous stages. * + * Example: + * ``` + * firestore.pipeline().collection("books") + * .removeFields( + * "rating", "cost" + * ); + * ``` + * * @param field The first [String] name of field to remove. * @param additionalFields Optional additional [String] name of fields to remove. * @return A new [Pipeline] object with this stage appended to the stage list. @@ -200,12 +247,21 @@ internal constructor( * * - [String]: Name of an existing field * - [Field]: Reference to an existing field. - * - [ExprWithAlias]: Represents the result of a expression with an assigned alias name using + * - [AliasedExpr]: Represents the result of a expression with an assigned alias name using * [Expr.alias] * * If no selections are provided, the output of this stage is empty. Use [Pipeline.addFields] * instead if only additions are desired. * + * Example: + * ``` + * firestore.pipeline().collection("books") + * .select( + * field("name"), + * field("address").toUppercase().as("upperAddress"), + * ); + * ``` + * * @param selection The first field to include in the output documents, specified as a * [Selectable] expression. * @param additionalSelections Optional additional fields to include in the output documents, @@ -222,12 +278,22 @@ internal constructor( * * - [String]: Name of an existing field * - [Field]: Reference to an existing field. 
- * - [ExprWithAlias]: Represents the result of a expression with an assigned alias name using + * - [AliasedExpr]: Represents the result of a expression with an assigned alias name using * [Expr.alias] * * If no selections are provided, the output of this stage is empty. Use [Pipeline.addFields] * instead if only additions are desired. * + * Example: + * ``` + * firestore.collection("books") + * .select("name", "address"); + * + * // The above is a shorthand of this: + * firestore.pipeline().collection("books") + * .select(field("name"), field("address")); + * ``` + * * @param fieldName The first field to include in the output documents, specified as a string * value representing a field names. * @param additionalSelections Optional additional fields to include in the output documents, @@ -246,6 +312,16 @@ internal constructor( * all orderings result in equal comparison, the documents are considered equal and the order is * unspecified. * + * Example: + * ``` + * // Sort books by rating in descending order, and then by title in ascending order for books with the same rating + * firestore.pipeline().collection("books") + * .sort( + * Ordering.of("rating").descending(), + * Ordering.of("title") // Ascending order is the default + * ); + * ``` + * * @param order The first [Ordering] instance specifying the sorting criteria. * @param additionalOrders Optional additional [Ordering] instances specifying the sorting * criteria. @@ -267,6 +343,17 @@ internal constructor( * - logical operators: [Expr.and], [Expr.or], [Expr.not], etc. * - advanced functions: [Expr.regexMatch], [Expr.arrayContains], etc. * + * Example: + * ``` + * firestore.pipeline().collection("books") + * .where( + * and( + * gt("rating", 4.0), // Filter for ratings greater than 4.0 + * field("genre").eq("Science Fiction") // Equivalent to eq("genre", "Science Fiction") + * ) + * ); + * ``` + * * @param condition The [BooleanExpr] to apply. * @return A new [Pipeline] object with this stage appended to the stage list. */ @@ -279,6 +366,15 @@ internal constructor( * results in chunks. It is typically used in conjunction with [limit] to control the size of each * page. * + * Example: + * ``` + * // Retrieve the second page of 20 results + * firestore.pipeline().collection("books") + * .sort(field("published").descending()) + * .offset(20) // Skip the first 20 results + * .limit(20); // Take the next 20 results + * ``` + * * @param offset The number of documents to skip. * @return A new [Pipeline] object with this stage appended to the stage list. */ @@ -294,6 +390,14 @@ internal constructor( * - **Limiting Data Retrieval:** To prevent excessive data transfer and improve performance, * especially when dealing with large collections. * + * Example: + * ``` + * // Limit the results to the top 10 highest-rated books + * firestore.pipeline().collection("books") + * .sort(field("rating").descending()) + * .limit(10); + * ``` + * * @param limit The maximum number of documents to return. * @return A new [Pipeline] object with this stage appended to the stage list. */ @@ -309,9 +413,17 @@ internal constructor( * * - [String]: Name of an existing field * - [Field]: References an existing document field. - * - [ExprWithAlias]: Represents the result of a function with an assigned alias name using + * - [AliasedExpr]: Represents the result of a function with an assigned alias name using * [Expr.alias] * + * Example: + * ``` + * // Get a list of unique author names in uppercase and genre combinations. 
+ * firestore.pipeline().collection("books") + * .distinct(toUppercase(field("author")).as("authorName"), field("genre")) + * .select("authorName"); + * ``` + * * @param group The [Selectable] expression to consider when determining distinct value * combinations. * @param additionalGroups The [Selectable] expressions to consider when determining distinct @@ -333,9 +445,16 @@ internal constructor( * * - [String]: Name of an existing field * - [Field]: References an existing document field. - * - [ExprWithAlias]: Represents the result of a function with an assigned alias name using + * - [AliasedExpr]: Represents the result of a function with an assigned alias name using * [Expr.alias] * + * Example: + * ``` + * // Get a list of unique genres. + * firestore.pipeline().collection("books") + * .distinct("genre"); + * ``` + * * @param groupField The [String] representing field name. * @param additionalGroups The [Selectable] expressions to consider when determining distinct * value combinations or [String]s representing field names. @@ -352,18 +471,28 @@ internal constructor( * Performs aggregation operations on the documents from previous stages. * * This stage allows you to calculate aggregate values over a set of documents. You define the - * aggregations to perform using [AggregateWithAlias] expressions which are typically results of + * aggregations to perform using [AliasedAggregate] expressions which are typically results of * calling [AggregateFunction.alias] on [AggregateFunction] instances. * - * @param accumulator The first [AggregateWithAlias] expression, wrapping an [AggregateFunction] + * Example: + * ``` + * // Calculate the average rating and the total number of books + * firestore.pipeline().collection("books") + * .aggregate( + * field("rating").avg().as("averageRating"), + * countAll().as("totalBooks") + * ); + * ``` + * + * @param accumulator The first [AliasedAggregate] expression, wrapping an [AggregateFunction] * with an alias for the accumulated results. - * @param additionalAccumulators The [AggregateWithAlias] expressions, each wrapping an + * @param additionalAccumulators The [AliasedAggregate] expressions, each wrapping an * [AggregateFunction] with an alias for the accumulated results. * @return A new [Pipeline] object with this stage appended to the stage list. */ fun aggregate( - accumulator: AggregateWithAlias, - vararg additionalAccumulators: AggregateWithAlias + accumulator: AliasedAggregate, + vararg additionalAccumulators: AliasedAggregate ): Pipeline = append(AggregateStage.withAccumulators(accumulator, *additionalAccumulators)) /** @@ -378,15 +507,50 @@ internal constructor( * groups is the same as putting the entire inputs into one group. * * - **AggregateFunctions:** One or more accumulation operations to perform within each group. - * These are defined using [AggregateWithAlias] expressions, which are typically created by - * calling [AggregateFunction.alias] on [AggregateFunction] instances. Each aggregation calculates - * a value (e.g., sum, average, count) based on the documents within its group. + * These are defined using [AliasedAggregate] expressions, which are typically created by calling + * [AggregateFunction.alias] on [AggregateFunction] instances. Each aggregation calculates a value + * (e.g., sum, average, count) based on the documents within its group. + * + * Example: + * ``` + * // Calculate the average rating for each genre. 
+ * firestore.pipeline().collection("books") + * .aggregate( + * Aggregate + * .withAccumulators(avg("rating").as("avg_rating")) + * .withGroups("genre")); + * ``` * * @param aggregateStage An [AggregateStage] object that specifies the grouping fields (if any) * and the aggregation operations to perform. * @return A new [Pipeline] object with this stage appended to the stage list. */ - fun aggregate(aggregateStage: AggregateStage): Pipeline = append(aggregateStage) + fun aggregate(aggregateStage: AggregateStage): Pipeline = + aggregate(aggregateStage, AggregateOptions()) + + /** + * Performs optionally grouped aggregation operations on the documents from previous stages. + * + * This stage allows you to calculate aggregate values over a set of documents, optionally grouped + * by one or more fields or functions. You can specify: + * + * - **Grouping Fields or Expressions:** One or more fields or functions to group the documents + * by. For each distinct combination of values in these fields, a separate group is created. If no + * grouping fields are provided, a single group containing all documents is used. Not specifying + * groups is the same as putting the entire inputs into one group. + * + * - **AggregateFunctions:** One or more accumulation operations to perform within each group. + * These are defined using [AliasedAggregate] expressions, which are typically created by calling + * [AggregateFunction.alias] on [AggregateFunction] instances. Each aggregation calculates a value + * (e.g., sum, average, count) based on the documents within its group. + * + * @param aggregateStage An [AggregateStage] object that specifies the grouping fields (if any) + * and the aggregation operations to perform. + * @param options The [AggregateOptions] to use when performing the aggregation. + * @return A new [Pipeline] object with this stage appended to the stage list. + */ + fun aggregate(aggregateStage: AggregateStage, options: AggregateOptions): Pipeline = + append(aggregateStage.withOptions(options)) /** * Performs a vector similarity search, ordering the result set by most similar to least similar, @@ -460,10 +624,47 @@ internal constructor( * Performs a vector similarity search, ordering the result set by most similar to least similar, * and returning the first N documents in the result set. * - * @param stage An [FindNearestStage] object that specifies the search parameters. + * @param vectorField A [Field] that contains vector to search on. + * @param vectorValue The [Expr] that should evaluate to a [VectorValue] used to measure the + * distance from [vectorField] values in the documents. + * @param distanceMeasure specifies what type of distance is calculated when performing the + * search. * @return A new [Pipeline] object with this stage appended to the stage list. */ - fun findNearest(stage: FindNearestStage): Pipeline = append(stage) + fun findNearest( + vectorField: String, + vectorValue: Expr, + distanceMeasure: FindNearestStage.DistanceMeasure + ): Pipeline = findNearest(vectorField, vectorValue, distanceMeasure, FindNearestOptions()) + + /** + * Performs a vector similarity search, ordering the result set by most similar to least similar, + * and returning the first N documents in the result set. 
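A minimal Kotlin sketch of a grouped aggregation using the options overload introduced above. `AggregateStage.withAccumulators(...).withGroups(...)` follows the shape of the example above and is an assumption, as are the collection and field names:

```
import com.google.firebase.firestore.FirebaseFirestore
import com.google.firebase.firestore.pipeline.AggregateFunction.Companion.avg
import com.google.firebase.firestore.pipeline.AggregateFunction.Companion.countAll
import com.google.firebase.firestore.pipeline.AggregateOptions
import com.google.firebase.firestore.pipeline.AggregateStage

fun averageRatingPerGenre(db: FirebaseFirestore) {
  db.pipeline()
    .collection("books")
    .aggregate(
      AggregateStage
        .withAccumulators(
          avg("rating").alias("avgRating"),
          countAll().alias("bookCount")
        )
        .withGroups("genre"), // grouping key, as in the example above
      AggregateOptions()      // defaults; equivalent to the overload without options
    )
    .execute()
    .addOnSuccessListener { snapshot -> snapshot.results.forEach { println(it.getData()) } }
}
```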
+ * + * Example: + * ``` + * // Find books with similar "topicVectors" to the given targetVector + * firestore.pipeline().collection("books") + * .findNearest("topicVectors", targetVector, FindNearest.DistanceMeasure.COSINE, + * new FindNearestOptions() + * .withLimit(10) + * .withDistanceField("distance")); + * ``` + * + * @param vectorField A [Field] that contains vector to search on. + * @param vectorValue The [Expr] that should evaluate to a [VectorValue] used to measure the + * distance from [vectorField] values in the documents. + * @param distanceMeasure specifies what type of distance is calculated when performing the + * search. + * @param options The [FindNearestOptions] to use when performing the search. + * @return A new [Pipeline] object with this stage appended to the stage list. + */ + fun findNearest( + vectorField: String, + vectorValue: Expr, + distanceMeasure: FindNearestStage.DistanceMeasure, + options: FindNearestOptions + ): Pipeline = append(FindNearestStage.of(vectorField, vectorValue, distanceMeasure, options)) /** * Fully overwrites all fields in a document with those coming from a nested map. @@ -471,10 +672,30 @@ internal constructor( * This stage allows you to emit a map value as a document. Each key of the map becomes a field on * the document that contains the corresponding value. * + * Example: + * ``` + * // Input. + * // { + * // "name": "John Doe Jr.", + * // "parents": { + * // "father": "John Doe Sr.", + * // "mother": "Jane Doe" + * // } + * + * // Emit parents as document. + * firestore.pipeline().collection("people").replaceWith("parents"); + * + * // Output + * // { + * // "father": "John Doe Sr.", + * // "mother": "Jane Doe" + * // } + * ``` + * * @param field The [String] specifying the field name containing the nested map. * @return A new [Pipeline] object with this stage appended to the stage list. */ - fun replace(field: String): Pipeline = replace(field(field)) + fun replaceWith(field: String): Pipeline = replaceWith(field(field)) /** * Fully overwrites all fields in a document with those coming from a nested map. @@ -482,10 +703,30 @@ internal constructor( * This stage allows you to emit a map value as a document. Each key of the map becomes a field on * the document that contains the corresponding value. * + * Example: + * ``` + * // Input. + * // { + * // "name": "John Doe Jr.", + * // "parents": { + * // "father": "John Doe Sr.", + * // "mother": "Jane Doe" + * // } + * + * // Emit parents as document. + * firestore.pipeline().collection("people").replaceWith(field("parents")); + * + * // Output + * // { + * // "father": "John Doe Sr.", + * // "mother": "Jane Doe" + * // } + * ``` + * * @param mapValue The [Expr] or [Field] containing the nested map. * @return A new [Pipeline] object with this stage appended to the stage list. */ - fun replace(mapValue: Expr): Pipeline = + fun replaceWith(mapValue: Expr): Pipeline = append(ReplaceStage(mapValue, ReplaceStage.Mode.FULL_REPLACE)) /** @@ -496,6 +737,13 @@ internal constructor( * previous results are returned. If the previous stage produces more than size, this outputs a * sample of exactly size entries where any sample is equally likely. * + * Example: + * ``` + * // Sample 10 books, if available. + * firestore.pipeline().collection("books") + * .sample(10); + * ``` + * * @param documents The number of documents to emit. * @return A new [Pipeline] object with this stage appended to the stage list. 
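A Kotlin counterpart to the Java example above, as a hedged sketch: the `embedding` field, the `DistanceMeasure.COSINE` constant, and the default options are assumptions based on the documentation above:

```
import com.google.firebase.firestore.FirebaseFirestore
import com.google.firebase.firestore.pipeline.Expr.Companion.vector
import com.google.firebase.firestore.pipeline.FindNearestOptions
import com.google.firebase.firestore.pipeline.FindNearestStage

fun similarBooks(db: FirebaseFirestore, queryEmbedding: DoubleArray) {
  db.pipeline()
    .collection("books")
    .findNearest(
      "embedding",                             // field assumed to hold a vector value
      vector(queryEmbedding),                  // query vector as an expression constant
      FindNearestStage.DistanceMeasure.COSINE, // measure name assumed from the example above
      FindNearestOptions()
    )
    .limit(5)
    .execute()
    .addOnSuccessListener { snapshot -> snapshot.results.forEach { println(it.getData()) } }
}
```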
*/ @@ -504,6 +752,17 @@ internal constructor( /** * Performs a pseudo-random sampling of the input documents. * + * Examples: + * ``` + * // Sample 10 books, if available. + * firestore.pipeline().collection("books") + * .sample(Sample.withDocLimit(10)); + * + * // Sample 50% of books. + * firestore.pipeline().collection("books") + * .sample(Sample.withPercentage(0.5)); + * ``` + * * @param sample An [SampleStage] object that specifies how sampling is performed. * @return A new [Pipeline] object with this stage appended to the stage list. */ @@ -516,6 +775,13 @@ internal constructor( * from previous stage of the `other` Pipeline given in parameter. The order of documents emitted * from this stage is undefined. * + * Example: + * ``` + * // Emit documents from books collection and magazines collection. + * firestore.pipeline().collection("books") + * .union(firestore.pipeline().collection("magazines")); + * ``` + * * @param other The other [Pipeline] that is part of union. * @return A new [Pipeline] object with this stage appended to the stage list. */ @@ -531,6 +797,21 @@ internal constructor( * The element of the input array will be stored in a field with name specified by [alias] * parameter on the augmented document. * + * Example: + * ``` + * // Input: + * // { "title": "The Hitchhiker's Guide to the Galaxy", "tags": [ "comedy", "space", "adventure" ], ... } + * + * // Emit a book document for each tag of the book. + * firestore.pipeline().collection("books") + * .unnest("tags", "tag"); + * + * // Output: + * // { "title": "The Hitchhiker's Guide to the Galaxy", "tag": "comedy", ... } + * // { "title": "The Hitchhiker's Guide to the Galaxy", "tag": "space", ... } + * // { "title": "The Hitchhiker's Guide to the Galaxy", "tag": "adventure", ... } + * ``` + * * @param arrayField The name of the field containing the array. * @param alias The name of field to store emitted element of array. * @return A new [Pipeline] object with this stage appended to the stage list. @@ -552,7 +833,41 @@ internal constructor( * @param arrayWithAlias The input array with field alias to store output element of array. * @return A new [Pipeline] object with this stage appended to the stage list. */ - fun unnest(arrayWithAlias: Selectable): Pipeline = append(UnnestStage(arrayWithAlias)) + fun unnest(arrayWithAlias: Selectable): Pipeline = unnest(arrayWithAlias, UnnestOptions()) + + /** + * Takes a specified array from the input documents and outputs a document for each element with + * the element stored in a field with name specified by the alias. + * + * For each document emitted by the prior stage, this stage will emit zero or more augmented + * documents. The input array is found in parameter [arrayWithAlias], which can be an [Expr] with + * an alias specified via [Expr.alias], or a [Field] that can also have alias specified. For each + * element of the input array, an augmented document will be produced. The element of input array + * will be stored in a field with name specified by the alias of the [arrayWithAlias] parameter. + * If the [arrayWithAlias] is a [Field] with no alias, then the original array field will be + * replaced with the individual element. + * + * Example: + * ``` + * // Input: + * // { "title": "The Hitchhiker's Guide to the Galaxy", "tags": [ "comedy", "space", "adventure" ], ... } + * + * // Emit a book document for each tag of the book. 
+ * firestore.pipeline().collection("books") + * .unnest("tags", "tag", new UnnestOptions().withIndexField("tagIndex")); + * + * // Output: + * // { "title": "The Hitchhiker's Guide to the Galaxy", "tagIndex": 0, "tag": "comedy", ... } + * // { "title": "The Hitchhiker's Guide to the Galaxy", "tagIndex": 1, "tag": "space", ... } + * // { "title": "The Hitchhiker's Guide to the Galaxy", "tagIndex": 2, "tag": "adventure", ... } + * ``` + * + * @param arrayWithAlias The input array with field alias to store output element of array. + * @param options The [UnnestOptions] to use when performing the unnest. + * @return A new [Pipeline] object with this stage appended to the stage list. + */ + fun unnest(arrayWithAlias: Selectable, options: UnnestOptions): Pipeline = + append(UnnestStage(arrayWithAlias, options.options)) /** * Takes a specified array from the input documents and outputs a document for each element with @@ -583,7 +898,7 @@ class PipelineSource internal constructor(private val firestore: FirebaseFiresto * @throws [IllegalArgumentException] Thrown if the [query] provided targets a different project * or database than the pipeline. */ - fun convertFrom(query: Query): Pipeline { + fun createFrom(query: Query): Pipeline { if (query.firestore.databaseId != firestore.databaseId) { throw IllegalArgumentException("Provided query is from a different Firestore instance.") } @@ -598,12 +913,12 @@ class PipelineSource internal constructor(private val firestore: FirebaseFiresto * @throws [IllegalArgumentException] Thrown if the [aggregateQuery] provided targets a different * project or database than the pipeline. */ - fun convertFrom(aggregateQuery: AggregateQuery): Pipeline { + fun createFrom(aggregateQuery: AggregateQuery): Pipeline { val aggregateFields = aggregateQuery.aggregateFields - return convertFrom(aggregateQuery.query) + return createFrom(aggregateQuery.query) .aggregate( aggregateFields.first().toPipeline(), - *aggregateFields.drop(1).map(AggregateField::toPipeline).toTypedArray() + *aggregateFields.drop(1).map(AggregateField::toPipeline).toTypedArray() ) } @@ -623,22 +938,36 @@ class PipelineSource internal constructor(private val firestore: FirebaseFiresto * @throws [IllegalArgumentException] Thrown if the [ref] provided targets a different project or * database than the pipeline. */ - fun collection(ref: CollectionReference): Pipeline = - collection(CollectionSource.of(ref, firestore.databaseId)) + fun collection(ref: CollectionReference): Pipeline = collection(ref, CollectionSourceOptions()) /** - * Set the pipeline's source to the collection specified by CollectionSource. + * Set the pipeline's source to the collection specified by the given [CollectionReference]. * - * @param stage A [CollectionSource] that will be the source of this pipeline. + * @param ref A [CollectionReference] for a collection that will be the source of this pipeline. + * @param options [CollectionSourceOptions] for the collection. * @return A new [Pipeline] object with documents from target collection. - * @throws [IllegalArgumentException] Thrown if the [stage] provided targets a different project - * or database than the pipeline. + * @throws [IllegalArgumentException] Thrown if the [ref] provided targets a different project or + * database than the pipeline. 
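A minimal Kotlin sketch combining the union and unnest stages documented above, assuming `books` and `magazines` collections that both carry a `tags` array:

```
import com.google.firebase.firestore.FieldPath
import com.google.firebase.firestore.FirebaseFirestore

fun tagsAcrossBooksAndMagazines(db: FirebaseFirestore) {
  val magazines = db.pipeline().collection("magazines") // hypothetical second collection

  db.pipeline()
    .collection("books")
    .union(magazines)      // combine documents from both sources; output order is undefined
    .unnest("tags", "tag") // one output document per element of the "tags" array
    .execute()
    .addOnSuccessListener { snapshot ->
      snapshot.results.forEach { println(it.get(FieldPath.of("tag"))) }
    }
}
```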
*/ - fun collection(stage: CollectionSource): Pipeline { - if (stage.serializer.databaseId() != firestore.databaseId) { - throw IllegalArgumentException("Provided collection is from a different Firestore instance.") + fun collection(ref: CollectionReference, options: CollectionSourceOptions): Pipeline { + if ( + ref.firestore.databaseId != firestore.databaseId || + ref.firestore.app?.options?.projectId != firestore.app?.options?.projectId + ) { + throw IllegalArgumentException( + "Invalid CollectionReference. The Firestore instance of the CollectionReference must match the Firestore instance of the PipelineSource." + ) } - return Pipeline(firestore, firestore.userDataReader, stage) + + return Pipeline( + firestore, + firestore.userDataReader, + CollectionSource( + ResourcePath.fromString(ref.path), + RemoteSerializer(firestore.databaseId), + options + ) + ) } /** @@ -648,10 +977,17 @@ class PipelineSource internal constructor(private val firestore: FirebaseFiresto * @return A new [Pipeline] object with documents from target collection group. */ fun collectionGroup(collectionId: String): Pipeline = - collectionGroup(CollectionGroupSource.of((collectionId))) + collectionGroup(collectionId, CollectionGroupOptions()) - internal fun collectionGroup(stage: CollectionGroupSource): Pipeline = - Pipeline(firestore, firestore.userDataReader, stage) + /** + * Set the pipeline's source to the collection group with the given id. + * + * @param collectionId The id of a collection group that will be the source of this pipeline. + * @param options [CollectionGroupOptions] for the collection group. + * @return A new [Pipeline] object with documents from target collection group. + */ + fun collectionGroup(collectionId: String, options: CollectionGroupOptions): Pipeline = + Pipeline(firestore, firestore.userDataReader, CollectionGroupSource(collectionId, options)) /** * Set the pipeline's source to be all documents in this database. @@ -699,6 +1035,8 @@ class PipelineSource internal constructor(private val firestore: FirebaseFiresto } /** + * A `PipelineSnapshot` contains the results of a pipeline execution. It can be iterated to retrieve + * the individual `PipelineResult` objects. */ class PipelineSnapshot internal constructor(executionTime: Timestamp, results: List) : @@ -711,6 +1049,18 @@ internal constructor(executionTime: Timestamp, results: List) : val results: List = results override fun iterator() = results.iterator() + + override fun toString() = "PipelineSnapshot{executionTime=$executionTime, results=$results}" + override fun equals(other: Any?): Boolean { + if (this === other) return true + if (javaClass != other?.javaClass) return false + other as PipelineSnapshot + if (results != other.results) return false + return true + } + override fun hashCode(): Int { + return results.hashCode() + } } class PipelineResult @@ -811,6 +1161,20 @@ internal constructor( fun get(fieldPath: FieldPath): Any? 
= userDataWriter.convertValue(extractNestedValue(fieldPath)) override fun toString() = "PipelineResult{ref=$ref, updateTime=$updateTime}, data=${getData()}" + override fun equals(other: Any?): Boolean { + if (this === other) return true + if (javaClass != other?.javaClass) return false + other as PipelineResult + if (ref != other.ref) return false + if (fields != other.fields) return false + return true + } + + override fun hashCode(): Int { + var result = ref?.hashCode() ?: 0 + result = 31 * result + fields.hashCode() + return result + } } internal interface PipelineResultObserver { diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/RealtimePipeline.kt b/firebase-firestore/src/main/java/com/google/firebase/firestore/RealtimePipeline.kt index e90fe69b1c6..81987ab36fa 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/RealtimePipeline.kt +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/RealtimePipeline.kt @@ -23,9 +23,12 @@ import com.google.firebase.firestore.core.QueryOrPipeline import com.google.firebase.firestore.core.ViewSnapshot import com.google.firebase.firestore.model.Document import com.google.firebase.firestore.model.MutableDocument +import com.google.firebase.firestore.model.ResourcePath import com.google.firebase.firestore.pipeline.BooleanExpr +import com.google.firebase.firestore.pipeline.CollectionGroupOptions import com.google.firebase.firestore.pipeline.CollectionGroupSource import com.google.firebase.firestore.pipeline.CollectionSource +import com.google.firebase.firestore.pipeline.CollectionSourceOptions import com.google.firebase.firestore.pipeline.EvaluationContext import com.google.firebase.firestore.pipeline.Field import com.google.firebase.firestore.pipeline.FunctionExpr @@ -33,8 +36,6 @@ import com.google.firebase.firestore.pipeline.InternalOptions import com.google.firebase.firestore.pipeline.LimitStage import com.google.firebase.firestore.pipeline.OffsetStage import com.google.firebase.firestore.pipeline.Ordering -import com.google.firebase.firestore.pipeline.SelectStage -import com.google.firebase.firestore.pipeline.Selectable import com.google.firebase.firestore.pipeline.SortStage import com.google.firebase.firestore.pipeline.Stage import com.google.firebase.firestore.pipeline.WhereStage @@ -81,7 +82,13 @@ class RealtimePipelineSource internal constructor(private val firestore: Firebas * database than the pipeline. */ fun collection(ref: CollectionReference): RealtimePipeline = - collection(CollectionSource.of(ref, firestore.databaseId)) + collection( + CollectionSource( + ResourcePath.fromString(ref.path), + RemoteSerializer(firestore.databaseId), + CollectionSourceOptions() + ) + ) /** * Set the pipeline's source to the collection specified by CollectionSource. @@ -91,7 +98,7 @@ class RealtimePipelineSource internal constructor(private val firestore: Firebas * @throws [IllegalArgumentException] Thrown if the [stage] provided targets a different project * or database than the pipeline. */ - fun collection(stage: CollectionSource): RealtimePipeline { + internal fun collection(stage: CollectionSource): RealtimePipeline { if (stage.serializer.databaseId() != firestore.databaseId) { throw IllegalArgumentException("Provided collection is from a different Firestore instance.") } @@ -110,9 +117,9 @@ class RealtimePipelineSource internal constructor(private val firestore: Firebas * @return A new [RealtimePipeline] object with documents from target collection group. 
*/ fun collectionGroup(collectionId: String): RealtimePipeline = - collectionGroup(CollectionGroupSource.of((collectionId))) + collectionGroup(CollectionGroupSource(collectionId, CollectionGroupOptions())) - fun collectionGroup(stage: CollectionGroupSource): RealtimePipeline = + internal fun collectionGroup(stage: CollectionGroupSource): RealtimePipeline = RealtimePipeline( firestore, RemoteSerializer(firestore.databaseId), @@ -144,23 +151,98 @@ internal constructor( private fun append(stage: Stage<*>): RealtimePipeline = with(stages.plus(stage)) + /** + * Limits the maximum number of documents returned by previous stages to `limit`. + * + * This stage is particularly useful when you want to retrieve a controlled subset of data from a + * potentially large result set. It's often used for: + * - **Pagination:** In combination with [where] to retrieve specific pages of results. + * - **Limiting Data Retrieval:** To prevent excessive data transfer and improve performance, + * especially when dealing with large collections. + * + * Example: + * ``` + * // Limit the results to the top 10 highest-rated books + * firestore.pipeline().collection("books") + * .sort(field("rating").descending()) + * .limit(10); + * ``` + * + * @param limit The maximum number of documents to return. + * @return A new [RealtimePipeline] object with this stage appended to the stage list. + */ fun limit(limit: Int): RealtimePipeline = append(LimitStage(limit)) - fun offset(offset: Int): RealtimePipeline = append(OffsetStage(offset)) - - fun select(selection: Selectable, vararg additionalSelections: Any): RealtimePipeline = - append(SelectStage.of(selection, *additionalSelections)) - - fun select(fieldName: String, vararg additionalSelections: Any): RealtimePipeline = - append(SelectStage.of(fieldName, *additionalSelections)) - + /** + * Sorts the documents from previous stages based on one or more [Ordering] criteria. + * + * This stage allows you to order the results of your pipeline. You can specify multiple + * [Ordering] instances to sort by multiple fields in ascending or descending order. If documents + * have the same value for a field used for sorting, the next specified ordering will be used. If + * all orderings result in equal comparison, the documents are considered equal and the order is + * unspecified. + * + * Example: + * ``` + * // Sort books by rating in descending order, and then by title in ascending order for books with the same rating + * firestore.pipeline().collection("books") + * .sort( + * Ordering.of("rating").descending(), + * Ordering.of("title") // Ascending order is the default + * ); + * ``` + * + * @param order The first [Ordering] instance specifying the sorting criteria. + * @param additionalOrders Optional additional [Ordering] instances specifying the sorting + * criteria. + * @return A new [RealtimePipeline] object with this stage appended to the stage list. + */ fun sort(order: Ordering, vararg additionalOrders: Ordering): RealtimePipeline = append(SortStage(arrayOf(order, *additionalOrders))) + /** + * Filters the documents from previous stages to only include those matching the specified + * [BooleanExpr]. + * + * This stage allows you to apply conditions to the data, similar to a "WHERE" clause in SQL. + * + * You can filter documents based on their field values, using implementations of [BooleanExpr], + * typically including but not limited to: + * - field comparators: [Expr.eq], [Expr.lt] (less than), [Expr.gt] (greater than), etc. 
+ * - logical operators: [Expr.and], [Expr.or], [Expr.not], etc. + * - advanced functions: [Expr.arrayContains], [Expr.eqAny]etc. + * + * Example: + * ``` + * firestore.pipeline().collection("books") + * .where( + * and( + * gt("rating", 4.0), // Filter for ratings greater than 4.0 + * field("genre").eq("Science Fiction") // Equivalent to eq("genre", "Science Fiction") + * ) + * ); + * ``` + * + * @param condition The [BooleanExpr] to apply. + * @return A new [RealtimePipeline] object with this stage appended to the stage list. + */ fun where(condition: BooleanExpr): RealtimePipeline = append(WhereStage(condition)) + /** + * Starts listening to this pipeline and emits a [RealtimePipelineSnapshot] every time the results + * change. + * + * @return A [Flow] of [RealtimePipelineSnapshot] that emits new snapshots on every change. + */ fun snapshots(): Flow = snapshots(RealtimePipelineOptions.DEFAULT) + /** + * Starts listening to this pipeline and emits a [RealtimePipelineSnapshot] every time the results + * change. + * + * @param options The [RealtimePipelineOptions] to use for this listen. + * @return A [Flow] of [RealtimePipelineSnapshot] that emits new snapshots on every change. + */ fun snapshots(options: RealtimePipelineOptions): Flow = callbackFlow { val listener = addSnapshotListener(options) { snapshot, error -> @@ -173,20 +255,48 @@ internal constructor( awaitClose { listener.remove() } } + /** + * Starts listening to this pipeline using an [EventListener]. + * + * @param listener The event listener to receive the results. + * @return A [ListenerRegistration] that can be used to stop listening. + */ fun addSnapshotListener(listener: EventListener): ListenerRegistration = addSnapshotListener(RealtimePipelineOptions.DEFAULT, listener) + /** + * Starts listening to this pipeline using an [EventListener]. + * + * @param options The [RealtimePipelineOptions] to use for this listen. + * @param listener The event listener to receive the results. + * @return A [ListenerRegistration] that can be used to stop listening. + */ fun addSnapshotListener( options: RealtimePipelineOptions, listener: EventListener ): ListenerRegistration = addSnapshotListener(Executors.DEFAULT_CALLBACK_EXECUTOR, options, listener) + /** + * Starts listening to this pipeline using an [EventListener]. + * + * @param executor The executor to use for the listener. + * @param listener The event listener to receive the results. + * @return A [ListenerRegistration] that can be used to stop listening. + */ fun addSnapshotListener( executor: Executor, listener: EventListener ): ListenerRegistration = addSnapshotListener(executor, RealtimePipelineOptions.DEFAULT, listener) + /** + * Starts listening to this pipeline using an [EventListener]. + * + * @param executor The executor to use for the listener. + * @param options The [RealtimePipelineOptions] to use for this listen. + * @param listener The event listener to receive the results. + * @return A [ListenerRegistration] that can be used to stop listening. + */ fun addSnapshotListener( executor: Executor, options: RealtimePipelineOptions, @@ -424,19 +534,31 @@ private constructor( } } +/** + * A `RealtimePipelineSnapshot` contains the results of a realtime pipeline listen. It can be used + * to retrieve the full list of results, or the incremental changes since the last snapshot. 
+ */ class RealtimePipelineSnapshot internal constructor( private val viewSnapshot: ViewSnapshot, private val firestore: FirebaseFirestore, private val options: RealtimePipelineOptions ) { + /** Returns the metadata for this snapshot. */ val metadata: PipelineSnapshotMetadata get() = PipelineSnapshotMetadata(viewSnapshot.hasPendingWrites(), !viewSnapshot.isFromCache) + /** Returns the results of the pipeline for this snapshot. */ val results: List get() = viewSnapshot.documents.map { PipelineResult(it, options.serverTimestampBehavior, firestore) } + /** + * Returns the incremental changes since the last snapshot. + * + * @param metadataChanges Whether to include metadata-only changes. + * @return A list of [PipelineResultChange] objects. + */ fun getChanges(metadataChanges: MetadataChanges? = null): List = changesFromSnapshot(metadataChanges ?: MetadataChanges.EXCLUDE, viewSnapshot) { doc, @@ -454,9 +576,26 @@ internal constructor( } } +/** + * Metadata about a [RealtimePipelineSnapshot], including information about the source of the data + * and whether the snapshot has pending writes. + * + * @property hasPendingWrites True if the snapshot contains results that have not yet been written + * to the backend. + * @property isConsistentBetweenListeners True if the snapshot is guaranteed to be consistent with + * other active listeners on the same Firestore instance. + */ data class PipelineSnapshotMetadata internal constructor(val hasPendingWrites: Boolean, val isConsistentBetweenListeners: Boolean) +/** + * A `PipelineResultChange` represents a change to a single result in a `RealtimePipelineSnapshot`. + * + * @property result The `PipelineResult` that changed. + * @property type The type of change. + * @property oldIndex The index of the result in the previous snapshot, or -1 if it's a new result. + * @property newIndex The index of the result in the new snapshot, or -1 if it was removed. + */ data class PipelineResultChange internal constructor( val result: PipelineResult, @@ -464,6 +603,7 @@ internal constructor( val oldIndex: Int?, val newIndex: Int? ) { + /** An enumeration of the different types of changes that can occur. 
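A minimal Kotlin sketch of listening to a RealtimePipeline and reading incremental changes, assuming the `realTimePipeline()` accessor referenced earlier and a hypothetical `chats` collection:

```
import com.google.firebase.firestore.FirebaseFirestore
import com.google.firebase.firestore.pipeline.Expr.Companion.field

suspend fun listenToChats(db: FirebaseFirestore) {
  db.realTimePipeline()                      // accessor name assumed
    .collection(db.collection("chats"))
    .where(field("type").eq("group"))
    .sort(field("updatedAt").descending())
    .limit(20)
    .snapshots()                             // Flow<RealtimePipelineSnapshot>
    .collect { snapshot ->
      println("pendingWrites=${snapshot.metadata.hasPendingWrites}")
      snapshot.getChanges().forEach { change ->
        println("${change.type} at ${change.newIndex}: ${change.result.getData()}")
      }
    }
}
```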
*/ enum class ChangeType { ADDED, MODIFIED, diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/core/Query.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/core/Query.java index 8b6e23b4112..263cb4cafee 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/core/Query.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/core/Query.java @@ -31,6 +31,7 @@ import com.google.firebase.firestore.model.FieldPath; import com.google.firebase.firestore.model.ResourcePath; import com.google.firebase.firestore.pipeline.BooleanExpr; +import com.google.firebase.firestore.pipeline.CollectionGroupOptions; import com.google.firebase.firestore.pipeline.CollectionGroupSource; import com.google.firebase.firestore.pipeline.CollectionSource; import com.google.firebase.firestore.pipeline.DocumentsSource; @@ -660,7 +661,7 @@ private Stage pipelineSource(DatabaseId databaseId) { if (isDocumentQuery()) { return new DocumentsSource(path.canonicalString()); } else if (isCollectionGroupQuery()) { - return CollectionGroupSource.of(collectionGroup); + return new CollectionGroupSource(collectionGroup, new CollectionGroupOptions()); } else { return new CollectionSource(path, new RemoteSerializer(databaseId), InternalOptions.EMPTY); } diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/model/Values.kt b/firebase-firestore/src/main/java/com/google/firebase/firestore/model/Values.kt index 256e882407a..0dbf1fa9680 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/model/Values.kt +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/model/Values.kt @@ -673,7 +673,7 @@ object Values { @JvmStatic fun encodeValue(value: ResourcePath): Value = - Value.newBuilder().setReferenceValue(value.canonicalString()).build() + Value.newBuilder().setReferenceValue("/${value.canonicalString()}").build() @JvmStatic fun encodeValue(date: Date): Value = encodeValue(com.google.firebase.Timestamp((date))) diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/pipeline/aggregates.kt b/firebase-firestore/src/main/java/com/google/firebase/firestore/pipeline/aggregates.kt index 0ce88ce385b..a86405be192 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/pipeline/aggregates.kt +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/pipeline/aggregates.kt @@ -17,7 +17,7 @@ package com.google.firebase.firestore.pipeline import com.google.firebase.firestore.UserDataReader import com.google.firestore.v1.Value -class AggregateWithAlias +class AliasedAggregate internal constructor(internal val alias: String, internal val expr: AggregateFunction) /** A class that represents an aggregate function. */ @@ -33,6 +33,16 @@ private constructor( companion object { + /** + * Creates a generic aggregation function. + * + * This method provides a way to call aggregation functions that are supported by the Firestore + * backend but that are not available as specific factory methods in this class. + * + * @param name The name of the aggregation function. + * @param expr The expressions to pass as arguments to the function. + * @return A new [AggregateFunction] for the specified function. + */ @JvmStatic fun generic(name: String, vararg expr: Expr) = AggregateFunction(name, expr) /** @@ -66,14 +76,14 @@ private constructor( * @param condition The boolean expression to evaluate on each input. * @return A new [AggregateFunction] representing the count aggregation. 
*/ - @JvmStatic fun countIf(condition: BooleanExpr) = AggregateFunction("countIf", condition) + @JvmStatic fun countIf(condition: BooleanExpr) = AggregateFunction("count_if", condition) /** * Creates an aggregation that calculates the sum of a field's values across multiple stage * inputs. * * @param fieldName The name of the field containing numeric values to sum up. - * @return A new [AggregateFunction] representing the average aggregation. + * @return A new [AggregateFunction] representing the sum aggregation. */ @JvmStatic fun sum(fieldName: String) = AggregateFunction("sum", fieldName) @@ -137,16 +147,34 @@ private constructor( * @return A new [AggregateFunction] representing the maximum aggregation. */ @JvmStatic fun maximum(expression: Expr) = AggregateFunction("max", expression) + + /** + * Creates an aggregation that counts the number of distinct values of a field across multiple + * stage inputs. + * + * @param fieldName The name of the field to count the distinct values of. + * @return A new [AggregateFunction] representing the count distinct aggregation. + */ + @JvmStatic fun countDistinct(fieldName: String) = AggregateFunction("count_distinct", fieldName) + + /** + * Creates an aggregation that counts the number of distinct values of an expression across + * multiple stage inputs. + * + * @param expression The expression to count the distinct values of. + * @return A new [AggregateFunction] representing the count distinct aggregation. + */ + @JvmStatic fun countDistinct(expression: Expr) = AggregateFunction("count_distinct", expression) } /** * Assigns an alias to this aggregate. * * @param alias The alias to assign to this aggregate. - * @return A new [AggregateWithAlias] that wraps this aggregate and associates it with the - * provided alias. + * @return A new [AliasedAggregate] that wraps this aggregate and associates it with the provided + * alias. */ - fun alias(alias: String) = AggregateWithAlias(alias, this) + fun alias(alias: String) = AliasedAggregate(alias, this) internal fun toProto(userDataReader: UserDataReader): Value { val builder = com.google.firestore.v1.Function.newBuilder() diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/pipeline/expressions.kt b/firebase-firestore/src/main/java/com/google/firebase/firestore/pipeline/expressions.kt index 14401daf748..8d248e1e980 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/pipeline/expressions.kt +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/pipeline/expressions.kt @@ -264,7 +264,7 @@ abstract class Expr internal constructor() : Canonicalizable { /** * Create a vector constant for a [DoubleArray] value. * - * @param vector The [VectorValue] value. + * @param vector The [DoubleArray] value. * @return A [Expr] constant instance. */ @JvmStatic fun vector(vector: DoubleArray): Expr = Constant(Values.encodeVectorValue(vector)) @@ -682,7 +682,7 @@ abstract class Expr internal constructor() : Canonicalizable { FunctionExpr("round", evaluateRoundToPrecision, numericField, decimalPlace) /** - * Creates an expression that returns the smalled integer that isn't less than [numericExpr]. + * Creates an expression that returns the smallest integer that isn't less than [numericExpr]. * * @param numericExpr An expression that returns number when evaluated. * @return A new [Expr] representing an integer result from the ceil operation. 
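A minimal Kotlin sketch using the countDistinct and countIf aggregations added above, assuming the usual `books` collection:

```
import com.google.firebase.firestore.FirebaseFirestore
import com.google.firebase.firestore.pipeline.AggregateFunction.Companion.countDistinct
import com.google.firebase.firestore.pipeline.AggregateFunction.Companion.countIf
import com.google.firebase.firestore.pipeline.Expr.Companion.gt

fun bookStats(db: FirebaseFirestore) {
  db.pipeline()
    .collection("books")
    .aggregate(
      countDistinct("genre").alias("distinctGenres"), // number of distinct genre values
      countIf(gt("rating", 4.5)).alias("highlyRated") // inputs where rating > 4.5
    )
    .execute()
    .addOnSuccessListener { snapshot -> println(snapshot.results.firstOrNull()?.getData()) }
}
```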
@@ -690,7 +690,7 @@ abstract class Expr internal constructor() : Canonicalizable { @JvmStatic fun ceil(numericExpr: Expr): Expr = FunctionExpr("ceil", evaluateCeil, numericExpr) /** - * Creates an expression that returns the smalled integer that isn't less than [numericField]. + * Creates an expression that returns the smallest integer that isn't less than [numericField]. * * @param numericField Name of field that returns number when evaluated. * @return A new [Expr] representing an integer result from the ceil operation. @@ -699,7 +699,8 @@ abstract class Expr internal constructor() : Canonicalizable { fun ceil(numericField: String): Expr = FunctionExpr("ceil", evaluateCeil, numericField) /** - * Creates an expression that returns the largest integer that isn't less than [numericExpr]. + * Creates an expression that returns the largest integer that is not greater than [numericExpr] + * . * * @param numericExpr An expression that returns number when evaluated. * @return A new [Expr] representing an integer result from the floor operation. @@ -708,7 +709,8 @@ abstract class Expr internal constructor() : Canonicalizable { fun floor(numericExpr: Expr): Expr = FunctionExpr("floor", evaluateFloor, numericExpr) /** - * Creates an expression that returns the largest integer that isn't less than [numericField]. + * Creates an expression that returns the largest integer that is not greater than + * [numericField]. * * @param numericField Name of field that returns number when evaluated. * @return A new [Expr] representing an integer result from the floor operation. @@ -1249,7 +1251,7 @@ abstract class Expr internal constructor() : Canonicalizable { @JvmStatic fun isNan(expr: Expr): BooleanExpr = BooleanExpr("is_nan", evaluateIsNaN, expr) /** - * Creates an expression that checks if [expr] evaluates to 'NaN' (Not a Number). + * Creates an expression that checks if the field's value evaluates to 'NaN' (Not a Number). * * @param fieldName The field to check. * @return A new [BooleanExpr] representing the isNan operation. @@ -1267,8 +1269,7 @@ abstract class Expr internal constructor() : Canonicalizable { fun isNotNan(expr: Expr): BooleanExpr = BooleanExpr("is_not_nan", evaluateIsNotNaN, expr) /** - * Creates an expression that checks if the results of this expression is NOT 'NaN' (Not a - * Number). + * Creates an expression that checks if the field's value is NOT 'NaN' (Not a Number). * * @param fieldName The field to check. * @return A new [BooleanExpr] representing the isNotNan operation. @@ -1278,7 +1279,7 @@ abstract class Expr internal constructor() : Canonicalizable { BooleanExpr("is_not_nan", evaluateIsNotNaN, fieldName) /** - * Creates an expression that checks if tbe result of [expr] is null. + * Creates an expression that checks if the result of [expr] is null. * * @param expr The expression to check. * @return A new [BooleanExpr] representing the isNull operation. @@ -1286,7 +1287,7 @@ abstract class Expr internal constructor() : Canonicalizable { @JvmStatic fun isNull(expr: Expr): BooleanExpr = BooleanExpr("is_null", evaluateIsNull, expr) /** - * Creates an expression that checks if tbe value of a field is null. + * Creates an expression that checks if the value of a field is null. * * @param fieldName The field to check. * @return A new [BooleanExpr] representing the isNull operation. 
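A minimal Kotlin sketch of the ceil and floor expressions documented above, projected alongside the original field; collection and field names are assumptions:

```
import com.google.firebase.firestore.FirebaseFirestore
import com.google.firebase.firestore.pipeline.Expr.Companion.ceil
import com.google.firebase.firestore.pipeline.Expr.Companion.field
import com.google.firebase.firestore.pipeline.Expr.Companion.floor

fun ratingBounds(db: FirebaseFirestore) {
  db.pipeline()
    .collection("books")
    .select(
      field("rating"),
      ceil("rating").alias("ratingCeil"),  // smallest integer >= rating
      floor("rating").alias("ratingFloor") // largest integer <= rating
    )
    .execute()
}
```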
@@ -1295,7 +1296,7 @@ abstract class Expr internal constructor() : Canonicalizable { fun isNull(fieldName: String): BooleanExpr = BooleanExpr("is_null", evaluateIsNull, fieldName) /** - * Creates an expression that checks if tbe result of [expr] is not null. + * Creates an expression that checks if the result of [expr] is not null. * * @param expr The expression to check. * @return A new [BooleanExpr] representing the isNotNull operation. @@ -1304,7 +1305,7 @@ abstract class Expr internal constructor() : Canonicalizable { fun isNotNull(expr: Expr): BooleanExpr = BooleanExpr("is_not_null", evaluateIsNotNull, expr) /** - * Creates an expression that checks if tbe value of a field is not null. + * Creates an expression that checks if the value of a field is not null. * * @param fieldName The field to check. * @return A new [BooleanExpr] representing the isNotNull operation. @@ -1509,7 +1510,7 @@ abstract class Expr internal constructor() : Canonicalizable { BooleanExpr("like", evaluateLike, fieldName, pattern) /** - * Creates an expression that return a pseudo-random number of type double in the range of [0, + * Creates an expression that returns a pseudo-random number of type double in the range of [0, * 1), inclusive of 0 and exclusive of 1. * * @return A new [Expr] representing the random number operation. @@ -1618,7 +1619,7 @@ abstract class Expr internal constructor() : Canonicalizable { */ @JvmStatic fun logicalMaximum(expr: Expr, vararg others: Any): Expr = - FunctionExpr("logical_max", evaluateLogicalMaximum, expr, *others) + FunctionExpr("max", evaluateLogicalMaximum, expr, *others) /** * Creates an expression that returns the largest value between multiple input expressions or @@ -1630,7 +1631,7 @@ abstract class Expr internal constructor() : Canonicalizable { */ @JvmStatic fun logicalMaximum(fieldName: String, vararg others: Any): Expr = - FunctionExpr("logical_max", evaluateLogicalMaximum, fieldName, *others) + FunctionExpr("max", evaluateLogicalMaximum, fieldName, *others) /** * Creates an expression that returns the smallest value between multiple input expressions or @@ -1642,7 +1643,7 @@ abstract class Expr internal constructor() : Canonicalizable { */ @JvmStatic fun logicalMinimum(expr: Expr, vararg others: Any): Expr = - FunctionExpr("logical_min", evaluateLogicalMinimum, expr, *others) + FunctionExpr("min", evaluateLogicalMinimum, expr, *others) /** * Creates an expression that returns the smallest value between multiple input expressions or @@ -1654,7 +1655,7 @@ abstract class Expr internal constructor() : Canonicalizable { */ @JvmStatic fun logicalMinimum(fieldName: String, vararg others: Any): Expr = - FunctionExpr("logical_min", evaluateLogicalMinimum, fieldName, *others) + FunctionExpr("min", evaluateLogicalMinimum, fieldName, *others) /** * Creates an expression that reverses a string. @@ -1675,6 +1676,26 @@ abstract class Expr internal constructor() : Canonicalizable { @JvmStatic fun reverse(fieldName: String): Expr = FunctionExpr("reverse", evaluateReverse, fieldName) + /** + * Creates an expression that reverses a string. + * + * @param stringExpression An expression evaluating to a string value, which will be reversed. + * @return A new [Expr] representing the reversed string. + */ + @JvmStatic + fun strReverse(stringExpression: Expr): Expr = + FunctionExpr("str_reverse", evaluateReverse, stringExpression) + + /** + * Creates an expression that reverses a string value from the specified field. 
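A minimal Kotlin sketch of logicalMaximum (now serialized as `max`) and the new strReverse helper, with assumed collection and field names:

```
import com.google.firebase.firestore.FirebaseFirestore
import com.google.firebase.firestore.pipeline.Expr.Companion.field
import com.google.firebase.firestore.pipeline.Expr.Companion.logicalMaximum
import com.google.firebase.firestore.pipeline.Expr.Companion.strReverse

fun normalizedTitles(db: FirebaseFirestore) {
  db.pipeline()
    .collection("books")
    .select(
      field("title"),
      logicalMaximum("rating", 1.0).alias("ratingAtLeastOne"), // larger of the field value and 1.0
      strReverse("title").alias("reversedTitle")
    )
    .execute()
}
```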
+ * + * @param fieldName The name of the field that contains the string to reverse. + * @return A new [Expr] representing the reversed string. + */ + @JvmStatic + fun strReverse(fieldName: String): Expr = + FunctionExpr("str_reverse", evaluateReverse, fieldName) + /** * Creates an expression that checks if a string expression contains a specified substring. * @@ -1854,7 +1875,7 @@ abstract class Expr internal constructor() : Canonicalizable { * Creates an expression that converts a string expression to uppercase. * * @param stringExpression The expression representing the string to convert to uppercase. - * @return A new [Expr] representing the lowercase string. + * @return A new [Expr] representing the uppercase string. */ @JvmStatic fun toUpper(stringExpression: Expr): Expr = @@ -1864,7 +1885,7 @@ abstract class Expr internal constructor() : Canonicalizable { * Creates an expression that converts a string field to uppercase. * * @param fieldName The name of the field containing the string to convert to uppercase. - * @return A new [Expr] representing the lowercase string. + * @return A new [Expr] representing the uppercase string. */ @JvmStatic fun toUpper(fieldName: String): Expr = FunctionExpr("to_upper", evaluateToUppercase, fieldName) @@ -3016,8 +3037,8 @@ abstract class Expr internal constructor() : Canonicalizable { * @return A new [Expr] representing the arrayOffset operation. */ @JvmStatic - fun arrayOffset(array: Expr, offset: Expr): Expr = - FunctionExpr("array_offset", notImplemented, array, offset) + fun arrayGet(array: Expr, offset: Expr): Expr = + FunctionExpr("array_get", notImplemented, array, offset) /** * Creates an expression that indexes into an array from the beginning or end and return the @@ -3029,8 +3050,8 @@ abstract class Expr internal constructor() : Canonicalizable { * @return A new [Expr] representing the arrayOffset operation. */ @JvmStatic - fun arrayOffset(array: Expr, offset: Int): Expr = - FunctionExpr("array_offset", notImplemented, array, constant(offset)) + fun arrayGet(array: Expr, offset: Int): Expr = + FunctionExpr("array_get", notImplemented, array, constant(offset)) /** * Creates an expression that indexes into an array from the beginning or end and return the @@ -3042,8 +3063,8 @@ abstract class Expr internal constructor() : Canonicalizable { * @return A new [Expr] representing the arrayOffset operation. */ @JvmStatic - fun arrayOffset(arrayFieldName: String, offset: Expr): Expr = - FunctionExpr("array_offset", notImplemented, arrayFieldName, offset) + fun arrayGet(arrayFieldName: String, offset: Expr): Expr = + FunctionExpr("array_get", notImplemented, arrayFieldName, offset) /** * Creates an expression that indexes into an array from the beginning or end and return the @@ -3055,8 +3076,8 @@ abstract class Expr internal constructor() : Canonicalizable { * @return A new [Expr] representing the arrayOffset operation. */ @JvmStatic - fun arrayOffset(arrayFieldName: String, offset: Int): Expr = - FunctionExpr("array_offset", notImplemented, arrayFieldName, constant(offset)) + fun arrayGet(arrayFieldName: String, offset: Int): Expr = + FunctionExpr("array_get", notImplemented, arrayFieldName, constant(offset)) /** * Creates a conditional expression that evaluates to a [thenExpr] expression if a condition is @@ -3270,10 +3291,10 @@ abstract class Expr internal constructor() : Canonicalizable { * to calculated values. * * @param alias The alias to assign to this expression. 
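A minimal Kotlin sketch of the renamed arrayGet expression, in both static and fluent forms, with assumed collection and field names:

```
import com.google.firebase.firestore.FirebaseFirestore
import com.google.firebase.firestore.pipeline.Expr.Companion.arrayGet
import com.google.firebase.firestore.pipeline.Expr.Companion.field

fun firstTags(db: FirebaseFirestore) {
  db.pipeline()
    .collection("books")
    .select(
      field("title"),
      arrayGet("tags", 0).alias("primaryTag"),        // element at index 0 of the "tags" array
      field("tags").arrayGet(1).alias("secondaryTag") // fluent form on an existing field
    )
    .execute()
}
```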
- * @return A new [Selectable] (typically an [ExprWithAlias]) that wraps this expression and + * @return A new [Selectable] (typically an [AliasedExpr]) that wraps this expression and * associates it with the provided alias. */ - open fun alias(alias: String): Selectable = ExprWithAlias(alias, this) + open fun alias(alias: String): Selectable = AliasedExpr(alias, this) /** * Creates an expression that returns the document ID from this path expression. @@ -3394,7 +3415,7 @@ abstract class Expr internal constructor() : Canonicalizable { fun roundToPrecision(decimalPlace: Expr): Expr = Companion.roundToPrecision(this, decimalPlace) /** - * Creates an expression that returns the smalled integer that isn't less than this numeric + * Creates an expression that returns the smallest integer that isn't less than this numeric * expression. * * @return A new [Expr] representing an integer result from the ceil operation. @@ -3402,7 +3423,7 @@ abstract class Expr internal constructor() : Canonicalizable { fun ceil(): Expr = Companion.ceil(this) /** - * Creates an expression that returns the largest integer that isn't less than this numeric + * Creates an expression that returns the largest integer that is not greater than this numeric * expression. * * @return A new [Expr] representing an integer result from the floor operation. @@ -3475,7 +3496,7 @@ abstract class Expr internal constructor() : Canonicalizable { fun notEqAny(arrayExpression: Expr): BooleanExpr = Companion.notEqAny(this, arrayExpression) /** - * Creates an expression that returns true if yhe result of this expression is absent. Otherwise, + * Creates an expression that returns true if the result of this expression is absent. Otherwise, * returns false even if the value is null. * * @return A new [BooleanExpr] representing the isAbsent operation. @@ -3498,14 +3519,14 @@ abstract class Expr internal constructor() : Canonicalizable { fun isNotNan(): BooleanExpr = Companion.isNotNan(this) /** - * Creates an expression that checks if tbe result of this expression is null. + * Creates an expression that checks if the result of this expression is null. * * @return A new [BooleanExpr] representing the isNull operation. */ fun isNull(): BooleanExpr = Companion.isNull(this) /** - * Creates an expression that checks if tbe result of this expression is not null. + * Creates an expression that checks if the result of this expression is not null. * * @return A new [BooleanExpr] representing the isNotNull operation. */ @@ -3661,6 +3682,13 @@ abstract class Expr internal constructor() : Canonicalizable { */ fun reverse(): Expr = Companion.reverse(this) + /** + * Creates an expression that reverses this string expression. + * + * @return A new [Expr] representing the reversed string. + */ + fun strReverse(): Expr = Companion.strReverse(this) + /** * Creates an expression that checks if this string expression contains a specified substring. * @@ -3681,15 +3709,15 @@ abstract class Expr internal constructor() : Canonicalizable { * Creates an expression that checks if this string expression starts with a given [prefix]. * * @param prefix The prefix string expression to check for. - * @return A new [Expr] representing the the 'starts with' comparison. + * @return A new [BooleanExpr] representing the 'starts with' comparison. */ fun startsWith(prefix: Expr): BooleanExpr = Companion.startsWith(this, prefix) /** * Creates an expression that checks if this string expression starts with a given [prefix]. 
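A minimal Kotlin sketch of the fluent string helpers on Expr (startsWith, endsWith, toUpper), with assumed collection and field names:

```
import com.google.firebase.firestore.FirebaseFirestore
import com.google.firebase.firestore.pipeline.Expr.Companion.field

fun seriesTitles(db: FirebaseFirestore) {
  db.pipeline()
    .collection("books")
    .where(field("title").startsWith("The")) // BooleanExpr, usable directly in where()
    .select(
      field("title").toUpper().alias("titleUpper"),
      field("title").endsWith("Galaxy").alias("isGalaxyBook")
    )
    .execute()
}
```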
* - * @param prefix The prefix string expression to check for. - * @return A new [Expr] representing the 'starts with' comparison. + * @param prefix The prefix string to check for. + * @return A new [BooleanExpr] representing the 'starts with' comparison. */ fun startsWith(prefix: String): BooleanExpr = Companion.startsWith(this, prefix) @@ -3697,7 +3725,7 @@ abstract class Expr internal constructor() : Canonicalizable { * Creates an expression that checks if this string expression ends with a given [suffix]. * * @param suffix The suffix string expression to check for. - * @return A new [Expr] representing the 'ends with' comparison. + * @return A new [BooleanExpr] representing the 'ends with' comparison. */ fun endsWith(suffix: Expr): BooleanExpr = Companion.endsWith(this, suffix) @@ -3705,7 +3733,7 @@ abstract class Expr internal constructor() : Canonicalizable { * Creates an expression that checks if this string expression ends with a given [suffix]. * * @param suffix The suffix string to check for. - * @return A new [Expr] representing the the 'ends with' comparison. + * @return A new [BooleanExpr] representing the 'ends with' comparison. */ fun endsWith(suffix: String) = Companion.endsWith(this, suffix) @@ -3738,7 +3766,7 @@ abstract class Expr internal constructor() : Canonicalizable { /** * Creates an expression that converts this string expression to uppercase. * - * @return A new [Expr] representing the lowercase string. + * @return A new [Expr] representing the uppercase string. */ fun toUpper() = Companion.toUpper(this) @@ -4076,7 +4104,7 @@ abstract class Expr internal constructor() : Canonicalizable { * @param offset An Expr evaluating to the index of the element to return. * @return A new [Expr] representing the arrayOffset operation. */ - fun arrayOffset(offset: Expr) = Companion.arrayOffset(this, offset) + fun arrayGet(offset: Expr) = Companion.arrayGet(this, offset) /** * Creates an expression that indexes into an array from the beginning or end and return the @@ -4086,7 +4114,7 @@ abstract class Expr internal constructor() : Canonicalizable { * @param offset An Expr evaluating to the index of the element to return. * @return A new [Expr] representing the arrayOffset operation. */ - fun arrayOffset(offset: Int) = Companion.arrayOffset(this, offset) + fun arrayGet(offset: Int) = Companion.arrayGet(this, offset) /** * Creates an aggregation that counts the number of stage inputs with valid evaluations of the @@ -4297,7 +4325,7 @@ abstract class Selectable : Expr() { } /** Represents an expression that will be given the alias in the output document. 
*/ -class ExprWithAlias internal constructor(override val alias: String, override val expr: Expr) : +class AliasedExpr internal constructor(override val alias: String, override val expr: Expr) : Selectable() { override fun toProto(userDataReader: UserDataReader): Value = expr.toProto(userDataReader) override fun evaluateFunction(context: EvaluationContext) = expr.evaluateFunction(context) @@ -4305,7 +4333,7 @@ class ExprWithAlias internal constructor(override val alias: String, override va override fun equals(other: Any?): Boolean { if (this === other) return true - if (other !is ExprWithAlias) return false + if (other !is AliasedExpr) return false if (alias != other.alias) return false if (expr != other.expr) return false return true diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/pipeline/options.kt b/firebase-firestore/src/main/java/com/google/firebase/firestore/pipeline/options.kt index d690ea14871..8c9485c78b9 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/pipeline/options.kt +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/pipeline/options.kt @@ -45,6 +45,18 @@ class InternalOptions internal constructor(private val options: ImmutableMap): InternalOptions { + return with(key, value.options) + } + + internal fun adding(newOptions: InternalOptions): InternalOptions { + val builder = + ImmutableMap.builderWithExpectedSize(options.size + newOptions.options.size) + builder.putAll(options) + builder.putAll(newOptions.options) + return InternalOptions(builder.build()) + } + internal fun forEach(f: (String, Value) -> Unit) { for (entry in options.entries) { f(entry.key, entry.value) @@ -74,6 +86,18 @@ internal constructor(internal val options: InternalOptions) { protected fun with(key: String, value: Value): T = self(options.with(key, value)) + protected fun with(key: String, vararg values: String): T { + return self(options.with(key, listOf(*values).map { s: String -> Values.encodeValue(s) })) + } + + protected fun with(key: String, subSection: AbstractOptions<*>): T { + return self(options.with(key, subSection.options)) + } + + protected fun adding(newOptions: AbstractOptions<*>): T { + return self(options.adding(newOptions.options)) + } + /** * Specify generic [String] option * @@ -141,11 +165,9 @@ class RawOptions private constructor(options: InternalOptions) : class PipelineOptions private constructor(options: InternalOptions) : AbstractOptions(options) { - override fun self(options: InternalOptions) = PipelineOptions(options) + constructor() : this(InternalOptions.EMPTY) - companion object { - @JvmField val DEFAULT: PipelineOptions = PipelineOptions(InternalOptions.EMPTY) - } + override fun self(options: InternalOptions) = PipelineOptions(options) class IndexMode private constructor(internal val value: String) { companion object { diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/pipeline/stage.kt b/firebase-firestore/src/main/java/com/google/firebase/firestore/pipeline/stage.kt index 72902da9656..c440bf8852e 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/pipeline/stage.kt +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/pipeline/stage.kt @@ -14,11 +14,9 @@ package com.google.firebase.firestore.pipeline -import com.google.firebase.firestore.CollectionReference import com.google.firebase.firestore.UserDataReader import com.google.firebase.firestore.VectorValue import com.google.firebase.firestore.core.Canonicalizable -import 
com.google.firebase.firestore.model.DatabaseId import com.google.firebase.firestore.model.Document import com.google.firebase.firestore.model.DocumentKey.KEY_FIELD_NAME import com.google.firebase.firestore.model.MutableDocument @@ -28,9 +26,9 @@ import com.google.firebase.firestore.model.Values.encodeValue import com.google.firebase.firestore.pipeline.Expr.Companion.constant import com.google.firebase.firestore.pipeline.Expr.Companion.field import com.google.firebase.firestore.remote.RemoteSerializer -import com.google.firebase.firestore.util.Preconditions import com.google.firestore.v1.Pipeline import com.google.firestore.v1.Value +import javax.annotation.Nonnull sealed class Stage>(internal val name: String, internal val options: InternalOptions) { internal fun toProtoStage(userDataReader: UserDataReader): Pipeline.Stage { @@ -118,7 +116,7 @@ private constructor( * Specify name of stage * * @param name The unique name of the stage to add. - * @return [RawStage] with specified parameters. + * @return A new [RawStage] for the specified stage name. */ @JvmStatic fun ofName(name: String) = RawStage(name, emptyList(), InternalOptions.EMPTY) } @@ -200,6 +198,13 @@ internal constructor( internal val serializer: RemoteSerializer, options: InternalOptions ) : Stage("collection", options), Canonicalizable { + + internal constructor( + path: ResourcePath, + serializer: RemoteSerializer, + options: CollectionSourceOptions + ) : this(path, serializer, options.options) + override fun canonicalId(): String { return "${name}(${path.canonicalString()})" } @@ -224,24 +229,6 @@ internal constructor( CollectionSource(path, serializer, options) override fun args(userDataReader: UserDataReader): Sequence = sequenceOf(Value.newBuilder().setReferenceValue("/${path.canonicalString()}").build()) - companion object { - /** - * Set the pipeline's source to the collection specified by the given CollectionReference. - * - * @param ref A CollectionReference for a collection that will be the source of this pipeline. - * @return Pipeline with documents from target collection. - */ - @JvmStatic - internal fun of(ref: CollectionReference, databaseId: DatabaseId): CollectionSource { - return CollectionSource( - ResourcePath.fromString(ref.path), - RemoteSerializer(databaseId), - InternalOptions.EMPTY - ) - } - } - - fun withForceIndex(value: String) = withOption("force_index", value) override fun evaluate( context: EvaluationContext, @@ -251,8 +238,62 @@ internal constructor( } } +class CollectionSourceOptions internal constructor(options: InternalOptions) : + AbstractOptions(options) { + /** Creates a new, empty `CollectionSourceOptions` object. */ + constructor() : this(InternalOptions.EMPTY) + + /** + * Specifies query hints for the collection source. + * + * @param hints The hints to apply to the collection source. + * @return A new `CollectionSourceOptions` with the specified hints. + */ + fun withHints(hints: CollectionHints) = adding(hints) + + override fun self(options: InternalOptions): CollectionSourceOptions { + return CollectionSourceOptions(options) + } +} + +class CollectionHints internal constructor(options: InternalOptions) : + AbstractOptions(options) { + /** Creates a new, empty `CollectionHints` object. */ + constructor() : this(InternalOptions.EMPTY) + + public override fun self(options: InternalOptions): CollectionHints { + return CollectionHints(options) + } + + /** + * Forces the query to use a specific index. + * + * @param value The name of the index to force. 
+ * @return A new `CollectionHints` with the specified forced index. + */ + fun withForceIndex(value: String): CollectionHints { + return with("force_index", value) + } + + /** + * Specifies fields to ignore in the index. + * + * @param values The names of the fields to ignore in the index. + * @return A new `CollectionHints` with the specified ignored index fields. + */ + fun withIgnoreIndexFields(vararg values: String): CollectionHints { + return with("ignore_index_fields", *values) + } +} + class CollectionGroupSource(val collectionId: String, options: InternalOptions) : Stage("collection_group", options), Canonicalizable { + + internal constructor( + collectionId: String, + options: CollectionGroupOptions + ) : this(collectionId, options.options) + override fun canonicalId(): String { return "${name}(${collectionId})" } @@ -282,25 +323,24 @@ class CollectionGroupSource(val collectionId: String, options: InternalOptions) input.isFoundDocument && input.key.collectionGroup == collectionId } } +} - companion object { +class CollectionGroupOptions internal constructor(options: InternalOptions) : + AbstractOptions(options) { + /** Creates a new, empty `CollectionGroupOptions` object. */ + constructor() : this(InternalOptions.EMPTY) - /** - * Set the pipeline's source to the collection group with the given id. - * - * @param collectionId The id of a collection group that will be the source of this pipeline. - */ - @JvmStatic - fun of(collectionId: String): CollectionGroupSource { - Preconditions.checkNotNull(collectionId, "Provided collection ID must not be null.") - require(!collectionId.contains("/")) { - "Invalid collectionId '$collectionId'. Collection IDs must not contain '/'." - } - return CollectionGroupSource(collectionId, InternalOptions.EMPTY) - } + public override fun self(options: InternalOptions): CollectionGroupOptions { + return CollectionGroupOptions(options) } - fun withForceIndex(value: String) = withOption("force_index", value) + /** + * Specifies query hints for the collection group source. + * + * @param hints The hints to apply to the collection group source. + * @return A new `CollectionGroupOptions` with the specified hints. + */ + fun withHints(hints: CollectionHints): CollectionGroupOptions = adding(hints) } internal class DocumentsSource @@ -391,12 +431,12 @@ internal constructor( * groups is the same as putting the entire inputs into one group. * * - **AggregateFunctions:** One or more accumulation operations to perform within each group. These - * are defined using [AggregateWithAlias] expressions, which are typically created by calling + * are defined using [AliasedAggregate] expressions, which are typically created by calling * [AggregateFunction.alias] on [AggregateFunction] instances. Each aggregation calculates a value * (e.g., sum, average, count) based on the documents within its group. */ class AggregateStage -internal constructor( +private constructor( private val accumulators: Map, private val groups: Map, options: InternalOptions = InternalOptions.EMPTY @@ -407,16 +447,16 @@ internal constructor( /** * Create [AggregateStage] with one or more accumulators. * - * @param accumulator The first [AggregateWithAlias] expression, wrapping an {@link - * AggregateFunction} with an alias for the accumulated results. - * @param additionalAccumulators The [AggregateWithAlias] expressions, each wrapping an + * @param accumulator The first [AliasedAggregate] expression, wrapping an [AggregateFunction] + * with an alias for the accumulated results. 
+ * @param additionalAccumulators The [AliasedAggregate] expressions, each wrapping an * [AggregateFunction] with an alias for the accumulated results. * @return [AggregateStage] with specified accumulators. */ @JvmStatic fun withAccumulators( - accumulator: AggregateWithAlias, - vararg additionalAccumulators: AggregateWithAlias + accumulator: AliasedAggregate, + vararg additionalAccumulators: AliasedAggregate ): AggregateStage { return AggregateStage( mapOf(accumulator.alias to accumulator.expr) @@ -441,8 +481,7 @@ internal constructor( /** * Add one or more groups to [AggregateStage] * - * @param groupField The [Selectable] expression to consider when determining group value - * combinations. + * @param group The [Selectable] expression to consider when determining group value combinations. * @param additionalGroups The [Selectable] expressions to consider when determining group value * combinations or [String]s representing field names. * @return [AggregateStage] with specified groups. @@ -451,9 +490,13 @@ internal constructor( AggregateStage( accumulators, mapOf(group.alias to group.expr) - .plus(additionalGroups.map(Selectable::toSelectable).associateBy(Selectable::alias)) + .plus(additionalGroups.map(Selectable::toSelectable).associateBy(Selectable::alias)), + options ) + internal fun withOptions(options: AggregateOptions) = + AggregateStage(accumulators, groups, options.options) + override fun args(userDataReader: UserDataReader): Sequence = sequenceOf( encodeValue(accumulators.mapValues { entry -> entry.value.toProto(userDataReader) }), @@ -477,6 +520,38 @@ internal constructor( } } +class AggregateHints internal constructor(options: InternalOptions) : + AbstractOptions(options) { + /** Creates a new, empty `AggregateHints` object. */ + constructor() : this(InternalOptions.EMPTY) + + public override fun self(options: InternalOptions): AggregateHints { + return AggregateHints(options) + } + + fun withForceStreamableEnabled(): AggregateHints { + return with("force_streamable", true) + } +} + +class AggregateOptions internal constructor(options: InternalOptions) : + AbstractOptions(options) { + /** Creates a new, empty `AggregateOptions` object. */ + constructor() : this(InternalOptions.EMPTY) + + public override fun self(options: InternalOptions): AggregateOptions { + return AggregateOptions(options) + } + + /** + * Specifies query hints for the aggregation. + * + * @param hints The hints to apply to the aggregation. + * @return A new `AggregateOptions` with the specified hints. + */ + fun withHints(hints: AggregateHints): AggregateOptions = adding(hints) +} + internal class WhereStage internal constructor( internal val condition: Expr, @@ -525,6 +600,13 @@ internal constructor( options: InternalOptions = InternalOptions.EMPTY ) : Stage("find_nearest", options) { + private constructor( + property: Expr, + vector: Expr, + distanceMeasure: DistanceMeasure, + options: FindNearestOptions + ) : this(property, vector, distanceMeasure, options.options) + companion object { /** @@ -538,8 +620,12 @@ internal constructor( * @return [FindNearestStage] with specified parameters. 
*/ @JvmStatic - fun of(vectorField: Field, vectorValue: VectorValue, distanceMeasure: DistanceMeasure) = - FindNearestStage(vectorField, constant(vectorValue), distanceMeasure) + internal fun of( + vectorField: Field, + vectorValue: VectorValue, + distanceMeasure: DistanceMeasure, + options: FindNearestOptions = FindNearestOptions() + ) = FindNearestStage(vectorField, constant(vectorValue), distanceMeasure, options) /** * Create [FindNearestStage]. @@ -552,8 +638,12 @@ internal constructor( * @return [FindNearestStage] with specified parameters. */ @JvmStatic - fun of(vectorField: Field, vectorValue: DoubleArray, distanceMeasure: DistanceMeasure) = - FindNearestStage(vectorField, Expr.vector(vectorValue), distanceMeasure) + internal fun of( + vectorField: Field, + vectorValue: DoubleArray, + distanceMeasure: DistanceMeasure, + options: FindNearestOptions = FindNearestOptions() + ) = FindNearestStage(vectorField, Expr.vector(vectorValue), distanceMeasure, options) /** * Create [FindNearestStage]. @@ -566,8 +656,12 @@ internal constructor( * @return [FindNearestStage] with specified parameters. */ @JvmStatic - fun of(vectorField: String, vectorValue: VectorValue, distanceMeasure: DistanceMeasure) = - FindNearestStage(constant(vectorField), constant(vectorValue), distanceMeasure) + internal fun of( + vectorField: String, + vectorValue: VectorValue, + distanceMeasure: DistanceMeasure, + options: FindNearestOptions = FindNearestOptions() + ) = FindNearestStage(field(vectorField), constant(vectorValue), distanceMeasure, options) /** * Create [FindNearestStage]. @@ -580,18 +674,32 @@ internal constructor( * @return [FindNearestStage] with specified parameters. */ @JvmStatic - fun of(vectorField: String, vectorValue: DoubleArray, distanceMeasure: DistanceMeasure) = - FindNearestStage(constant(vectorField), Expr.vector(vectorValue), distanceMeasure) + internal fun of( + vectorField: String, + vectorValue: DoubleArray, + distanceMeasure: DistanceMeasure, + options: FindNearestOptions = FindNearestOptions() + ) = FindNearestStage(field(vectorField), Expr.vector(vectorValue), distanceMeasure, options) + + internal fun of( + vectorField: String, + vectorValue: Expr, + distanceMeasure: DistanceMeasure, + options: FindNearestOptions = FindNearestOptions() + ) = FindNearestStage(field(vectorField), vectorValue, distanceMeasure, options) } class DistanceMeasure private constructor(internal val proto: Value) { private constructor(protoString: String) : this(encodeValue(protoString)) companion object { + /** The Euclidean distance measure. */ @JvmField val EUCLIDEAN = DistanceMeasure("euclidean") + /** The Cosine distance measure. */ @JvmField val COSINE = DistanceMeasure("cosine") + /** The Dot Product distance measure. */ @JvmField val DOT_PRODUCT = DistanceMeasure("dot_product") } } @@ -623,32 +731,46 @@ internal constructor( result = 31 * result + options.hashCode() return result } +} + +class FindNearestOptions private constructor(options: InternalOptions) : + AbstractOptions(options) { + /** Creates a new, empty `FindNearestOptions` object. */ + constructor() : this(InternalOptions.EMPTY) + + public override fun self(options: InternalOptions): FindNearestOptions { + return FindNearestOptions(options) + } /** * Specifies the upper bound of documents to return. * * @param limit must be a positive integer. - * @return [FindNearestStage] with specified [limit]. + * @return A new `FindNearestOptions` with the specified limit. 
*/ - fun withLimit(limit: Long): FindNearestStage = withOption("limit", limit) + fun withLimit(limit: Long): FindNearestOptions { + return with("limit", limit) + } /** * Add a field containing the distance to the result. * * @param distanceField The [Field] that will be added to the result. - * @return [FindNearestStage] with specified [distanceField]. + * @return A new `FindNearestOptions` with the specified distance field. */ - fun withDistanceField(distanceField: Field): FindNearestStage = - withOption("distance_field", distanceField) + fun withDistanceField(distanceField: Field): FindNearestOptions { + return with("distance_field", distanceField) + } /** * Add a field containing the distance to the result. * * @param distanceField The name of the field that will be added to the result. - * @return [FindNearestStage] with specified [distanceField]. + * @return A new `FindNearestOptions` with the specified distance field. */ - fun withDistanceField(distanceField: String): FindNearestStage = - withDistanceField(field(distanceField)) + fun withDistanceField(distanceField: String?): FindNearestOptions? { + return withDistanceField(field(distanceField!!)) + } } internal class LimitStage @@ -885,7 +1007,7 @@ internal constructor( private val mapValue: Expr, private val mode: Mode, options: InternalOptions = InternalOptions.EMPTY -) : Stage("replace", options) { +) : Stage("replace_with", options) { class Mode private constructor(internal val proto: Value) { private constructor(protoString: String) : this(encodeValue(protoString)) companion object { @@ -933,7 +1055,9 @@ private constructor( class Mode private constructor(internal val proto: Value) { private constructor(protoString: String) : this(encodeValue(protoString)) companion object { + /** Sample by a fixed number of documents. */ val DOCUMENTS = Mode("documents") + /** Sample by a percentage of documents. */ val PERCENT = Mode("percent") } } @@ -1030,7 +1154,7 @@ internal constructor( * original array field will be replaced with the individual element. * * @param arrayWithAlias The input array with field alias to store output element of array. - * @return [SampleStage] with input array and alias specified. + * @return [UnnestStage] with input array and alias specified. */ @JvmStatic fun withField(arrayWithAlias: Selectable) = UnnestStage(arrayWithAlias) @@ -1043,7 +1167,7 @@ internal constructor( * document. The element of the input array will be stored in a field with name specified by * [alias] parameter on the augmented document. * - * @return [SampleStage] with input array and alias specified. + * @return [UnnestStage] with input array and alias specified. */ @JvmStatic fun withField(arrayField: String, alias: String): UnnestStage = @@ -1051,7 +1175,7 @@ internal constructor( } override fun self(options: InternalOptions) = UnnestStage(selectable, options) override fun args(userDataReader: UserDataReader): Sequence = - sequenceOf(encodeValue(selectable.alias), selectable.toProto(userDataReader)) + sequenceOf(selectable.toProto(userDataReader), field(selectable.alias).toProto()) override fun equals(other: Any?): Boolean { if (this === other) return true @@ -1067,6 +1191,24 @@ internal constructor( return result } + /** + * Adds an index field to the output documents. + * + * A field with the name specified in [indexField] will be added to each output document. The + * value of this field is a numeric value that corresponds to the array index of the element from + * the input array. 
+ * + * @param indexField The name of the index field. + * @return A new `UnnestStage` that includes the specified index field. + */ + fun withIndexField(indexField: String): UnnestStage = withOption("index_field", indexField) +} + +class UnnestOptions private constructor(options: InternalOptions) : + AbstractOptions(options) { + /** Creates a new, empty `UnnestOptions` object. */ + constructor() : this(InternalOptions.EMPTY) + /** * Adds index field to emitted documents * @@ -1074,7 +1216,13 @@ internal constructor( * numeric value that corresponds to array index of the element from input array. * * @param indexField The field name of index field. - * @return [SampleStage] that includes specified index field. + * @return A new `UnnestOptions` that includes the specified index field. */ - fun withIndexField(indexField: String): UnnestStage = withOption("index_field", indexField) + fun withIndexField(@Nonnull indexField: String): UnnestOptions { + return with("index_field", Value.newBuilder().setFieldReferenceValue(indexField).build()) + } + + public override fun self(options: InternalOptions): UnnestOptions { + return UnnestOptions(options) + } }
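Below are a few usage sketches (Kotlin) for the API surface touched by this diff. Anything not visible above — pipeline entry points, concrete field names, and index names — is an assumption, not part of the change itself. First, the new `strReverse` factory and the `arrayOffset` → `arrayGet` rename:

```kotlin
import com.google.firebase.firestore.pipeline.Expr
import com.google.firebase.firestore.pipeline.Expr.Companion.field

// Reverse a string field via the new factory, or fluently on an expression.
val reversedTitle: Expr = Expr.strReverse("title")        // field-name overload
val reversedAuthor: Expr = field("author").strReverse()   // instance form

// arrayGet replaces the old arrayOffset name and serializes to "array_get".
val firstTag: Expr = Expr.arrayGet("tags", 0)                  // field name + Int offset
val offsetTag: Expr = field("tags").arrayGet(field("offset"))  // Expr offset ("offset" is illustrative)
```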
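The `ExprWithAlias` → `AliasedExpr` rename keeps `alias()` as the public entry point; a minimal sketch with an illustrative field name:

```kotlin
import com.google.firebase.firestore.pipeline.Expr.Companion.field
import com.google.firebase.firestore.pipeline.Selectable

// alias() still returns a Selectable; the concrete wrapper is now AliasedExpr.
val upperTitle: Selectable = field("title").toUpper().alias("titleUpper")
```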
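`PipelineOptions.DEFAULT` is replaced by a public no-arg constructor, so an empty options object is now constructed directly:

```kotlin
import com.google.firebase.firestore.pipeline.PipelineOptions

// Replaces the removed PipelineOptions.DEFAULT constant.
val pipelineOptions = PipelineOptions()
```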
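Sketch for the new collection-source hints: `withForceIndex` moves off the stage classes onto `CollectionHints`, which is attached through `CollectionSourceOptions` or `CollectionGroupOptions`. How the options object is handed to the pipeline's collection stage is not shown in this hunk, and the index/field names are illustrative:

```kotlin
import com.google.firebase.firestore.pipeline.CollectionGroupOptions
import com.google.firebase.firestore.pipeline.CollectionHints
import com.google.firebase.firestore.pipeline.CollectionSourceOptions

// Index hints are built once and can be attached to either source kind.
val hints = CollectionHints()
    .withForceIndex("books_by_rating")           // force a specific index
    .withIgnoreIndexFields("embedding", "tags")  // exclude fields from index selection

val collectionOptions = CollectionSourceOptions().withHints(hints)
val collectionGroupOptions = CollectionGroupOptions().withHints(hints)
```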
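Sketch for the aggregate options: the `force_streamable` hint is requested through `AggregateHints` and carried by `AggregateOptions`; wiring it onto a stage goes through the internal `withOptions`, so a pipeline-level overload is assumed. `AggregateFunction.avg` and the `"rating"` field are also assumptions, not shown in this diff:

```kotlin
import com.google.firebase.firestore.pipeline.AggregateFunction
import com.google.firebase.firestore.pipeline.AggregateHints
import com.google.firebase.firestore.pipeline.AggregateOptions
import com.google.firebase.firestore.pipeline.AggregateStage

// avg("rating").alias(...) yields the AliasedAggregate the stage factory expects
// (avg and the "rating" field are assumptions).
val aggregate: AggregateStage =
    AggregateStage.withAccumulators(AggregateFunction.avg("rating").alias("avgRating"))

// Opt into streamable execution via the new hint/options objects.
val aggregateOptions: AggregateOptions =
    AggregateOptions().withHints(AggregateHints().withForceStreamableEnabled())
```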
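Sketch for `FindNearestOptions`: the limit and distance field now live on the options object rather than on `FindNearestStage`, whose factories become internal here, so the options are expected to be passed through a pipeline-level findNearest entry point that is not part of this hunk. Field names are illustrative:

```kotlin
import com.google.firebase.firestore.pipeline.Expr.Companion.field
import com.google.firebase.firestore.pipeline.FindNearestOptions
import com.google.firebase.firestore.pipeline.FindNearestStage

// Cap result size and request the computed distance as an extra output field.
val nearestOptions: FindNearestOptions =
    FindNearestOptions()
        .withLimit(10L)
        .withDistanceField(field("distance")) // Field overload keeps the chain non-nullable

// Distance measures remain constants nested under the stage.
val measure: FindNearestStage.DistanceMeasure = FindNearestStage.DistanceMeasure.COSINE
```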
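Sketch for unnest: `withField` plus the instance-level `withIndexField` on `UnnestStage`, and the parallel `UnnestOptions` builder added here (how the options variant is threaded into a pipeline call is not shown in this hunk). Field names are illustrative:

```kotlin
import com.google.firebase.firestore.pipeline.UnnestOptions
import com.google.firebase.firestore.pipeline.UnnestStage

// One output document per element of "tags": the element lands in "tag",
// its position in the source array lands in "tagIndex".
val unnest: UnnestStage =
    UnnestStage.withField("tags", "tag").withIndexField("tagIndex")

// The same index-field request expressed through the new options object.
val unnestOptions: UnnestOptions = UnnestOptions().withIndexField("tagIndex")
```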