diff --git a/firebase-firestore/CHANGELOG.md b/firebase-firestore/CHANGELOG.md index 29416bcf9a4..7800f5b1c4a 100644 --- a/firebase-firestore/CHANGELOG.md +++ b/firebase-firestore/CHANGELOG.md @@ -1,5 +1,5 @@ # Unreleased - +* [feature] Add support for the following new types: MinKey, MaxKey, RegexValue, Int32Value, BsonObjectId, BsonTimestamp, and BsonBinaryData. [#6928](//github.com/firebase/firebase-android-sdk/pull/6928) # 25.1.4 * [fixed] Fixed the `null` value handling in `whereNotEqualTo` and `whereNotIn` filters. diff --git a/firebase-firestore/api.txt b/firebase-firestore/api.txt index e3a55cf729c..f14627e2905 100644 --- a/firebase-firestore/api.txt +++ b/firebase-firestore/api.txt @@ -47,6 +47,25 @@ package com.google.firebase.firestore { method public byte[] toBytes(); } + public final class BsonBinaryData { + method public com.google.protobuf.ByteString dataAsByteString(); + method public byte[] dataAsBytes(); + method public static com.google.firebase.firestore.BsonBinaryData fromByteString(int, com.google.protobuf.ByteString); + method public static com.google.firebase.firestore.BsonBinaryData fromBytes(int, byte[]); + method public int subtype(); + } + + public final class BsonObjectId { + ctor public BsonObjectId(String); + field public final String! value; + } + + public final class BsonTimestamp { + ctor public BsonTimestamp(long, long); + field public final long increment; + field public final long seconds; + } + public class CollectionReference extends com.google.firebase.firestore.Query { method public com.google.android.gms.tasks.Task add(Object); method public com.google.firebase.firestore.DocumentReference document(); @@ -109,6 +128,9 @@ package com.google.firebase.firestore { method public T? get(String, Class, com.google.firebase.firestore.DocumentSnapshot.ServerTimestampBehavior); method public com.google.firebase.firestore.Blob? getBlob(String); method public Boolean? 
getBoolean(String); + method public com.google.firebase.firestore.BsonBinaryData? getBsonBinaryData(String); + method public com.google.firebase.firestore.BsonObjectId? getBsonObjectId(String); + method public com.google.firebase.firestore.BsonTimestamp? getBsonTimestamp(String); method public java.util.Map? getData(); method public java.util.Map? getData(com.google.firebase.firestore.DocumentSnapshot.ServerTimestampBehavior); method public java.util.Date? getDate(String); @@ -117,9 +139,13 @@ package com.google.firebase.firestore { method public Double? getDouble(String); method public com.google.firebase.firestore.GeoPoint? getGeoPoint(String); method public String getId(); + method public com.google.firebase.firestore.Int32Value? getInt32Value(String); method public Long? getLong(String); + method public com.google.firebase.firestore.MaxKey? getMaxKey(String); method public com.google.firebase.firestore.SnapshotMetadata getMetadata(); + method public com.google.firebase.firestore.MinKey? getMinKey(String); method public com.google.firebase.firestore.DocumentReference getReference(); + method public com.google.firebase.firestore.RegexValue? getRegexValue(String); method public String? getString(String); method public com.google.firebase.Timestamp? getTimestamp(String); method public com.google.firebase.Timestamp? 
getTimestamp(String, com.google.firebase.firestore.DocumentSnapshot.ServerTimestampBehavior); @@ -303,6 +329,11 @@ package com.google.firebase.firestore { @java.lang.annotation.Retention(java.lang.annotation.RetentionPolicy.RUNTIME) @java.lang.annotation.Target({java.lang.annotation.ElementType.TYPE}) public @interface IgnoreExtraProperties { } + public final class Int32Value { + ctor public Int32Value(int); + field public final int value; + } + public enum ListenSource { enum_constant public static final com.google.firebase.firestore.ListenSource CACHE; enum_constant public static final com.google.firebase.firestore.ListenSource DEFAULT; @@ -360,6 +391,10 @@ package com.google.firebase.firestore { public interface LocalCacheSettings { } + public final class MaxKey { + method public static com.google.firebase.firestore.MaxKey instance(); + } + public final class MemoryCacheSettings implements com.google.firebase.firestore.LocalCacheSettings { method public com.google.firebase.firestore.MemoryGarbageCollectorSettings getGarbageCollectorSettings(); method public static com.google.firebase.firestore.MemoryCacheSettings.Builder newBuilder(); @@ -396,6 +431,10 @@ package com.google.firebase.firestore { enum_constant public static final com.google.firebase.firestore.MetadataChanges INCLUDE; } + public final class MinKey { + method public static com.google.firebase.firestore.MinKey instance(); + } + public interface OnProgressListener { method public void onProgress(ProgressT); } @@ -491,6 +530,12 @@ package com.google.firebase.firestore { method public java.util.List toObjects(Class, com.google.firebase.firestore.DocumentSnapshot.ServerTimestampBehavior); } + public final class RegexValue { + ctor public RegexValue(String, String); + field public final String! options; + field public final String! 
pattern; + } + @java.lang.annotation.Retention(java.lang.annotation.RetentionPolicy.RUNTIME) @java.lang.annotation.Target({java.lang.annotation.ElementType.METHOD, java.lang.annotation.ElementType.FIELD}) public @interface ServerTimestamp { } diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java new file mode 100644 index 00000000000..989e86311c1 --- /dev/null +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java @@ -0,0 +1,583 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package com.google.firebase.firestore; + +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.assertSDKQueryResultsConsistentWithBackend; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testCollectionOnNightly; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testCollectionWithDocsOnNightly; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.waitFor; +import static com.google.firebase.firestore.testutil.TestUtil.map; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import androidx.test.ext.junit.runners.AndroidJUnit4; +import com.google.firebase.firestore.Query.Direction; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Semaphore; +import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(AndroidJUnit4.class) +public class BsonTypesTest { + + @Test + public void writeAndReadBsonTypes() throws ExecutionException, InterruptedException { + Map expected = new HashMap<>(); + + DocumentReference docRef = + waitFor( + testCollectionOnNightly() + .add( + map( + "bsonObjectId", new BsonObjectId("507f191e810c19729de860ea"), + "regex", new RegexValue("^foo", "i"), + "bsonTimestamp", new BsonTimestamp(1, 2), + "bsonBinary", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}), + "int32", new Int32Value(1), + "minKey", MinKey.instance(), + "maxKey", MaxKey.instance()))); + + waitFor( + docRef.set( + map( + "bsonObjectId", + new BsonObjectId("507f191e810c19729de860eb"), + "regex", + new RegexValue("^foo", "m"), + "bsonTimestamp", + new BsonTimestamp(1, 3)), + SetOptions.merge())); + + waitFor(docRef.update(map("int32", new 
Int32Value(2)))); + + expected.put("bsonObjectId", new BsonObjectId("507f191e810c19729de860eb")); + expected.put("regex", new RegexValue("^foo", "m")); + expected.put("bsonTimestamp", new BsonTimestamp(1, 3)); + expected.put("bsonBinary", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})); + expected.put("int32", new Int32Value(2)); + expected.put("minKey", MinKey.instance()); + expected.put("maxKey", MaxKey.instance()); + + DocumentSnapshot actual = waitFor(docRef.get()); + + assertTrue(actual.get("bsonObjectId") instanceof BsonObjectId); + assertTrue(actual.get("regex") instanceof RegexValue); + assertTrue(actual.get("bsonTimestamp") instanceof BsonTimestamp); + assertTrue(actual.get("bsonBinary") instanceof BsonBinaryData); + assertTrue(actual.get("int32") instanceof Int32Value); + assertTrue(actual.get("minKey") instanceof MinKey); + assertTrue(actual.get("maxKey") instanceof MaxKey); + assertEquals(expected, actual.getData()); + } + + @Test + public void writeAndReadBsonTypeOffline() throws ExecutionException, InterruptedException { + CollectionReference randomColl = testCollectionOnNightly(); + DocumentReference docRef = randomColl.document(); + + waitFor(randomColl.getFirestore().disableNetwork()); + + // Adding docs to cache, do not wait for promise to resolve. 
+ Map expected = new HashMap<>(); + docRef.set( + map( + "bsonObjectId", new BsonObjectId("507f191e810c19729de860ea"), + "regex", new RegexValue("^foo", "i"), + "bsonTimestamp", new BsonTimestamp(1, 2), + "bsonBinary", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}), + "int32", new Int32Value(1), + "minKey", MinKey.instance(), + "maxKey", MaxKey.instance())); + + docRef.update( + map( + "bsonObjectId", + new BsonObjectId("507f191e810c19729de860eb"), + "regex", + new RegexValue("^foo", "m"), + "bsonTimestamp", + new BsonTimestamp(1, 3))); + + expected.put("bsonObjectId", new BsonObjectId("507f191e810c19729de860eb")); + expected.put("regex", new RegexValue("^foo", "m")); + expected.put("bsonTimestamp", new BsonTimestamp(1, 3)); + expected.put("bsonBinary", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})); + expected.put("int32", new Int32Value(1)); + expected.put("minKey", MinKey.instance()); + expected.put("maxKey", MaxKey.instance()); + + DocumentSnapshot actual = waitFor(docRef.get()); + + assertTrue(actual.get("bsonObjectId") instanceof BsonObjectId); + assertTrue(actual.get("regex") instanceof RegexValue); + assertTrue(actual.get("bsonTimestamp") instanceof BsonTimestamp); + assertTrue(actual.get("bsonBinary") instanceof BsonBinaryData); + assertTrue(actual.get("int32") instanceof Int32Value); + assertTrue(actual.get("minKey") instanceof MinKey); + assertTrue(actual.get("maxKey") instanceof MaxKey); + assertEquals(expected, actual.getData()); + } + + @Test + public void listenToDocumentsWithBsonTypes() throws Throwable { + final Semaphore semaphore = new Semaphore(0); + ListenerRegistration registration = null; + CollectionReference randomColl = testCollectionOnNightly(); + DocumentReference ref = randomColl.document(); + AtomicReference failureMessage = new AtomicReference(null); + int totalPermits = 5; + + try { + registration = + randomColl + .whereEqualTo("purpose", "Bson types tests") + .addSnapshotListener( + (value, error) -> { + try { + 
DocumentSnapshot docSnap = + value.isEmpty() ? null : value.getDocuments().get(0); + + switch (semaphore.availablePermits()) { + case 0: + assertNull(docSnap); + ref.set( + map( + "purpose", + "Bson types tests", + "bsonObjectId", + new BsonObjectId("507f191e810c19729de860ea"), + "regex", + new RegexValue("^foo", "i"), + "bsonTimestamp", + new BsonTimestamp(1, 2), + "bsonBinary", + BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}), + "int32", + new Int32Value(1), + "minKey", + MinKey.instance(), + "maxKey", + MaxKey.instance())); + break; + case 1: + assertNotNull(docSnap); + + assertEquals( + docSnap.getBsonBinaryData("bsonBinary"), + BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})); + assertEquals( + docSnap.getBsonObjectId("bsonObjectId"), + new BsonObjectId("507f191e810c19729de860ea")); + assertEquals(docSnap.getRegexValue("regex"), new RegexValue("^foo", "i")); + assertEquals( + docSnap.getBsonTimestamp("bsonTimestamp"), new BsonTimestamp(1, 2)); + assertEquals(docSnap.getInt32Value("int32"), new Int32Value(1)); + assertEquals(docSnap.getMinKey("minKey"), MinKey.instance()); + assertEquals(docSnap.getMaxKey("maxKey"), MaxKey.instance()); + + ref.set( + map( + "purpose", + "Bson types tests", + "bsonObjectId", + new BsonObjectId("507f191e810c19729de860eb"), + "regex", + new RegexValue("^foo", "m"), + "bsonTimestamp", + new BsonTimestamp(1, 3)), + SetOptions.merge()); + break; + case 2: + assertNotNull(docSnap); + + assertEquals( + docSnap.getBsonObjectId("bsonObjectId"), + new BsonObjectId("507f191e810c19729de860eb")); + assertEquals(docSnap.getRegexValue("regex"), new RegexValue("^foo", "m")); + assertEquals( + docSnap.getBsonTimestamp("bsonTimestamp"), new BsonTimestamp(1, 3)); + + ref.update(map("int32", new Int32Value(2))); + break; + case 3: + assertNotNull(docSnap); + + assertEquals(docSnap.getInt32Value("int32"), new Int32Value(2)); + + ref.delete(); + break; + case 4: + assertNull(docSnap); + break; + } + } catch (Throwable t) { + 
failureMessage.set(t); + semaphore.release(totalPermits); + } + + semaphore.release(); + }); + + semaphore.acquire(totalPermits); + } finally { + if (registration != null) { + registration.remove(); + } + + if (failureMessage.get() != null) { + throw failureMessage.get(); + } + } + } + + @Test + public void filterAndOrderBsonObjectIds() throws Exception { + Map> docs = + map( + "a", + map("key", new BsonObjectId("507f191e810c19729de860ea")), + "b", + map("key", new BsonObjectId("507f191e810c19729de860eb")), + "c", + map("key", new BsonObjectId("507f191e810c19729de860ec"))); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereGreaterThan("key", new BsonObjectId("507f191e810c19729de860ea")); + + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "b")); + + orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereNotEqualTo("key", new BsonObjectId("507f191e810c19729de860eb")); + + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "a")); + } + + @Test + public void filterAndOrderBsonTimestamps() throws Exception { + Map> docs = + map( + "a", + map("key", new BsonTimestamp(1, 1)), + "b", + map("key", new BsonTimestamp(1, 2)), + "c", + map("key", new BsonTimestamp(2, 1))); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereGreaterThan("key", new BsonTimestamp(1, 1)); + + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "b")); + + orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereNotEqualTo("key", new BsonTimestamp(1, 2)); + + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "a")); + } + + @Test + public void 
filterAndOrderBsonBinaryData() throws Exception { + Map> docs = + map( + "a", + map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})), + "b", + map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4})), + "c", + map("key", BsonBinaryData.fromBytes(2, new byte[] {1, 2, 2}))); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereGreaterThan("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})); + + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "b")); + + orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereNotEqualTo("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4})); + + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "a")); + } + + @Test + public void filterAndOrderRegex() throws Exception { + Map> docs = + map( + "a", map("key", new RegexValue("^bar", "i")), + "b", map("key", new RegexValue("^bar", "m")), + "c", map("key", new RegexValue("^baz", "i"))); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereGreaterThan("key", new RegexValue("^bar", "i")); + + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "b")); + + orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereNotEqualTo("key", new RegexValue("^bar", "m")); + + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "a")); + } + + @Test + public void filterAndOrderInt32() throws Exception { + Map> docs = + map( + "a", map("key", new Int32Value(-1)), + "b", map("key", new Int32Value(1)), + "c", map("key", new Int32Value(2))); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query orderedQuery = + 
randomColl.orderBy("key", Direction.DESCENDING).whereGreaterThan("key", new Int32Value(-1)); + + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "b")); + + orderedQuery = + randomColl.orderBy("key", Direction.DESCENDING).whereNotEqualTo("key", new Int32Value(1)); + + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "a")); + } + + @Test + public void filterAndOrderMinKey() throws Exception { + Map> docs = + map( + "a", map("key", MinKey.instance()), + "b", map("key", MinKey.instance()), + "c", map("key", null), + "d", map("key", 1L), + "e", map("key", MaxKey.instance())); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query query = + randomColl + .orderBy( + "key", + Direction + .DESCENDING) // minKeys are equal, would sort by documentId as secondary order + .whereEqualTo("key", MinKey.instance()); + + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("b", "a")); + + query = randomColl.whereNotEqualTo("key", MinKey.instance()); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("d", "e")); + + query = randomColl.whereGreaterThanOrEqualTo("key", MinKey.instance()); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("a", "b")); + + query = randomColl.whereLessThanOrEqualTo("key", MinKey.instance()); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("a", "b")); + + query = randomColl.whereGreaterThan("key", MinKey.instance()); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList()); + + query = randomColl.whereGreaterThan("key", MinKey.instance()); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList()); + } + + @Test + public void filterAndOrderMaxKey() throws Exception { + Map> docs = + map( + "a", map("key", MinKey.instance()), + "b", map("key", 1L), 
+ "c", map("key", MaxKey.instance()), + "d", map("key", MaxKey.instance()), + "e", map("key", null)); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query query = + randomColl + .orderBy( + "key", + Direction + .DESCENDING) // maxKeys are equal, would sort by documentId as secondary order + .whereEqualTo("key", MaxKey.instance()); + + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("d", "c")); + + query = randomColl.whereNotEqualTo("key", MaxKey.instance()); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("a", "b")); + + query = randomColl.whereGreaterThanOrEqualTo("key", MaxKey.instance()); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("c", "d")); + + query = randomColl.whereLessThanOrEqualTo("key", MaxKey.instance()); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("c", "d")); + + query = randomColl.whereLessThan("key", MaxKey.instance()); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList()); + + query = randomColl.whereGreaterThan("key", MaxKey.instance()); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList()); + } + + @Test + public void filterNullValueWithBsonTypes() throws Exception { + Map> docs = + map( + "a", map("key", MinKey.instance()), + "b", map("key", null), + "c", map("key", null), + "d", map("key", 1L), + "e", map("key", MaxKey.instance())); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query query = randomColl.whereEqualTo("key", null); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("b", "c")); + + query = randomColl.whereNotEqualTo("key", null); + assertSDKQueryResultsConsistentWithBackend( + randomColl, query, docs, Arrays.asList("a", "d", "e")); + } + + @Test + public void orderBsonTypesTogether() throws Exception { + Map> docs = + map( + 
"bsonObjectId1", + map("key", new BsonObjectId("507f191e810c19729de860ea")), + "bsonObjectId2", + map("key", new BsonObjectId("507f191e810c19729de860eb")), + "bsonObjectId3", + map("key", new BsonObjectId("407f191e810c19729de860ea")), + "regex1", + map("key", new RegexValue("^bar", "m")), + "regex2", + map("key", new RegexValue("^bar", "i")), + "regex3", + map("key", new RegexValue("^baz", "i")), + "bsonTimestamp1", + map("key", new BsonTimestamp(2, 0)), + "bsonTimestamp2", + map("key", new BsonTimestamp(1, 2)), + "bsonTimestamp3", + map("key", new BsonTimestamp(1, 1)), + "bsonBinary1", + map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})), + "bsonBinary2", + map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4})), + "bsonBinary3", + map("key", BsonBinaryData.fromBytes(2, new byte[] {1, 2, 2})), + "int32Value1", + map("key", new Int32Value(-1)), + "int32Value2", + map("key", new Int32Value(1)), + "int32Value3", + map("key", new Int32Value(0)), + "minKey1", + map("key", MinKey.instance()), + "minKey2", + map("key", MinKey.instance()), + "maxKey1", + map("key", MaxKey.instance()), + "maxKey2", + map("key", MaxKey.instance())); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query orderedQuery = randomColl.orderBy("key", Direction.DESCENDING); + List expectedDocs = + Arrays.asList( + "maxKey2", + "maxKey1", + "regex3", + "regex1", + "regex2", + "bsonObjectId2", + "bsonObjectId1", + "bsonObjectId3", + "bsonBinary3", + "bsonBinary2", + "bsonBinary1", + "bsonTimestamp1", + "bsonTimestamp2", + "bsonTimestamp3", + "int32Value2", + "int32Value3", + "int32Value1", + "minKey2", + "minKey1"); + + assertSDKQueryResultsConsistentWithBackend(randomColl, orderedQuery, docs, expectedDocs); + } + + @Test + public void canRunTransactionsOnDocumentsWithBsonTypes() throws Exception { + Map> docs = + map( + "a", map("key", new BsonObjectId("507f191e810c19729de860ea")), + "b", map("key", new RegexValue("^foo", "i")), + "c", map("key", 
BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + waitFor( + randomColl.firestore.runTransaction( + transaction -> { + DocumentSnapshot docSnap = transaction.get(randomColl.document("a")); + assertEquals( + docSnap.getBsonObjectId("key"), new BsonObjectId("507f191e810c19729de860ea")); + transaction.update(randomColl.document("b"), "key", new RegexValue("^bar", "i")); + transaction.delete(randomColl.document("c")); + return null; + })); + + QuerySnapshot getSnapshot = waitFor(randomColl.get()); + + List getSnapshotDocIds = + getSnapshot.getDocuments().stream().map(ds -> ds.getId()).collect(Collectors.toList()); + + assertTrue(getSnapshotDocIds.equals(Arrays.asList("a", "b"))); + assertEquals( + getSnapshot.getDocuments().get(0).getBsonObjectId("key"), + new BsonObjectId("507f191e810c19729de860ea")); + assertEquals( + getSnapshot.getDocuments().get(1).getRegexValue("key"), new RegexValue("^bar", "i")); + } +} diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/FirestoreTest.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/FirestoreTest.java index 6afbd54b60f..84f0e0f3000 100644 --- a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/FirestoreTest.java +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/FirestoreTest.java @@ -15,6 +15,7 @@ package com.google.firebase.firestore; import static com.google.firebase.firestore.AccessHelper.getAsyncQueue; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.assertSDKQueryResultsConsistentWithBackend; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.checkOnlineAndOfflineResultsMatch; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.isRunningAgainstEmulator; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.newTestSettings; @@ -64,7 +65,6 @@ import 
java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Semaphore; -import java.util.stream.Collectors; import org.junit.After; import org.junit.Test; import org.junit.runner.RunWith; @@ -1497,28 +1497,27 @@ public void testCanGetSameOrDifferentPersistentCacheIndexManager() { } @Test - public void snapshotListenerSortsQueryByDocumentIdsSameAsGetQuery() { + public void snapshotListenerSortsQueryByDocumentIdsSameAsGetQuery() throws Exception { Map> testDocs = map( - "A", map("a", 1), - "a", map("a", 1), - "Aa", map("a", 1), - "7", map("a", 1), - "12", map("a", 1), - "__id7__", map("a", 1), - "__id12__", map("a", 1), - "__id-2__", map("a", 1), - "__id1_", map("a", 1), - "_id1__", map("a", 1), - "__id", map("a", 1), - "__id9223372036854775807__", map("a", 1), - "__id-9223372036854775808__", map("a", 1)); + "A", map("a", 1L), + "a", map("a", 1L), + "Aa", map("a", 1L), + "7", map("a", 1L), + "12", map("a", 1L), + "__id7__", map("a", 1L), + "__id12__", map("a", 1L), + "__id-2__", map("a", 1L), + "__id1_", map("a", 1L), + "_id1__", map("a", 1L), + "__id", map("a", 1L), + "__id9223372036854775807__", map("a", 1L), + "__id-9223372036854775808__", map("a", 1L)); CollectionReference colRef = testCollectionWithDocs(testDocs); - - // Run get query Query orderedQuery = colRef.orderBy(FieldPath.documentId()); - List expectedDocIds = + + List expectedDocs = Arrays.asList( "__id-9223372036854775808__", "__id-2__", @@ -1534,106 +1533,61 @@ public void snapshotListenerSortsQueryByDocumentIdsSameAsGetQuery() { "_id1__", "a"); - QuerySnapshot getSnapshot = waitFor(orderedQuery.get()); - List getSnapshotDocIds = - getSnapshot.getDocuments().stream().map(ds -> ds.getId()).collect(Collectors.toList()); - - // Run query with snapshot listener - EventAccumulator eventAccumulator = new EventAccumulator(); - ListenerRegistration registration = - orderedQuery.addSnapshotListener(eventAccumulator.listener()); - - List watchSnapshotDocIds = new ArrayList<>(); - try 
{ - QuerySnapshot watchSnapshot = eventAccumulator.await(); - watchSnapshotDocIds = - watchSnapshot.getDocuments().stream() - .map(documentSnapshot -> documentSnapshot.getId()) - .collect(Collectors.toList()); - } finally { - registration.remove(); - } - - // Assert that get and snapshot listener requests sort docs in the same, expected order - assertTrue(getSnapshotDocIds.equals(expectedDocIds)); - assertTrue(watchSnapshotDocIds.equals(expectedDocIds)); + assertSDKQueryResultsConsistentWithBackend(colRef, orderedQuery, testDocs, expectedDocs); } @Test - public void snapshotListenerSortsFilteredQueryByDocumentIdsSameAsGetQuery() { + public void snapshotListenerSortsFilteredQueryByDocumentIdsSameAsGetQuery() throws Exception { Map> testDocs = map( - "A", map("a", 1), - "a", map("a", 1), - "Aa", map("a", 1), - "7", map("a", 1), - "12", map("a", 1), - "__id7__", map("a", 1), - "__id12__", map("a", 1), - "__id-2__", map("a", 1), - "__id1_", map("a", 1), - "_id1__", map("a", 1), - "__id", map("a", 1), - "__id9223372036854775807__", map("a", 1), - "__id-9223372036854775808__", map("a", 1)); + "A", map("a", 1L), + "a", map("a", 1L), + "Aa", map("a", 1L), + "7", map("a", 1L), + "12", map("a", 1L), + "__id7__", map("a", 1L), + "__id12__", map("a", 1L), + "__id-2__", map("a", 1L), + "__id1_", map("a", 1L), + "_id1__", map("a", 1L), + "__id", map("a", 1L), + "__id9223372036854775807__", map("a", 1L), + "__id-9223372036854775808__", map("a", 1L)); CollectionReference colRef = testCollectionWithDocs(testDocs); - - // Run get query Query filteredQuery = colRef .whereGreaterThan(FieldPath.documentId(), "__id7__") .whereLessThanOrEqualTo(FieldPath.documentId(), "A") .orderBy(FieldPath.documentId()); - List expectedDocIds = - Arrays.asList("__id12__", "__id9223372036854775807__", "12", "7", "A"); - - QuerySnapshot getSnapshot = waitFor(filteredQuery.get()); - List getSnapshotDocIds = - getSnapshot.getDocuments().stream().map(ds -> ds.getId()).collect(Collectors.toList()); - // Run 
query with snapshot listener - EventAccumulator eventAccumulator = new EventAccumulator(); - ListenerRegistration registration = - filteredQuery.addSnapshotListener(eventAccumulator.listener()); - - List watchSnapshotDocIds = new ArrayList<>(); - try { - QuerySnapshot watchSnapshot = eventAccumulator.await(); - watchSnapshotDocIds = - watchSnapshot.getDocuments().stream() - .map(documentSnapshot -> documentSnapshot.getId()) - .collect(Collectors.toList()); - } finally { - registration.remove(); - } + List expectedDocs = + Arrays.asList("__id12__", "__id9223372036854775807__", "12", "7", "A"); - // Assert that get and snapshot listener requests sort docs in the same, expected order - assertTrue(getSnapshotDocIds.equals(expectedDocIds)); - assertTrue(watchSnapshotDocIds.equals(expectedDocIds)); + assertSDKQueryResultsConsistentWithBackend(colRef, filteredQuery, testDocs, expectedDocs); } @Test public void sdkOrdersQueryByDocumentIdTheSameWayOnlineAndOffline() { Map> testDocs = map( - "A", map("a", 1), - "a", map("a", 1), - "Aa", map("a", 1), - "7", map("a", 1), - "12", map("a", 1), - "__id7__", map("a", 1), - "__id12__", map("a", 1), - "__id-2__", map("a", 1), - "__id1_", map("a", 1), - "_id1__", map("a", 1), - "__id", map("a", 1), - "__id9223372036854775807__", map("a", 1), - "__id-9223372036854775808__", map("a", 1)); + "A", map("a", 1L), + "a", map("a", 1L), + "Aa", map("a", 1L), + "7", map("a", 1L), + "12", map("a", 1L), + "__id7__", map("a", 1L), + "__id12__", map("a", 1L), + "__id-2__", map("a", 1L), + "__id1_", map("a", 1L), + "_id1__", map("a", 1L), + "__id", map("a", 1L), + "__id9223372036854775807__", map("a", 1L), + "__id-9223372036854775808__", map("a", 1L)); CollectionReference colRef = testCollectionWithDocs(testDocs); - // Test query Query orderedQuery = colRef.orderBy(FieldPath.documentId()); + List expectedDocIds = Arrays.asList( "__id-9223372036854775808__", @@ -1655,7 +1609,7 @@ public void sdkOrdersQueryByDocumentIdTheSameWayOnlineAndOffline() { } 
@Test - public void snapshotListenerSortsUnicodeStringsAsServer() { + public void snapshotListenerSortsUnicodeStringsAsServer() throws Exception { Map> testDocs = map( "a", @@ -1683,36 +1637,15 @@ public void snapshotListenerSortsUnicodeStringsAsServer() { CollectionReference colRef = testCollectionWithDocs(testDocs); Query orderedQuery = colRef.orderBy("value"); + List expectedDocIds = Arrays.asList("b", "a", "h", "i", "c", "f", "e", "d", "g", "k", "j"); - QuerySnapshot getSnapshot = waitFor(orderedQuery.get()); - List getSnapshotDocIds = - getSnapshot.getDocuments().stream().map(ds -> ds.getId()).collect(Collectors.toList()); - - EventAccumulator eventAccumulator = new EventAccumulator(); - ListenerRegistration registration = - orderedQuery.addSnapshotListener(eventAccumulator.listener()); - - List watchSnapshotDocIds = new ArrayList<>(); - try { - QuerySnapshot watchSnapshot = eventAccumulator.await(); - watchSnapshotDocIds = - watchSnapshot.getDocuments().stream() - .map(documentSnapshot -> documentSnapshot.getId()) - .collect(Collectors.toList()); - } finally { - registration.remove(); - } - - assertTrue(getSnapshotDocIds.equals(expectedDocIds)); - assertTrue(watchSnapshotDocIds.equals(expectedDocIds)); - - checkOnlineAndOfflineResultsMatch(colRef, orderedQuery, expectedDocIds.toArray(new String[0])); + assertSDKQueryResultsConsistentWithBackend(colRef, orderedQuery, testDocs, expectedDocIds); } @Test - public void snapshotListenerSortsUnicodeStringsInArrayAsServer() { + public void snapshotListenerSortsUnicodeStringsInArrayAsServer() throws Exception { Map> testDocs = map( "a", @@ -1740,36 +1673,15 @@ public void snapshotListenerSortsUnicodeStringsInArrayAsServer() { CollectionReference colRef = testCollectionWithDocs(testDocs); Query orderedQuery = colRef.orderBy("value"); + List expectedDocIds = Arrays.asList("b", "a", "h", "i", "c", "f", "e", "d", "g", "k", "j"); - QuerySnapshot getSnapshot = waitFor(orderedQuery.get()); - List getSnapshotDocIds = - 
getSnapshot.getDocuments().stream().map(ds -> ds.getId()).collect(Collectors.toList()); - - EventAccumulator eventAccumulator = new EventAccumulator(); - ListenerRegistration registration = - orderedQuery.addSnapshotListener(eventAccumulator.listener()); - - List watchSnapshotDocIds = new ArrayList<>(); - try { - QuerySnapshot watchSnapshot = eventAccumulator.await(); - watchSnapshotDocIds = - watchSnapshot.getDocuments().stream() - .map(documentSnapshot -> documentSnapshot.getId()) - .collect(Collectors.toList()); - } finally { - registration.remove(); - } - - assertTrue(getSnapshotDocIds.equals(expectedDocIds)); - assertTrue(watchSnapshotDocIds.equals(expectedDocIds)); - - checkOnlineAndOfflineResultsMatch(colRef, orderedQuery, expectedDocIds.toArray(new String[0])); + assertSDKQueryResultsConsistentWithBackend(colRef, orderedQuery, testDocs, expectedDocIds); } @Test - public void snapshotListenerSortsUnicodeStringsInMapAsServer() { + public void snapshotListenerSortsUnicodeStringsInMapAsServer() throws Exception { Map> testDocs = map( "a", @@ -1797,36 +1709,15 @@ public void snapshotListenerSortsUnicodeStringsInMapAsServer() { CollectionReference colRef = testCollectionWithDocs(testDocs); Query orderedQuery = colRef.orderBy("value"); + List expectedDocIds = Arrays.asList("b", "a", "h", "i", "c", "f", "e", "d", "g", "k", "j"); - QuerySnapshot getSnapshot = waitFor(orderedQuery.get()); - List getSnapshotDocIds = - getSnapshot.getDocuments().stream().map(ds -> ds.getId()).collect(Collectors.toList()); - - EventAccumulator eventAccumulator = new EventAccumulator(); - ListenerRegistration registration = - orderedQuery.addSnapshotListener(eventAccumulator.listener()); - - List watchSnapshotDocIds = new ArrayList<>(); - try { - QuerySnapshot watchSnapshot = eventAccumulator.await(); - watchSnapshotDocIds = - watchSnapshot.getDocuments().stream() - .map(documentSnapshot -> documentSnapshot.getId()) - .collect(Collectors.toList()); - } finally { - registration.remove(); 
- } - - assertTrue(getSnapshotDocIds.equals(expectedDocIds)); - assertTrue(watchSnapshotDocIds.equals(expectedDocIds)); - - checkOnlineAndOfflineResultsMatch(colRef, orderedQuery, expectedDocIds.toArray(new String[0])); + assertSDKQueryResultsConsistentWithBackend(colRef, orderedQuery, testDocs, expectedDocIds); } @Test - public void snapshotListenerSortsUnicodeStringsInMapKeyAsServer() { + public void snapshotListenerSortsUnicodeStringsInMapKeyAsServer() throws Exception { Map> testDocs = map( "a", @@ -1854,36 +1745,15 @@ public void snapshotListenerSortsUnicodeStringsInMapKeyAsServer() { CollectionReference colRef = testCollectionWithDocs(testDocs); Query orderedQuery = colRef.orderBy("value"); + List expectedDocIds = Arrays.asList("b", "a", "h", "i", "c", "f", "e", "d", "g", "k", "j"); - QuerySnapshot getSnapshot = waitFor(orderedQuery.get()); - List getSnapshotDocIds = - getSnapshot.getDocuments().stream().map(ds -> ds.getId()).collect(Collectors.toList()); - - EventAccumulator eventAccumulator = new EventAccumulator(); - ListenerRegistration registration = - orderedQuery.addSnapshotListener(eventAccumulator.listener()); - - List watchSnapshotDocIds = new ArrayList<>(); - try { - QuerySnapshot watchSnapshot = eventAccumulator.await(); - watchSnapshotDocIds = - watchSnapshot.getDocuments().stream() - .map(documentSnapshot -> documentSnapshot.getId()) - .collect(Collectors.toList()); - } finally { - registration.remove(); - } - - assertTrue(getSnapshotDocIds.equals(expectedDocIds)); - assertTrue(watchSnapshotDocIds.equals(expectedDocIds)); - - checkOnlineAndOfflineResultsMatch(colRef, orderedQuery, expectedDocIds.toArray(new String[0])); + assertSDKQueryResultsConsistentWithBackend(colRef, orderedQuery, testDocs, expectedDocIds); } @Test - public void snapshotListenerSortsUnicodeStringsInDocumentKeyAsServer() { + public void snapshotListenerSortsUnicodeStringsInDocumentKeyAsServer() throws Exception { Map> testDocs = map( "Łukasiewicz", @@ -1911,38 +1781,16 @@ 
public void snapshotListenerSortsUnicodeStringsInDocumentKeyAsServer() { CollectionReference colRef = testCollectionWithDocs(testDocs); Query orderedQuery = colRef.orderBy(FieldPath.documentId()); + List expectedDocIds = Arrays.asList( "Sierpiński", "Łukasiewicz", "你好", "你顥", "岩澤", "︒", "P", "🄟", "🐵", "😀", "😁"); - QuerySnapshot getSnapshot = waitFor(orderedQuery.get()); - List getSnapshotDocIds = - getSnapshot.getDocuments().stream().map(ds -> ds.getId()).collect(Collectors.toList()); - - EventAccumulator eventAccumulator = new EventAccumulator(); - ListenerRegistration registration = - orderedQuery.addSnapshotListener(eventAccumulator.listener()); - - List watchSnapshotDocIds = new ArrayList<>(); - try { - QuerySnapshot watchSnapshot = eventAccumulator.await(); - watchSnapshotDocIds = - watchSnapshot.getDocuments().stream() - .map(documentSnapshot -> documentSnapshot.getId()) - .collect(Collectors.toList()); - } finally { - registration.remove(); - } - - assertTrue(getSnapshotDocIds.equals(expectedDocIds)); - assertTrue(watchSnapshotDocIds.equals(expectedDocIds)); - - checkOnlineAndOfflineResultsMatch(colRef, orderedQuery, expectedDocIds.toArray(new String[0])); + assertSDKQueryResultsConsistentWithBackend(colRef, orderedQuery, testDocs, expectedDocIds); } @Test - public void snapshotListenerSortsInvalidUnicodeStringsAsServer() { - // Note: Protocol Buffer converts any invalid surrogates to "?". 
+ public void snapshotListenerSortsInvalidUnicodeStringsAsServer() throws Exception { Map> testDocs = map( "a", @@ -1962,30 +1810,27 @@ public void snapshotListenerSortsInvalidUnicodeStringsAsServer() { CollectionReference colRef = testCollectionWithDocs(testDocs); Query orderedQuery = colRef.orderBy("value"); - List expectedDocIds = Arrays.asList("a", "d", "e", "f", "g", "b", "c"); - - QuerySnapshot getSnapshot = waitFor(orderedQuery.get()); - List getSnapshotDocIds = - getSnapshot.getDocuments().stream().map(ds -> ds.getId()).collect(Collectors.toList()); - EventAccumulator eventAccumulator = new EventAccumulator(); - ListenerRegistration registration = - orderedQuery.addSnapshotListener(eventAccumulator.listener()); - - List watchSnapshotDocIds = new ArrayList<>(); - try { - QuerySnapshot watchSnapshot = eventAccumulator.await(); - watchSnapshotDocIds = - watchSnapshot.getDocuments().stream() - .map(documentSnapshot -> documentSnapshot.getId()) - .collect(Collectors.toList()); - } finally { - registration.remove(); - } + List expectedDocIds = Arrays.asList("a", "d", "e", "f", "g", "b", "c"); - assertTrue(getSnapshotDocIds.equals(expectedDocIds)); - assertTrue(watchSnapshotDocIds.equals(expectedDocIds)); + // Note: Protocol Buffer converts any invalid surrogates to "?". 
+ Map> actualDocs = + map( + "a", + map("value", "Z"), + "b", + map("value", "你好"), + "c", + map("value", "😀"), + "d", + map("value", "ab?"), + "e", + map("value", "ab?"), + "f", + map("value", "ab??"), + "g", + map("value", "ab??")); - checkOnlineAndOfflineResultsMatch(colRef, orderedQuery, expectedDocIds.toArray(new String[0])); + assertSDKQueryResultsConsistentWithBackend(colRef, orderedQuery, actualDocs, expectedDocIds); } } diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/POJOTest.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/POJOTest.java index 8cceddb7188..52cc1d3892f 100644 --- a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/POJOTest.java +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/POJOTest.java @@ -15,7 +15,9 @@ package com.google.firebase.firestore; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testCollection; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testCollectionOnNightly; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testDocument; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testDocumentOnNightly; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.waitFor; import static com.google.firebase.firestore.testutil.TestUtil.expectError; import static com.google.firebase.firestore.testutil.TestUtil.map; @@ -47,6 +49,13 @@ public static final class POJO { Blob blob; GeoPoint geoPoint; DocumentReference documentReference; + BsonObjectId bsonObjectId; + BsonBinaryData bsonBinaryData; + BsonTimestamp bsonTimestamp; + RegexValue regexValue; + Int32Value int32Value; + MinKey minKey; + MaxKey maxKey; public POJO() {} @@ -60,6 +69,13 @@ public POJO(double number, String str, DocumentReference documentReference) { this.timestamp = new Timestamp(123, 123456000); this.blob = Blob.fromBytes(new byte[] {3, 1, 4, 1, 
5}); this.geoPoint = new GeoPoint(3.1415, 9.2653); + this.bsonObjectId = new BsonObjectId("507f191e810c19729de860ea"); + this.bsonBinaryData = BsonBinaryData.fromBytes(1, new byte[] {3, 1, 4, 1, 5}); + this.bsonTimestamp = new BsonTimestamp(1, 2); + this.regexValue = new RegexValue("^foo", "i"); + this.int32Value = new Int32Value(1); + this.minKey = MinKey.instance(); + this.maxKey = MaxKey.instance(); } public double getNumber() { @@ -118,6 +134,62 @@ public void setDocumentReference(DocumentReference documentReference) { this.documentReference = documentReference; } + public BsonObjectId getBsonObjectId() { + return bsonObjectId; + } + + public void setBsonObjectId(BsonObjectId bsonObjectId) { + this.bsonObjectId = bsonObjectId; + } + + public BsonBinaryData getBsonBinaryData() { + return bsonBinaryData; + } + + public void setBsonBinaryData(BsonBinaryData bsonBinaryData) { + this.bsonBinaryData = bsonBinaryData; + } + + public BsonTimestamp getBsonTimestamp() { + return bsonTimestamp; + } + + public void setBsonTimestamp(BsonTimestamp bsonTimestamp) { + this.bsonTimestamp = bsonTimestamp; + } + + public RegexValue getRegexValue() { + return regexValue; + } + + public void setRegexValue(RegexValue regexValue) { + this.regexValue = regexValue; + } + + public Int32Value getInt32Value() { + return int32Value; + } + + public void setInt32Value(Int32Value int32Value) { + this.int32Value = int32Value; + } + + public MinKey getMinKey() { + return minKey; + } + + public void setMinKey(MinKey minKey) { + this.minKey = minKey; + } + + public MaxKey getMaxKey() { + return maxKey; + } + + public void setMaxKey(MaxKey maxKey) { + this.maxKey = maxKey; + } + @Override public boolean equals(Object o) { if (this == o) { @@ -147,6 +219,27 @@ public boolean equals(Object o) { if (!geoPoint.equals(pojo.geoPoint)) { return false; } + if (!bsonBinaryData.equals(pojo.bsonBinaryData)) { + return false; + } + if (!bsonTimestamp.equals(pojo.bsonTimestamp)) { + return false; + } + if 
(!bsonObjectId.equals(pojo.bsonObjectId)) { + return false; + } + if (!regexValue.equals(pojo.regexValue)) { + return false; + } + if (!int32Value.equals(pojo.int32Value)) { + return false; + } + if (!minKey.equals(pojo.minKey)) { + return false; + } + if (!maxKey.equals(pojo.maxKey)) { + return false; + } // TODO: Implement proper equality on DocumentReference. return documentReference.getPath().equals(pojo.documentReference.getPath()); @@ -164,6 +257,13 @@ public int hashCode() { result = 31 * result + blob.hashCode(); result = 31 * result + geoPoint.hashCode(); result = 31 * result + documentReference.getPath().hashCode(); + result = 31 * result + bsonObjectId.hashCode(); + result = 31 * result + bsonBinaryData.hashCode(); + result = 31 * result + bsonTimestamp.hashCode(); + result = 31 * result + regexValue.hashCode(); + result = 31 * result + int32Value.hashCode(); + result = 31 * result + minKey.hashCode(); + result = 31 * result + maxKey.hashCode(); return result; } } @@ -236,7 +336,7 @@ public void tearDown() { @Test public void testWriteAndRead() { - CollectionReference collection = testCollection(); + CollectionReference collection = testCollectionOnNightly(); POJO data = new POJO(1.0, "a", collection.document()); DocumentReference reference = waitFor(collection.add(data)); DocumentSnapshot doc = waitFor(reference.get()); @@ -258,9 +358,10 @@ public void testDocumentIdAnnotation() { assertEquals(reference.getId(), readFromStore.getDocReferenceId()); } + // TODO(Mila/BSON): this test is flaky due to a bug in the backend. @Test public void testSetMerge() { - CollectionReference collection = testCollection(); + CollectionReference collection = testCollectionOnNightly(); POJO data = new POJO(1.0, "a", collection.document()); DocumentReference reference = waitFor(collection.add(data)); DocumentSnapshot doc = waitFor(reference.get()); @@ -277,7 +378,7 @@ public void testSetMerge() { // General smoke test that makes sure APIs accept POJOs. 
@Test public void testAPIsAcceptPOJOsForFields() { - DocumentReference ref = testDocument(); + DocumentReference ref = testDocumentOnNightly(); ArrayList> tasks = new ArrayList<>(); // as Map<> entries in a set() call. @@ -296,7 +397,7 @@ public void testAPIsAcceptPOJOsForFields() { // as Query parameters. data.setBlob(null); // blobs are broken, see b/117680212 - tasks.add(testCollection().whereEqualTo("field", data).get()); + tasks.add(testCollectionOnNightly().whereEqualTo("field", data).get()); waitFor(Tasks.whenAll(tasks)); } diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/TypeTest.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/TypeTest.java index 664c2207843..13ec0646f5a 100644 --- a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/TypeTest.java +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/TypeTest.java @@ -14,17 +14,23 @@ package com.google.firebase.firestore; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.assertSDKQueryResultsConsistentWithBackend; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testCollection; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testCollectionOnNightly; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testDocumentOnNightly; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.waitFor; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.writeTestDocsOnCollection; import static com.google.firebase.firestore.testutil.TestUtil.blob; import static com.google.firebase.firestore.testutil.TestUtil.map; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import androidx.test.ext.junit.runners.AndroidJUnit4; import com.google.firebase.Timestamp; import 
com.google.firebase.firestore.testutil.IntegrationTestUtil; +import com.google.protobuf.ByteString; import java.util.Arrays; import java.util.Collections; import java.util.Date; @@ -87,9 +93,201 @@ public void testCanReadAndWriteDates() { verifySuccessfulWriteReadCycle(map("date", new Timestamp(date)), testDoc()); } + @Test + public void testCanReadAndWriteDocumentReferences() { + DocumentReference docRef = testDoc(); + Map data = map("a", 42L, "ref", docRef); + verifySuccessfulWriteReadCycle(data, docRef); + } + + @Test + public void testCanReadAndWriteDocumentReferencesInLists() { + DocumentReference docRef = testDoc(); + List refs = Collections.singletonList(docRef); + Map data = map("a", 42L, "refs", refs); + verifySuccessfulWriteReadCycle(data, docRef); + } + + @Test + public void testCanReadAndWriteMinKey() { + verifySuccessfulWriteReadCycle(map("minKey", MinKey.instance()), testDocumentOnNightly()); + } + + @Test + public void testCanReadAndWriteMaxKey() { + verifySuccessfulWriteReadCycle(map("maxKey", MaxKey.instance()), testDocumentOnNightly()); + } + + @Test + public void testCanReadAndWriteRegexValue() { + verifySuccessfulWriteReadCycle( + map("regex", new RegexValue("^foo", "i")), testDocumentOnNightly()); + } + + @Test + public void testCanReadAndWriteInt32Value() { + verifySuccessfulWriteReadCycle(map("int32", new Int32Value(1)), testDocumentOnNightly()); + } + + @Test + public void testCanReadAndWriteBsonTimestampValue() { + verifySuccessfulWriteReadCycle( + map("bsonTimestamp", new BsonTimestamp(1, 2)), testDocumentOnNightly()); + } + + @Test + public void testCanReadAndWriteBsonObjectIdValue() { + verifySuccessfulWriteReadCycle( + map("bsonObjectId", new BsonObjectId("507f191e810c19729de860ea")), testDocumentOnNightly()); + } + + @Test + public void testCanReadAndWriteBsonBinaryValue() { + verifySuccessfulWriteReadCycle( + map("bsonBinary", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})), + testDocumentOnNightly()); + + 
verifySuccessfulWriteReadCycle( + map("bsonBinary", BsonBinaryData.fromBytes(128, new byte[] {1, 2, 3})), + testDocumentOnNightly()); + + verifySuccessfulWriteReadCycle( + map("bsonBinary", BsonBinaryData.fromByteString(255, ByteString.EMPTY)), + testDocumentOnNightly()); + } + + @Test + public void testCanReadAndWriteBsonTypesInLists() { + List data = + Arrays.asList( + new BsonObjectId("507f191e810c19729de860ea"), + new RegexValue("^foo", "i"), + new BsonTimestamp(1, 2), + BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}), + new Int32Value(1), + MinKey.instance(), + MaxKey.instance()); + + verifySuccessfulWriteReadCycle(map("BsonTypes", data), testDocumentOnNightly()); + } + + @Test + public void testCanReadAndWriteBsonTypesInMaps() { + Map data = + map( + "bsonObjectId", new BsonObjectId("507f191e810c19729de860ea"), + "regex", new RegexValue("^foo", "i"), + "bsonTimestamp", new BsonTimestamp(1, 2), + "bsonBinary", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}), + "int32", new Int32Value(1), + "minKey", MinKey.instance(), + "maxKey", MaxKey.instance()); + + verifySuccessfulWriteReadCycle(map("BsonTypes", data), testDocumentOnNightly()); + } + + @Test + public void invalidRegexGetsRejected() throws Exception { + Exception error = null; + try { + waitFor(testDocumentOnNightly().set(map("key", new RegexValue("foo", "a")))); + } catch (Exception e) { + error = e; + } + assertNotNull(error); + assertTrue( + error + .getMessage() + .contains( + "Invalid regex option 'a'. 
Supported options are 'i', 'm', 's', 'u', and 'x'")); + } + + @Test + public void invalidBsonObjectIdGetsRejected() throws Exception { + Exception error = null; + try { + // bsonObjectId with length not equal to 24 gets rejected + waitFor(testDocumentOnNightly().set(map("key", new BsonObjectId("foobar")))); + } catch (Exception e) { + error = e; + } + assertNotNull(error); + assertTrue(error.getMessage().contains("Object ID hex string has incorrect length.")); + } + + @Test + public void invalidBsonBinaryDataGetsRejected() throws Exception { + Exception error = null; + try { + waitFor( + testDocumentOnNightly() + .set(map("key", BsonBinaryData.fromBytes(1234, new byte[] {1, 2, 3})))); + } catch (Exception e) { + error = e; + } + assertNotNull(error); + assertTrue( + error + .getMessage() + .contains( + "The subtype for BsonBinaryData must be a value in the inclusive [0, 255] range.")); + } + + @Test + public void invalidBsonTimestampDataGetsRejected() throws Exception { + Exception error = null; + try { + waitFor(testDocumentOnNightly().set(map("key", new BsonTimestamp(-1, 1)))); + } catch (Exception e) { + error = e; + } + assertNotNull(error); + assertTrue( + error + .getMessage() + .contains( + "The field 'seconds' value (-1) does not represent an unsigned 32-bit integer.")); + + try { + waitFor(testDocumentOnNightly().set(map("key", new BsonTimestamp(4294967296L, 1)))); + } catch (Exception e) { + error = e; + } + assertNotNull(error); + assertTrue( + error + .getMessage() + .contains( + "The field 'seconds' value (4294967296) does not represent an unsigned 32-bit integer.")); + + try { + waitFor(testDocumentOnNightly().set(map("key", new BsonTimestamp(1, -1)))); + } catch (Exception e) { + error = e; + } + assertNotNull(error); + assertTrue( + error + .getMessage() + .contains( + "The field 'increment' value (-1) does not represent an unsigned 32-bit integer.")); + + try { + waitFor(testDocumentOnNightly().set(map("key", new BsonTimestamp(1, 4294967296L)))); + 
} catch (Exception e) { + error = e; + } + assertNotNull(error); + assertTrue( + error + .getMessage() + .contains( + "The field 'increment' value (4294967296) does not represent an unsigned 32-bit integer.")); + } + @Test public void testCanUseTypedAccessors() { - DocumentReference doc = testDoc(); + DocumentReference doc = testDocumentOnNightly(); Map data = map( "null", @@ -111,7 +309,27 @@ public void testCanUseTypedAccessors() { "timestamp", new Timestamp(100, 123000000), "reference", - doc); + doc, + "array", + Arrays.asList(1.0, "foo", map("nested", true), null), + "map", + map("key", true), + "vector", + FieldValue.vector(new double[] {1, 2, 3}), + "regex", + new RegexValue("^foo", "i"), + "int32", + new Int32Value(1), + "bsonTimestamp", + new BsonTimestamp(1, 2), + "bsonObjectId", + new BsonObjectId("507f191e810c19729de860ea"), + "bsonBinary", + BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}), + "minKey", + MinKey.instance(), + "maxKey", + MaxKey.instance()); waitFor(doc.set(data)); DocumentSnapshot snapshot = waitFor(doc.get()); @@ -132,11 +350,21 @@ public void testCanUseTypedAccessors() { assertEquals(timestamp.toDate(), snapshot.getDate("timestamp")); assertTrue(data.get("reference") instanceof DocumentReference); assertEquals(((DocumentReference) data.get("reference")).getPath(), doc.getPath()); + assertEquals(data.get("array"), snapshot.get("array")); + assertEquals(data.get("map"), snapshot.get("map")); + assertEquals(data.get("vector"), snapshot.getVectorValue("vector")); + assertEquals(data.get("regex"), snapshot.getRegexValue("regex")); + assertEquals(data.get("int32"), snapshot.getInt32Value("int32")); + assertEquals(data.get("bsonTimestamp"), snapshot.getBsonTimestamp("bsonTimestamp")); + assertEquals(data.get("bsonObjectId"), snapshot.getBsonObjectId("bsonObjectId")); + assertEquals(data.get("bsonBinary"), snapshot.getBsonBinaryData("bsonBinary")); + assertEquals(data.get("minKey"), snapshot.getMinKey("minKey")); + 
assertEquals(data.get("maxKey"), snapshot.getMaxKey("maxKey")); } @Test public void testTypeAccessorsCanReturnNull() { - DocumentReference doc = testDoc(); + DocumentReference doc = testDocumentOnNightly(); Map data = map(); waitFor(doc.set(data)); @@ -153,20 +381,93 @@ public void testTypeAccessorsCanReturnNull() { assertNull(snapshot.getDate("missing")); assertNull(snapshot.getTimestamp("missing")); assertNull(snapshot.getDocumentReference("missing")); + assertNull(snapshot.getVectorValue("missing")); + assertNull(snapshot.getRegexValue("missing")); + assertNull(snapshot.getInt32Value("missing")); + assertNull(snapshot.getBsonTimestamp("missing")); + assertNull(snapshot.getBsonObjectId("missing")); + assertNull(snapshot.getBsonBinaryData("missing")); + assertNull(snapshot.getMinKey("missing")); + assertNull(snapshot.getMaxKey("missing")); } @Test - public void testCanReadAndWriteDocumentReferences() { - DocumentReference docRef = testDoc(); - Map data = map("a", 42L, "ref", docRef); - verifySuccessfulWriteReadCycle(data, docRef); - } + public void snapshotListenerSortsDifferentTypesSameAsServer() throws Exception { + CollectionReference colRef = testCollectionOnNightly(); + // Document reference needs to be created first to make sure it is using the same firestore + // instance in creation + DocumentReference docRef = colRef.document("testDocRef"); - @Test - public void testCanReadAndWriteDocumentReferencesInLists() { - DocumentReference docRef = testDoc(); - List refs = Collections.singletonList(docRef); - Map data = map("a", 42L, "refs", refs); - verifySuccessfulWriteReadCycle(data, docRef); + Map> testDocs = + map( + "null", + map("value", null), + "min", + map("value", MinKey.instance()), + "boolean", + map("value", true), + "nan", + map("value", Double.NaN), + "int32", + map("value", new Int32Value(1)), + "double", + map("value", 1.0), + "int", + map("value", 1L), + "timestamp", + map("value", new Timestamp(100, 123000000)), + "bsonTimestamp", + map("value", 
new BsonTimestamp(1, 2)), + "string", + map("value", "a"), + "bytes", + map("value", blob(1, 2, 3)), + "bsonBinary", + map("value", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})), + "reference", + map("value", docRef), + "bsonObjectId", + map("value", new BsonObjectId("507f191e810c19729de860ea")), + "geoPoint", + map("value", new GeoPoint(1.23, 4.56)), + "regex", + map("value", new RegexValue("^foo", "i")), + "array", + map("value", Arrays.asList(1.0, "foo", map("key", true), null)), + "vector", + map("value", FieldValue.vector(new double[] {1, 2, 3})), + "map", + map("value", map("key", true)), + "max", + map("value", MaxKey.instance())); + + writeTestDocsOnCollection(colRef, testDocs); + + Query orderedQuery = colRef.orderBy("value"); + List expectedDocs = + Arrays.asList( + "null", + "min", + "boolean", + "nan", + "double", + "int", + "int32", + "timestamp", + "bsonTimestamp", + "string", + "bytes", + "bsonBinary", + "reference", + "bsonObjectId", + "geoPoint", + "regex", + "array", + "vector", + "map", + "max"); + + // Assert that get and snapshot listener requests sort docs in the same, expected order + assertSDKQueryResultsConsistentWithBackend(colRef, orderedQuery, testDocs, expectedDocs); } } diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/testutil/IntegrationTestUtil.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/testutil/IntegrationTestUtil.java index dd676b5f0ab..be27c7a5200 100644 --- a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/testutil/IntegrationTestUtil.java +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/testutil/IntegrationTestUtil.java @@ -18,7 +18,9 @@ import static com.google.firebase.firestore.util.Util.autoId; import static java.util.Arrays.asList; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; +import static 
org.junit.Assert.assertTrue; import android.content.Context; import android.os.StrictMode; @@ -205,6 +207,36 @@ public static FirebaseFirestore testFirestore() { return testFirestore(newTestSettings()); } + public static FirebaseFirestore testFirestoreOnNightly() { + FirebaseFirestoreSettings settings = + new FirebaseFirestoreSettings.Builder() + .setHost("test-firestore.sandbox.googleapis.com") + .setSslEnabled(true) + .build(); + + DatabaseId databaseId = DatabaseId.forDatabase("firestore-sdk-nightly", "(default)"); + + String persistenceKey = "db" + firestoreStatus.size(); + + return testFirestore(databaseId, Level.DEBUG, settings, persistenceKey); + } + + public static CollectionReference testCollectionOnNightly() { + return testFirestoreOnNightly().collection(autoId()); + } + + public static DocumentReference testDocumentOnNightly() { + return testCollectionOnNightly().document(); + } + + public static CollectionReference testCollectionWithDocsOnNightly( + Map> docs) { + CollectionReference collection = testCollectionOnNightly(); + CollectionReference writer = testFirestoreOnNightly().collection(collection.getId()); + writeAllDocs(writer, docs); + return collection; + } + /** * Initializes a new Firestore instance that uses the default project, customized with the * provided settings. @@ -366,6 +398,12 @@ public static CollectionReference testCollectionWithDocs(Map> docs) { + CollectionReference writer = testFirestoreOnNightly().collection(collection.getId()); + writeAllDocs(writer, docs); + } + public static void writeAllDocs( CollectionReference collection, Map> docs) { WriteBatch writeBatch = null; @@ -561,4 +599,69 @@ public static void checkOnlineAndOfflineResultsMatch( assertEquals(expectedDocIds, querySnapshotToIds(docsFromServer)); } } + + // Asserts that the given query produces the expected result for all of the + // following scenarios: + // 1. Performing the given query using source=server, compare with expected result and populate + // cache. 
+ // 2. Performing the given query using source=cache, compare with server result and expected + // result. + // 3. Using a snapshot listener to raise snapshots from cache and server, compare them with + // expected result. + public static void assertSDKQueryResultsConsistentWithBackend( + Query collection, + Query query, + Map> allData, + List expectedDocIds) + throws Exception { + // Check the cache round trip first to make sure cache is properly populated, otherwise the + // snapshot listener below will return partial results from previous + // "assertSDKQueryResultsConsistentWithBackend" calls if it is called multiple times in one test + checkOnlineAndOfflineResultsMatch(collection, query, expectedDocIds.toArray(new String[0])); + + EventAccumulator eventAccumulator = new EventAccumulator<>(); + ListenerRegistration registration = + query.addSnapshotListener(MetadataChanges.INCLUDE, eventAccumulator.listener()); + List watchSnapshots; + try { + watchSnapshots = eventAccumulator.await(2); + } finally { + registration.remove(); + } + assertTrue(watchSnapshots.get(0).getMetadata().isFromCache()); + verifySnapshot(watchSnapshots.get(0), allData, expectedDocIds); + assertFalse(watchSnapshots.get(1).getMetadata().isFromCache()); + verifySnapshot(watchSnapshots.get(1), allData, expectedDocIds); + } + + public static void verifySnapshot( + QuerySnapshot snapshot, + Map> allData, + List expectedDocIds) { + List snapshotDocIds = querySnapshotToIds(snapshot); + assertEquals( + String.format( + "Did not get the same document size. Expected doc size: %d, Actual doc size: %d ", + expectedDocIds.size(), snapshotDocIds.size()), + expectedDocIds.size(), + snapshotDocIds.size()); + assertTrue( + String.format( + "Did not get the expected document IDs. 
Expected doc IDs: %s, Actual doc IDs: %s ", + expectedDocIds, snapshotDocIds), + expectedDocIds.equals(snapshotDocIds)); + + Map actualDocs = toDataMap(snapshot); + + for (String docId : expectedDocIds) { + Map expectedDoc = allData.get(docId); + Map actualDoc = (Map) actualDocs.get(docId); + + assertTrue( + String.format( + "Did not get the expected document content. Expected doc: %s, Actual doc: %s ", + expectedDoc, actualDoc), + expectedDoc.equals(actualDoc)); + } + } } diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/BsonBinaryData.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/BsonBinaryData.java new file mode 100644 index 00000000000..19e64892012 --- /dev/null +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/BsonBinaryData.java @@ -0,0 +1,118 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.firebase.firestore; + +import com.google.protobuf.ByteString; +import java.util.Objects; +import javax.annotation.Nonnull; + +/** Represents a BSON Binary data type in Firestore documents. 
*/ +public final class BsonBinaryData { + private final int subtype; + private final ByteString data; + + private BsonBinaryData(int subtype, @Nonnull ByteString data) { + // By definition the subtype should be 1 byte and should therefore + // have a value between 0 and 255 + if (subtype < 0 || subtype > 255) { + throw new IllegalArgumentException( + "The subtype for BsonBinaryData must be a value in the inclusive [0, 255] range."); + } + this.subtype = subtype; + this.data = data; + } + + /** + * Creates a new BsonBinaryData instance from the provided ByteString and subtype. + * + * @param subtype The subtype to use for this instance. + * @param byteString The byteString to use for this instance. + * @return The new BsonBinaryData instance + */ + @Nonnull + public static BsonBinaryData fromByteString(int subtype, @Nonnull ByteString byteString) { + return new BsonBinaryData(subtype, byteString); + } + + /** + * Creates a new BsonBinaryData instance from the provided bytes and subtype. Makes a copy of the + * bytes passed in. + * + * @param subtype The subtype to use for this instance. + * @param bytes The bytes to use for this instance. + * @return The new BsonBinaryData instance + */ + @Nonnull + public static BsonBinaryData fromBytes(int subtype, @Nonnull byte[] bytes) { + return new BsonBinaryData(subtype, ByteString.copyFrom(bytes)); + } + + /** + * Returns the underlying data as a ByteString. + * + * @return The data as a ByteString. + */ + @Nonnull + public ByteString dataAsByteString() { + return data; + } + + /** + * Returns a copy of the underlying data as a byte[] array. + * + * @return The data as a byte[] array. + */ + @Nonnull + public byte[] dataAsBytes() { + return data.toByteArray(); + } + + /** + * Returns the subtype of this binary data. + * + * @return The subtype of the binary data. + */ + public int subtype() { + return subtype; + } + + /** + * Returns true if this BsonBinaryData is equal to the provided object. 
+ * + * @param obj The object to compare against. + * @return Whether this BsonBinaryData is equal to the provided object. + */ + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof BsonBinaryData)) { + return false; + } + BsonBinaryData other = (BsonBinaryData) obj; + return subtype == other.subtype && data.equals(other.data); + } + + @Override + public int hashCode() { + return Objects.hash(subtype, data); + } + + @Override + public String toString() { + return "BsonBinaryData{subtype=" + subtype + ", data=" + data + "}"; + } +} diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/BsonObjectId.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/BsonObjectId.java new file mode 100644 index 00000000000..2e7e12c3ad8 --- /dev/null +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/BsonObjectId.java @@ -0,0 +1,59 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.firebase.firestore; + +import androidx.annotation.NonNull; + +/** Represents a BSON ObjectId type in Firestore documents. */ +public final class BsonObjectId { + public final String value; + + /** + * Constructor that creates a new BSON ObjectId value with the given value. + * + * @param oid The 24-character hex string representing the ObjectId. 
+ */ + public BsonObjectId(@NonNull String oid) { + this.value = oid; + } + + /** + * Returns true if this BsonObjectId is equal to the provided object. + * + * @param obj The object to compare against. + * @return Whether this BsonObjectId is equal to the provided object. + */ + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof BsonObjectId)) { + return false; + } + BsonObjectId other = (BsonObjectId) obj; + return value.equals(other.value); + } + + @Override + public int hashCode() { + return value.hashCode(); + } + + @Override + public String toString() { + return "BsonObjectId{value='" + value + "'}"; + } +} diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/BsonTimestamp.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/BsonTimestamp.java new file mode 100644 index 00000000000..394a46cab84 --- /dev/null +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/BsonTimestamp.java @@ -0,0 +1,72 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.firebase.firestore; + +/** Represents a BSON Timestamp type in Firestore documents. */ +public final class BsonTimestamp { + public final long seconds; + public final long increment; + + /** + * Constructor that creates a new BSON Timestamp value with the given values. 
+ * + * @param seconds An unsigned 32-bit integer value stored as long representing the seconds. + * @param increment An unsigned 32-bit integer value stored as long representing the increment. + */ + public BsonTimestamp(long seconds, long increment) { + if (seconds < 0 || seconds > 4294967295L) { + throw new IllegalArgumentException( + String.format( + "The field 'seconds' value (%s) does not represent an unsigned 32-bit integer.", + seconds)); + } + if (increment < 0 || increment > 4294967295L) { + throw new IllegalArgumentException( + String.format( + "The field 'increment' value (%s) does not represent an unsigned 32-bit integer.", + increment)); + } + this.seconds = seconds; + this.increment = increment; + } + + /** + * Returns true if this BsonTimestamp is equal to the provided object. + * + * @param obj The object to compare against. + * @return Whether this BsonTimestamp is equal to the provided object. + */ + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof BsonTimestamp)) { + return false; + } + BsonTimestamp other = (BsonTimestamp) obj; + return seconds == other.seconds && increment == other.increment; + } + + @Override + public int hashCode() { + return (int) (31 * seconds + increment); + } + + @Override + public String toString() { + return "BsonTimestampValue{seconds=" + seconds + ", increment=" + increment + "}"; + } +} diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/DocumentSnapshot.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/DocumentSnapshot.java index 4540608fc48..9fc60f674be 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/DocumentSnapshot.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/DocumentSnapshot.java @@ -497,6 +497,90 @@ public VectorValue getVectorValue(@NonNull String field) { return (VectorValue) get(field); } + /** + * Returns the value of the field as a 
MinKey. + * + * @param field The path to the field. + * @throws RuntimeException if the value is not a MinKey. + * @return The value of the field. + */ + @Nullable + public MinKey getMinKey(@NonNull String field) { + return (MinKey) get(field); + } + + /** + * Returns the value of the field as a MaxKey. + * + * @param field The path to the field. + * @throws RuntimeException if the value is not a MaxKey. + * @return The value of the field. + */ + @Nullable + public MaxKey getMaxKey(@NonNull String field) { + return (MaxKey) get(field); + } + + /** + * Returns the value of the field as a RegexValue. + * + * @param field The path to the field. + * @throws RuntimeException if the value is not a RegexValue. + * @return The value of the field. + */ + @Nullable + public RegexValue getRegexValue(@NonNull String field) { + return (RegexValue) get(field); + } + + /** + * Returns the value of the field as an Int32Value (a 32-bit integer). + * + * @param field The path to the field. + * @throws RuntimeException if the value is not an Int32Value. + * @return The value of the field. + */ + @Nullable + public Int32Value getInt32Value(@NonNull String field) { + return (Int32Value) get(field); + } + + /** + * Returns the value of the field as a BsonObjectId. + * + * @param field The path to the field. + * @throws RuntimeException if the value is not a BsonObjectId. + * @return The value of the field. + */ + @Nullable + public BsonObjectId getBsonObjectId(@NonNull String field) { + return (BsonObjectId) get(field); + } + + /** + * Returns the value of the field as a BsonTimestamp. + * + * @param field The path to the field. + * @throws RuntimeException if the value is not a BsonTimestamp. + * @return The value of the field. + */ + @Nullable + public BsonTimestamp getBsonTimestamp(@NonNull String field) { + return (BsonTimestamp) get(field); + } + + /** + * Returns the value of the field as a BsonBinaryData. + * + * @param field The path to the field. 
+ * @throws RuntimeException if the value is not a BsonBinaryData. + * @return The value of the field. + */ + @Nullable + public BsonBinaryData getBsonBinaryData(@NonNull String field) { + return (BsonBinaryData) get(field); + } + @Nullable private T getTypedValue(String field, Class clazz) { checkNotNull(field, "Provided field must not be null."); diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/FieldValue.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/FieldValue.java index 48f67e50e12..f899457acdb 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/FieldValue.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/FieldValue.java @@ -48,7 +48,7 @@ String getMethodName() { } } - /* {@code FieldValue} class for {@link #arrayUnion()} transforms. */ + /** {@code FieldValue} class for {@link #arrayUnion()} transforms. */ static class ArrayUnionFieldValue extends FieldValue { private final List elements; @@ -66,7 +66,7 @@ List getElements() { } } - /* {@code FieldValue} class for {@link #arrayRemove()} transforms. */ + /** {@code FieldValue} class for {@link #arrayRemove()} transforms. */ static class ArrayRemoveFieldValue extends FieldValue { private final List elements; @@ -84,7 +84,7 @@ List getElements() { } } - /* {@code FieldValue} class for {@link #increment()} transforms. */ + /** {@code FieldValue} class for {@link #increment()} transforms. 
*/ static class NumericIncrementFieldValue extends FieldValue { private final Number operand; diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/Int32Value.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/Int32Value.java new file mode 100644 index 00000000000..edcc47c3964 --- /dev/null +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/Int32Value.java @@ -0,0 +1,52 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.firebase.firestore; + +/** Represents a 32-bit integer type in Firestore documents. */ +public final class Int32Value { + public final int value; + + public Int32Value(int value) { + this.value = value; + } + + /** + * Returns true if this Int32Value is equal to the provided object. + * + * @param obj The object to compare against. + * @return Whether this Int32Value is equal to the provided object. 
+ */ + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof Int32Value)) { + return false; + } + Int32Value other = (Int32Value) obj; + return value == other.value; + } + + @Override + public int hashCode() { + return value; + } + + @Override + public String toString() { + return "Int32Value{value=" + value + "}"; + } +} diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/MaxKey.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/MaxKey.java new file mode 100644 index 00000000000..4d43dae7f1d --- /dev/null +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/MaxKey.java @@ -0,0 +1,45 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.firebase.firestore; + +import androidx.annotation.NonNull; + +/** Represents the Firestore "Max Key" data type. */ +public final class MaxKey { + private static final MaxKey INSTANCE = new MaxKey(); + + private MaxKey() {} + + @NonNull + public static MaxKey instance() { + return INSTANCE; + } + + /** + * Returns true if this MaxKey is equal to the provided object. + * + * @param obj The object to compare against. + * @return Whether this MaxKey is equal to the provided object. 
+ */ + @Override + public boolean equals(Object obj) { + return obj == INSTANCE; + } + + @Override + public int hashCode() { + return MaxKey.class.getName().hashCode(); + } +} diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/MinKey.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/MinKey.java new file mode 100644 index 00000000000..e815d1fd3e1 --- /dev/null +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/MinKey.java @@ -0,0 +1,45 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.firebase.firestore; + +import androidx.annotation.NonNull; + +/** Represents the Firestore "Min Key" data type. */ +public final class MinKey { + private static final MinKey INSTANCE = new MinKey(); + + private MinKey() {} + + @NonNull + public static MinKey instance() { + return INSTANCE; + } + + /** + * Returns true if this MinKey is equal to the provided object. + * + * @param obj The object to compare against. + * @return Whether this MinKey is equal to the provided object. 
+ */ + @Override + public boolean equals(Object obj) { + return obj == INSTANCE; + } + + @Override + public int hashCode() { + return MinKey.class.getName().hashCode(); + } +} diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/RegexValue.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/RegexValue.java new file mode 100644 index 00000000000..1af0ce1f04d --- /dev/null +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/RegexValue.java @@ -0,0 +1,56 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.firebase.firestore; + +import androidx.annotation.NonNull; + +/** Represents a regular expression type in Firestore documents. */ +public final class RegexValue { + public final String pattern; + public final String options; + + public RegexValue(@NonNull String pattern, @NonNull String options) { + this.pattern = pattern; + this.options = options; + } + + /** + * Returns true if this RegexValue is equal to the provided object. + * + * @param obj The object to compare against. + * @return Whether this RegexValue is equal to the provided object. 
+ */ + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof RegexValue)) { + return false; + } + RegexValue other = (RegexValue) obj; + return pattern.equals(other.pattern) && options.equals(other.options); + } + + @Override + public int hashCode() { + return 31 * pattern.hashCode() + options.hashCode(); + } + + @Override + public String toString() { + return "RegexValue{pattern='" + pattern + "', options='" + options + "'}"; + } +} diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataReader.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataReader.java index 297479d0262..347677bd563 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataReader.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataReader.java @@ -43,6 +43,7 @@ import com.google.firestore.v1.ArrayValue; import com.google.firestore.v1.MapValue; import com.google.firestore.v1.Value; +import com.google.protobuf.ByteString; import com.google.protobuf.NullValue; import com.google.type.LatLng; import java.util.ArrayList; @@ -443,6 +444,22 @@ private Value parseScalarValue(Object input, ParseContext context) { .build(); } else if (input instanceof VectorValue) { return parseVectorValue(((VectorValue) input), context); + + } else if (input instanceof MinKey) { + return parseMinKey(); + } else if (input instanceof MaxKey) { + return parseMaxKey(); + } else if (input instanceof BsonObjectId) { + return parseBsonObjectId((BsonObjectId) input); + } else if (input instanceof BsonTimestamp) { + return parseBsonTimestamp((BsonTimestamp) input); + } else if (input instanceof BsonBinaryData) { + return parseBsonBinary((BsonBinaryData) input); + } else if (input instanceof RegexValue) { + return parseRegexValue((RegexValue) input); + } else if (input instanceof Int32Value) { + return parseInteger32Value((Int32Value) input); + } else if 
(input.getClass().isArray()) { throw context.createError("Arrays are not supported; use a List instead"); } else { @@ -459,6 +476,80 @@ private Value parseVectorValue(VectorValue vector, ParseContext context) { return Value.newBuilder().setMapValue(mapBuilder).build(); } + private Value parseMinKey() { + MapValue.Builder mapBuilder = MapValue.newBuilder(); + mapBuilder.putFields( + Values.RESERVED_MIN_KEY, Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build()); + return Value.newBuilder().setMapValue(mapBuilder).build(); + } + + private Value parseMaxKey() { + MapValue.Builder mapBuilder = MapValue.newBuilder(); + mapBuilder.putFields( + Values.RESERVED_MAX_KEY, Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build()); + return Value.newBuilder().setMapValue(mapBuilder).build(); + } + + private Value parseBsonObjectId(BsonObjectId objectId) { + MapValue.Builder mapBuilder = MapValue.newBuilder(); + mapBuilder.putFields( + Values.RESERVED_OBJECT_ID_KEY, + Value.newBuilder().setStringValue((String) objectId.value).build()); + return Value.newBuilder().setMapValue(mapBuilder).build(); + } + + private Value parseBsonTimestamp(BsonTimestamp timestamp) { + MapValue.Builder innerMapBuilder = MapValue.newBuilder(); + innerMapBuilder.putFields( + Values.RESERVED_BSON_TIMESTAMP_SECONDS_KEY, + Value.newBuilder().setIntegerValue(timestamp.seconds).build()); + innerMapBuilder.putFields( + Values.RESERVED_BSON_TIMESTAMP_INCREMENT_KEY, + Value.newBuilder().setIntegerValue(timestamp.increment).build()); + + MapValue.Builder mapBuilder = MapValue.newBuilder(); + mapBuilder.putFields( + Values.RESERVED_BSON_TIMESTAMP_KEY, + Value.newBuilder().setMapValue(innerMapBuilder).build()); + + return Value.newBuilder().setMapValue(mapBuilder).build(); + } + + private Value parseBsonBinary(BsonBinaryData binary) { + MapValue.Builder mapBuilder = MapValue.newBuilder(); + mapBuilder.putFields( + Values.RESERVED_BSON_BINARY_KEY, + Value.newBuilder() + .setBytesValue( + 
ByteString.copyFrom(new byte[] {(byte) binary.subtype()}) + .concat(binary.dataAsByteString())) + .build()); + return Value.newBuilder().setMapValue(mapBuilder).build(); + } + + private Value parseRegexValue(RegexValue regex) { + MapValue.Builder innerMapBuilder = MapValue.newBuilder(); + innerMapBuilder.putFields( + Values.RESERVED_REGEX_PATTERN_KEY, + Value.newBuilder().setStringValue(regex.pattern).build()); + innerMapBuilder.putFields( + Values.RESERVED_REGEX_OPTIONS_KEY, + Value.newBuilder().setStringValue(regex.options).build()); + + MapValue.Builder mapBuilder = MapValue.newBuilder(); + mapBuilder.putFields( + Values.RESERVED_REGEX_KEY, Value.newBuilder().setMapValue(innerMapBuilder).build()); + + return Value.newBuilder().setMapValue(mapBuilder).build(); + } + + private Value parseInteger32Value(Int32Value int32) { + MapValue.Builder mapBuilder = MapValue.newBuilder(); + mapBuilder.putFields( + Values.RESERVED_INT32_KEY, Value.newBuilder().setIntegerValue(int32.value).build()); + return Value.newBuilder().setMapValue(mapBuilder).build(); + } + private Value parseTimestamp(Timestamp timestamp) { // Firestore backend truncates precision down to microseconds. 
To ensure offline mode works // the same with regards to truncation, perform the truncation immediately without waiting for diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataWriter.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataWriter.java index d6ac7b90bba..2ad4af8d2c0 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataWriter.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataWriter.java @@ -19,17 +19,24 @@ import static com.google.firebase.firestore.model.Values.TYPE_ORDER_ARRAY; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_BLOB; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_BOOLEAN; +import static com.google.firebase.firestore.model.Values.TYPE_ORDER_BSON_BINARY; +import static com.google.firebase.firestore.model.Values.TYPE_ORDER_BSON_OBJECT_ID; +import static com.google.firebase.firestore.model.Values.TYPE_ORDER_BSON_TIMESTAMP; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_GEOPOINT; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_MAP; +import static com.google.firebase.firestore.model.Values.TYPE_ORDER_MAX_KEY; +import static com.google.firebase.firestore.model.Values.TYPE_ORDER_MIN_KEY; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_NULL; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_NUMBER; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_REFERENCE; +import static com.google.firebase.firestore.model.Values.TYPE_ORDER_REGEX; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_SERVER_TIMESTAMP; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_STRING; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_TIMESTAMP; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_VECTOR; import static 
com.google.firebase.firestore.model.Values.typeOrder; import static com.google.firebase.firestore.util.Assert.fail; +import static com.google.firestore.v1.Value.ValueTypeCase.MAP_VALUE; import androidx.annotation.RestrictTo; import com.google.firebase.Timestamp; @@ -39,6 +46,7 @@ import com.google.firebase.firestore.util.Logger; import com.google.firestore.v1.ArrayValue; import com.google.firestore.v1.Value; +import com.google.protobuf.ByteString; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -78,6 +86,9 @@ public Object convertValue(Value value) { case TYPE_ORDER_BOOLEAN: return value.getBooleanValue(); case TYPE_ORDER_NUMBER: + if (value.getValueTypeCase() == MAP_VALUE) { + return convertInt32(value.getMapValue().getFieldsMap()); + } return value.getValueTypeCase().equals(Value.ValueTypeCase.INTEGER_VALUE) ? (Object) value.getIntegerValue() // Cast to Object to prevent type coercion to double : (Object) value.getDoubleValue(); @@ -90,6 +101,19 @@ public Object convertValue(Value value) { value.getGeoPointValue().getLatitude(), value.getGeoPointValue().getLongitude()); case TYPE_ORDER_VECTOR: return convertVectorValue(value.getMapValue().getFieldsMap()); + case TYPE_ORDER_BSON_OBJECT_ID: + return convertBsonObjectId(value.getMapValue().getFieldsMap()); + case TYPE_ORDER_BSON_TIMESTAMP: + return convertBsonTimestamp(value.getMapValue().getFieldsMap()); + case TYPE_ORDER_BSON_BINARY: + return convertBsonBinary(value.getMapValue().getFieldsMap()); + case TYPE_ORDER_REGEX: + return convertRegex(value.getMapValue().getFieldsMap()); + case TYPE_ORDER_MAX_KEY: + return MaxKey.instance(); + case TYPE_ORDER_MIN_KEY: + return MinKey.instance(); + default: throw fail("Unknown value type: " + value.getValueTypeCase()); } @@ -115,6 +139,43 @@ VectorValue convertVectorValue(Map mapValue) { return new VectorValue(doubles); } + BsonObjectId convertBsonObjectId(Map mapValue) { + return new 
BsonObjectId(mapValue.get(Values.RESERVED_OBJECT_ID_KEY).getStringValue()); + } + + BsonTimestamp convertBsonTimestamp(Map mapValue) { + Map fields = + mapValue.get(Values.RESERVED_BSON_TIMESTAMP_KEY).getMapValue().getFieldsMap(); + return new BsonTimestamp( + fields.get(Values.RESERVED_BSON_TIMESTAMP_SECONDS_KEY).getIntegerValue(), + fields.get(Values.RESERVED_BSON_TIMESTAMP_INCREMENT_KEY).getIntegerValue()); + } + + BsonBinaryData convertBsonBinary(Map mapValue) { + ByteString bytes = mapValue.get(Values.RESERVED_BSON_BINARY_KEY).getBytesValue(); + // Note: A byte is interpreted as a signed 8-bit value. Since values larger than 127 have a + // leading '1' bit, simply casting them to integer results in sign-extension and lead to a + // negative integer value. For example, the byte `0x80` casted to `int` results in `-128`, + // rather than `128`, and the byte `0xFF` casted to `int` will be `-1` rather than `255`. + // Since we want the `subtype` to be an unsigned byte, we need to perform 0-extension (rather + // than sign-extension) to convert it to an int. 
+ int subtype = bytes.byteAt(0) & 0xFF; + return BsonBinaryData.fromByteString(subtype, bytes.substring(1)); + } + + RegexValue convertRegex(Map mapValue) { + Map fields = + mapValue.get(Values.RESERVED_REGEX_KEY).getMapValue().getFieldsMap(); + + return new RegexValue( + fields.get(Values.RESERVED_REGEX_PATTERN_KEY).getStringValue(), + fields.get(Values.RESERVED_REGEX_OPTIONS_KEY).getStringValue()); + } + + Int32Value convertInt32(Map mapValue) { + return new Int32Value((int) mapValue.get(Values.RESERVED_INT32_KEY).getIntegerValue()); + } + private Object convertServerTimestamp(Value serverTimestampValue) { switch (serverTimestampBehavior) { case PREVIOUS: diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/core/Target.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/core/Target.java index d058e15659e..75879d77b3e 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/core/Target.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/core/Target.java @@ -16,8 +16,8 @@ import static com.google.firebase.firestore.core.FieldFilter.Operator.ARRAY_CONTAINS; import static com.google.firebase.firestore.core.FieldFilter.Operator.ARRAY_CONTAINS_ANY; -import static com.google.firebase.firestore.model.Values.MAX_VALUE; -import static com.google.firebase.firestore.model.Values.MIN_VALUE; +import static com.google.firebase.firestore.model.Values.INTERNAL_MAX_VALUE; +import static com.google.firebase.firestore.model.Values.INTERNAL_MIN_VALUE; import static com.google.firebase.firestore.model.Values.lowerBoundCompare; import static com.google.firebase.firestore.model.Values.upperBoundCompare; @@ -184,7 +184,7 @@ private List getFieldFiltersForPath(FieldPath path) { /** * Returns a lower bound of field values that can be used as a starting point to scan the index - * defined by {@code fieldIndex}. Returns {@link Values#MIN_VALUE} if no lower bound exists. + * defined by {@code fieldIndex}. 
Returns {@link Values#INTERNAL_MIN_VALUE} if no lower bound exists. */ public Bound getLowerBound(FieldIndex fieldIndex) { List values = new ArrayList<>(); @@ -206,7 +206,7 @@ public Bound getLowerBound(FieldIndex fieldIndex) { /** * Returns an upper bound of field values that can be used as an ending point when scanning the - * index defined by {@code fieldIndex}. Returns {@link Values#MAX_VALUE} if no upper bound exists. + * index defined by {@code fieldIndex}. Returns {@link Values#INTERNAL_MAX_VALUE} if no upper bound exists. */ public Bound getUpperBound(FieldIndex fieldIndex) { List values = new ArrayList<>(); @@ -235,12 +235,12 @@ public Bound getUpperBound(FieldIndex fieldIndex) { */ private Pair getAscendingBound( FieldIndex.Segment segment, @Nullable Bound bound) { - Value segmentValue = MIN_VALUE; + Value segmentValue = INTERNAL_MIN_VALUE; boolean segmentInclusive = true; // Process all filters to find a value for the current field segment for (FieldFilter fieldFilter : getFieldFiltersForPath(segment.getFieldPath())) { - Value filterValue = MIN_VALUE; + Value filterValue = INTERNAL_MIN_VALUE; boolean filterInclusive = true; switch (fieldFilter.getOperator()) { @@ -259,7 +259,7 @@ private Pair getAscendingBound( break; case NOT_EQUAL: case NOT_IN: - filterValue = Values.MIN_VALUE; + filterValue = Values.MIN_KEY_VALUE; break; default: // Remaining filters cannot be used as bound. 
@@ -300,12 +300,12 @@ private Pair getAscendingBound( */ private Pair getDescendingBound( FieldIndex.Segment segment, @Nullable Bound bound) { - Value segmentValue = MAX_VALUE; + Value segmentValue = INTERNAL_MAX_VALUE; boolean segmentInclusive = true; // Process all filters to find a value for the current field segment for (FieldFilter fieldFilter : getFieldFiltersForPath(segment.getFieldPath())) { - Value filterValue = MAX_VALUE; + Value filterValue = INTERNAL_MAX_VALUE; boolean filterInclusive = true; switch (fieldFilter.getOperator()) { @@ -325,7 +325,7 @@ private Pair getDescendingBound( break; case NOT_EQUAL: case NOT_IN: - filterValue = Values.MAX_VALUE; + filterValue = Values.MAX_KEY_VALUE; break; default: // Remaining filters cannot be used as bound. diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/index/FirestoreIndexValueWriter.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/index/FirestoreIndexValueWriter.java index f275634957a..87a0ef0221b 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/index/FirestoreIndexValueWriter.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/index/FirestoreIndexValueWriter.java @@ -19,31 +19,41 @@ import com.google.firestore.v1.ArrayValue; import com.google.firestore.v1.MapValue; import com.google.firestore.v1.Value; +import com.google.protobuf.ByteString; import com.google.protobuf.Timestamp; import com.google.type.LatLng; +import java.util.ArrayList; +import java.util.List; import java.util.Map; /** Firestore index value writer. */ public class FirestoreIndexValueWriter { - // Note: This code is copied from the backend. Code that is not used by Firestore was removed. + // Note: This file is copied from the backend. Code that is not used by + // Firestore was removed. Code that has different behavior was modified. // The client SDK only supports references to documents from the same database. 
We can skip the // first five segments. public static final int DOCUMENT_NAME_OFFSET = 5; public static final int INDEX_TYPE_NULL = 5; + public static final int INDEX_TYPE_MIN_KEY = 7; public static final int INDEX_TYPE_BOOLEAN = 10; public static final int INDEX_TYPE_NAN = 13; public static final int INDEX_TYPE_NUMBER = 15; public static final int INDEX_TYPE_TIMESTAMP = 20; + public static final int INDEX_TYPE_BSON_TIMESTAMP = 22; public static final int INDEX_TYPE_STRING = 25; public static final int INDEX_TYPE_BLOB = 30; + public static final int INDEX_TYPE_BSON_BINARY = 31; public static final int INDEX_TYPE_REFERENCE = 37; + public static final int INDEX_TYPE_BSON_OBJECT_ID = 43; public static final int INDEX_TYPE_GEOPOINT = 45; + public static final int INDEX_TYPE_REGEX = 47; public static final int INDEX_TYPE_ARRAY = 50; public static final int INDEX_TYPE_VECTOR = 53; public static final int INDEX_TYPE_MAP = 55; public static final int INDEX_TYPE_REFERENCE_SEGMENT = 60; + public static final int INDEX_TYPE_MAX_KEY = 999; // A terminator that indicates that a truncatable value was not truncated. // This must be smaller than all other type labels. 
@@ -112,13 +122,43 @@ private void writeIndexValueAux(Value indexValue, DirectionalIndexByteEncoder en encoder.writeDouble(geoPoint.getLongitude()); break; case MAP_VALUE: - if (Values.isMaxValue(indexValue)) { + Values.MapRepresentation mapType = Values.detectMapRepresentation(indexValue); + if (mapType.equals(Values.MapRepresentation.INTERNAL_MAX)) { writeValueTypeLabel(encoder, Integer.MAX_VALUE); break; - } else if (Values.isVectorValue(indexValue)) { + } else if (mapType.equals(Values.MapRepresentation.VECTOR)) { writeIndexVector(indexValue.getMapValue(), encoder); break; + } else if (mapType.equals(Values.MapRepresentation.REGEX)) { + writeIndexRegex(indexValue.getMapValue(), encoder); + break; + } else if (mapType.equals(Values.MapRepresentation.BSON_TIMESTAMP)) { + writeIndexBsonTimestamp(indexValue.getMapValue(), encoder); + break; + } else if (mapType.equals(Values.MapRepresentation.BSON_OBJECT_ID)) { + writeIndexBsonObjectId(indexValue.getMapValue(), encoder); + break; + } else if (mapType.equals(Values.MapRepresentation.BSON_BINARY)) { + writeIndexBsonBinaryData(indexValue.getMapValue(), encoder); + break; + } else if (mapType.equals(Values.MapRepresentation.INT32)) { + writeValueTypeLabel(encoder, INDEX_TYPE_NUMBER); + // Double and Int32 sort the same + encoder.writeDouble( + indexValue + .getMapValue() + .getFieldsMap() + .get(Values.RESERVED_INT32_KEY) + .getIntegerValue()); + break; + } else if (mapType.equals(Values.MapRepresentation.MIN_KEY)) { + writeValueTypeLabel(encoder, INDEX_TYPE_MIN_KEY); + break; + } else if (mapType.equals(Values.MapRepresentation.MAX_KEY)) { + writeValueTypeLabel(encoder, INDEX_TYPE_MAX_KEY); + break; } + writeIndexMap(indexValue.getMapValue(), encoder); writeTruncationMarker(encoder); break; @@ -157,6 +197,53 @@ private void writeIndexVector(MapValue mapIndexValue, DirectionalIndexByteEncode this.writeIndexValueAux(map.get(key), encoder); } + private void writeIndexRegex(MapValue mapIndexValue, 
DirectionalIndexByteEncoder encoder) { + writeValueTypeLabel(encoder, INDEX_TYPE_REGEX); + + Map fields = + mapIndexValue.getFieldsMap().get(Values.RESERVED_REGEX_KEY).getMapValue().getFieldsMap(); + encoder.writeString(fields.get(Values.RESERVED_REGEX_PATTERN_KEY).getStringValue()); + encoder.writeString(fields.get(Values.RESERVED_REGEX_OPTIONS_KEY).getStringValue()); + writeTruncationMarker(encoder); + } + + private void writeIndexBsonTimestamp(MapValue mapValue, DirectionalIndexByteEncoder encoder) { + writeValueTypeLabel(encoder, INDEX_TYPE_BSON_TIMESTAMP); + + Map timestampFields = + mapValue + .getFieldsMap() + .get(Values.RESERVED_BSON_TIMESTAMP_KEY) + .getMapValue() + .getFieldsMap(); + + long unsignedSeconds = + timestampFields.get(Values.RESERVED_BSON_TIMESTAMP_SECONDS_KEY).getIntegerValue(); + long unsignedIncrement = + timestampFields.get(Values.RESERVED_BSON_TIMESTAMP_INCREMENT_KEY).getIntegerValue(); + + // BSON Timestamps are encoded as a 64-bit long with the lower 32 bits being the increment + // and the upper 32 bits being the seconds + long value = (unsignedSeconds << 32) | (unsignedIncrement & 0xFFFFFFFFL); + + encoder.writeLong(value); + } + + private void writeIndexBsonObjectId(MapValue mapValue, DirectionalIndexByteEncoder encoder) { + writeValueTypeLabel(encoder, INDEX_TYPE_BSON_OBJECT_ID); + + String oid = mapValue.getFieldsMap().get(Values.RESERVED_OBJECT_ID_KEY).getStringValue(); + encoder.writeBytes(ByteString.copyFromUtf8(oid)); + } + + private void writeIndexBsonBinaryData(MapValue mapValue, DirectionalIndexByteEncoder encoder) { + writeValueTypeLabel(encoder, INDEX_TYPE_BSON_BINARY); + + encoder.writeBytes( + mapValue.getFieldsMap().get(Values.RESERVED_BSON_BINARY_KEY).getBytesValue()); + writeTruncationMarker(encoder); + } + private void writeIndexMap(MapValue mapIndexValue, DirectionalIndexByteEncoder encoder) { writeValueTypeLabel(encoder, INDEX_TYPE_MAP); for (Map.Entry entry : mapIndexValue.getFieldsMap().entrySet()) { @@ 
-177,7 +264,15 @@ private void writeIndexArray(ArrayValue arrayIndexValue, DirectionalIndexByteEnc private void writeIndexEntityRef(String referenceValue, DirectionalIndexByteEncoder encoder) { writeValueTypeLabel(encoder, INDEX_TYPE_REFERENCE); - ResourcePath path = ResourcePath.fromString(referenceValue); + List segments = new ArrayList<>(); + String[] parts = referenceValue.split("/"); + for (String part : parts) { + if (!part.isEmpty()) { + segments.add(part); + } + } + ResourcePath path = ResourcePath.fromSegments(segments); + int numSegments = path.length(); for (int index = DOCUMENT_NAME_OFFSET; index < numSegments; ++index) { String segment = path.getSegment(index); diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/model/Values.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/model/Values.java index 834fb2454a3..71456829ffb 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/model/Values.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/model/Values.java @@ -15,7 +15,6 @@ package com.google.firebase.firestore.model; import static com.google.firebase.firestore.model.ServerTimestamps.getLocalWriteTime; -import static com.google.firebase.firestore.model.ServerTimestamps.isServerTimestamp; import static com.google.firebase.firestore.util.Assert.fail; import static com.google.firebase.firestore.util.Assert.hardAssert; @@ -38,19 +37,51 @@ public class Values { public static final String TYPE_KEY = "__type__"; + + public static final String RESERVED_VECTOR_KEY = "__vector__"; + // For MinKey type + public static final String RESERVED_MIN_KEY = "__min__"; + + // For MaxKey type + public static final String RESERVED_MAX_KEY = "__max__"; + + // For Regex type + public static final String RESERVED_REGEX_KEY = "__regex__"; + public static final String RESERVED_REGEX_PATTERN_KEY = "pattern"; + public static final String RESERVED_REGEX_OPTIONS_KEY = "options"; + + // For 
ObjectId type + public static final String RESERVED_OBJECT_ID_KEY = "__oid__"; + + // For Int32 type + public static final String RESERVED_INT32_KEY = "__int__"; + + // For RequestTimestamp + public static final String RESERVED_BSON_TIMESTAMP_KEY = "__request_timestamp__"; + + public static final String RESERVED_BSON_TIMESTAMP_SECONDS_KEY = "seconds"; + public static final String RESERVED_BSON_TIMESTAMP_INCREMENT_KEY = "increment"; + + // For BSON Binary Data + public static final String RESERVED_BSON_BINARY_KEY = "__binary__"; + + public static final String RESERVED_SERVER_TIMESTAMP_KEY = "server_timestamp"; + public static final Value NAN_VALUE = Value.newBuilder().setDoubleValue(Double.NaN).build(); public static final Value NULL_VALUE = Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build(); - public static final Value MIN_VALUE = NULL_VALUE; - public static final Value MAX_VALUE_TYPE = Value.newBuilder().setStringValue("__max__").build(); - public static final Value MAX_VALUE = + public static final Value INTERNAL_MIN_VALUE = NULL_VALUE; + public static final Value MAX_VALUE_TYPE = + Value.newBuilder().setStringValue(RESERVED_MAX_KEY).build(); + public static final Value INTERNAL_MAX_VALUE = Value.newBuilder() .setMapValue(MapValue.newBuilder().putFields(TYPE_KEY, MAX_VALUE_TYPE)) .build(); public static final Value VECTOR_VALUE_TYPE = - Value.newBuilder().setStringValue("__vector__").build(); + Value.newBuilder().setStringValue(RESERVED_VECTOR_KEY).build(); public static final String VECTOR_MAP_VECTORS_KEY = "value"; + private static final Value MIN_VECTOR_VALUE = Value.newBuilder() .setMapValue( @@ -63,21 +94,27 @@ public class Values { /** * The order of types in Firestore. This order is based on the backend's ordering, but modified to - * support server timestamps and {@link #MAX_VALUE}. + * support server timestamps and {@link #INTERNAL_MAX_VALUE}. 
*/ public static final int TYPE_ORDER_NULL = 0; - public static final int TYPE_ORDER_BOOLEAN = 1; - public static final int TYPE_ORDER_NUMBER = 2; - public static final int TYPE_ORDER_TIMESTAMP = 3; - public static final int TYPE_ORDER_SERVER_TIMESTAMP = 4; - public static final int TYPE_ORDER_STRING = 5; - public static final int TYPE_ORDER_BLOB = 6; - public static final int TYPE_ORDER_REFERENCE = 7; - public static final int TYPE_ORDER_GEOPOINT = 8; - public static final int TYPE_ORDER_ARRAY = 9; - public static final int TYPE_ORDER_VECTOR = 10; - public static final int TYPE_ORDER_MAP = 11; + public static final int TYPE_ORDER_MIN_KEY = 1; + public static final int TYPE_ORDER_BOOLEAN = 2; + public static final int TYPE_ORDER_NUMBER = 3; + public static final int TYPE_ORDER_TIMESTAMP = 4; + public static final int TYPE_ORDER_BSON_TIMESTAMP = 5; + public static final int TYPE_ORDER_SERVER_TIMESTAMP = 6; + public static final int TYPE_ORDER_STRING = 7; + public static final int TYPE_ORDER_BLOB = 8; + public static final int TYPE_ORDER_BSON_BINARY = 9; + public static final int TYPE_ORDER_REFERENCE = 10; + public static final int TYPE_ORDER_BSON_OBJECT_ID = 11; + public static final int TYPE_ORDER_GEOPOINT = 12; + public static final int TYPE_ORDER_REGEX = 13; + public static final int TYPE_ORDER_ARRAY = 14; + public static final int TYPE_ORDER_VECTOR = 15; + public static final int TYPE_ORDER_MAP = 16; + public static final int TYPE_ORDER_MAX_KEY = 17; public static final int TYPE_ORDER_MAX_VALUE = Integer.MAX_VALUE; @@ -89,7 +126,6 @@ public static int typeOrder(Value value) { case BOOLEAN_VALUE: return TYPE_ORDER_BOOLEAN; case INTEGER_VALUE: - return TYPE_ORDER_NUMBER; case DOUBLE_VALUE: return TYPE_ORDER_NUMBER; case TIMESTAMP_VALUE: @@ -105,12 +141,27 @@ public static int typeOrder(Value value) { case ARRAY_VALUE: return TYPE_ORDER_ARRAY; case MAP_VALUE: - if (isServerTimestamp(value)) { + MapRepresentation mapType = detectMapRepresentation(value); + if 
(mapType.equals(MapRepresentation.SERVER_TIMESTAMP)) { return TYPE_ORDER_SERVER_TIMESTAMP; - } else if (isMaxValue(value)) { + } else if (mapType.equals(MapRepresentation.INTERNAL_MAX)) { return TYPE_ORDER_MAX_VALUE; - } else if (isVectorValue(value)) { + } else if (mapType.equals(MapRepresentation.VECTOR)) { return TYPE_ORDER_VECTOR; + } else if (mapType.equals(MapRepresentation.MIN_KEY)) { + return TYPE_ORDER_MIN_KEY; + } else if (mapType.equals(MapRepresentation.MAX_KEY)) { + return TYPE_ORDER_MAX_KEY; + } else if (mapType.equals(MapRepresentation.REGEX)) { + return TYPE_ORDER_REGEX; + } else if (mapType.equals(MapRepresentation.BSON_TIMESTAMP)) { + return TYPE_ORDER_BSON_TIMESTAMP; + } else if (mapType.equals(MapRepresentation.BSON_OBJECT_ID)) { + return TYPE_ORDER_BSON_OBJECT_ID; + } else if (mapType.equals(MapRepresentation.BSON_BINARY)) { + return TYPE_ORDER_BSON_BINARY; + } else if (mapType.equals(MapRepresentation.INT32)) { + return TYPE_ORDER_NUMBER; } else { return TYPE_ORDER_MAP; } @@ -145,6 +196,9 @@ public static boolean equals(Value left, Value right) { case TYPE_ORDER_SERVER_TIMESTAMP: return getLocalWriteTime(left).equals(getLocalWriteTime(right)); case TYPE_ORDER_MAX_VALUE: + case TYPE_ORDER_NULL: + case TYPE_ORDER_MAX_KEY: + case TYPE_ORDER_MIN_KEY: return true; default: return left.equals(right); @@ -154,16 +208,33 @@ public static boolean equals(Value left, Value right) { private static boolean numberEquals(Value left, Value right) { if (left.getValueTypeCase() == Value.ValueTypeCase.INTEGER_VALUE && right.getValueTypeCase() == Value.ValueTypeCase.INTEGER_VALUE) { - return left.getIntegerValue() == right.getIntegerValue(); + return getIntegerValue(left) == getIntegerValue(right); } else if (left.getValueTypeCase() == Value.ValueTypeCase.DOUBLE_VALUE && right.getValueTypeCase() == Value.ValueTypeCase.DOUBLE_VALUE) { return Double.doubleToLongBits(left.getDoubleValue()) == Double.doubleToLongBits(right.getDoubleValue()); + } else if 
(detectMapRepresentation(left).equals(MapRepresentation.INT32) + && detectMapRepresentation(right).equals(MapRepresentation.INT32)) { + return getIntegerValue(left) == getIntegerValue(right); } return false; } + /** + * Returns a long from a 32-bit or 64-bit proto integer value. Throws an exception if the value is + * not an integer. + */ + private static long getIntegerValue(Value value) { + if (value.hasIntegerValue()) { + return value.getIntegerValue(); + } + if (value.hasMapValue() && value.getMapValue().getFieldsMap().containsKey(RESERVED_INT32_KEY)) { + return value.getMapValue().getFieldsMap().get(RESERVED_INT32_KEY).getIntegerValue(); + } + throw new IllegalArgumentException("getIntegerValue was called with a non-integer argument"); + } + private static boolean arrayEquals(Value left, Value right) { ArrayValue leftArray = left.getArrayValue(); ArrayValue rightArray = right.getArrayValue(); @@ -220,6 +291,8 @@ public static int compare(Value left, Value right) { switch (leftType) { case TYPE_ORDER_NULL: case TYPE_ORDER_MAX_VALUE: + case TYPE_ORDER_MAX_KEY: + case TYPE_ORDER_MIN_KEY: return 0; case TYPE_ORDER_BOOLEAN: return Util.compareBooleans(left.getBooleanValue(), right.getBooleanValue()); @@ -243,6 +316,14 @@ public static int compare(Value left, Value right) { return compareMaps(left.getMapValue(), right.getMapValue()); case TYPE_ORDER_VECTOR: return compareVectors(left.getMapValue(), right.getMapValue()); + case TYPE_ORDER_REGEX: + return compareRegex(left.getMapValue(), right.getMapValue()); + case TYPE_ORDER_BSON_OBJECT_ID: + return compareBsonObjectId(left.getMapValue(), right.getMapValue()); + case TYPE_ORDER_BSON_TIMESTAMP: + return compareBsonTimestamp(left.getMapValue(), right.getMapValue()); + case TYPE_ORDER_BSON_BINARY: + return compareBsonBinary(left.getMapValue(), right.getMapValue()); default: throw fail("Invalid value type: " + leftType); } @@ -284,15 +365,22 @@ private static int compareNumbers(Value left, Value right) { if 
(left.getValueTypeCase() == Value.ValueTypeCase.DOUBLE_VALUE) { double leftDouble = left.getDoubleValue(); if (right.getValueTypeCase() == Value.ValueTypeCase.DOUBLE_VALUE) { + // left and right are both doubles. return Util.compareDoubles(leftDouble, right.getDoubleValue()); - } else if (right.getValueTypeCase() == Value.ValueTypeCase.INTEGER_VALUE) { - return Util.compareMixed(leftDouble, right.getIntegerValue()); + } else if (right.getValueTypeCase() == Value.ValueTypeCase.INTEGER_VALUE + || detectMapRepresentation(right) == MapRepresentation.INT32) { + // left is a double and right is a 32/64-bit integer. + return Util.compareMixed(leftDouble, getIntegerValue(right)); } - } else if (left.getValueTypeCase() == Value.ValueTypeCase.INTEGER_VALUE) { - long leftLong = left.getIntegerValue(); - if (right.getValueTypeCase() == Value.ValueTypeCase.INTEGER_VALUE) { - return Util.compareLongs(leftLong, right.getIntegerValue()); + } else if (left.getValueTypeCase() == Value.ValueTypeCase.INTEGER_VALUE + || detectMapRepresentation(left) == MapRepresentation.INT32) { + long leftLong = getIntegerValue(left); + if (right.getValueTypeCase() == Value.ValueTypeCase.INTEGER_VALUE + || detectMapRepresentation(right) == MapRepresentation.INT32) { + // left and right are both 32/64-bit integers. + return Util.compareLongs(leftLong, getIntegerValue(right)); } else if (right.getValueTypeCase() == Value.ValueTypeCase.DOUBLE_VALUE) { + // left is a 32/64-bit integer and right is a double. 
return -1 * Util.compareMixed(right.getDoubleValue(), leftLong); } } @@ -363,6 +451,54 @@ private static int compareMaps(MapValue left, MapValue right) { return Util.compareBooleans(iterator1.hasNext(), iterator2.hasNext()); } + private static int compareRegex(MapValue left, MapValue right) { + Map leftMap = + left.getFieldsMap().get(RESERVED_REGEX_KEY).getMapValue().getFieldsMap(); + Map rightMap = + right.getFieldsMap().get(RESERVED_REGEX_KEY).getMapValue().getFieldsMap(); + + String leftPattern = leftMap.get(RESERVED_REGEX_PATTERN_KEY).getStringValue(); + String rightPattern = rightMap.get(RESERVED_REGEX_PATTERN_KEY).getStringValue(); + + int comp = Util.compareUtf8Strings(leftPattern, rightPattern); + if (comp != 0) return comp; + + String leftOption = leftMap.get(RESERVED_REGEX_OPTIONS_KEY).getStringValue(); + String rightOption = rightMap.get(RESERVED_REGEX_OPTIONS_KEY).getStringValue(); + + return leftOption.compareTo(rightOption); + } + + private static int compareBsonObjectId(MapValue left, MapValue right) { + String lhs = left.getFieldsMap().get(RESERVED_OBJECT_ID_KEY).getStringValue(); + String rhs = right.getFieldsMap().get(RESERVED_OBJECT_ID_KEY).getStringValue(); + return Util.compareUtf8Strings(lhs, rhs); + } + + private static int compareBsonTimestamp(MapValue left, MapValue right) { + Map leftMap = + left.getFieldsMap().get(RESERVED_BSON_TIMESTAMP_KEY).getMapValue().getFieldsMap(); + Map rightMap = + right.getFieldsMap().get(RESERVED_BSON_TIMESTAMP_KEY).getMapValue().getFieldsMap(); + + long leftSeconds = leftMap.get(RESERVED_BSON_TIMESTAMP_SECONDS_KEY).getIntegerValue(); + long rightSeconds = rightMap.get(RESERVED_BSON_TIMESTAMP_SECONDS_KEY).getIntegerValue(); + + int comp = Util.compareLongs(leftSeconds, rightSeconds); + if (comp != 0) return comp; + + long leftIncrement = leftMap.get(RESERVED_BSON_TIMESTAMP_INCREMENT_KEY).getIntegerValue(); + long rightIncrement = rightMap.get(RESERVED_BSON_TIMESTAMP_INCREMENT_KEY).getIntegerValue(); + + return 
Util.compareLongs(leftIncrement, rightIncrement); + } + + private static int compareBsonBinary(MapValue left, MapValue right) { + ByteString lhs = left.getFieldsMap().get(RESERVED_BSON_BINARY_KEY).getBytesValue(); + ByteString rhs = right.getFieldsMap().get(RESERVED_BSON_BINARY_KEY).getBytesValue(); + return Util.compareByteStrings(lhs, rhs); + } + private static int compareVectors(MapValue left, MapValue right) { Map leftMap = left.getFieldsMap(); Map rightMap = right.getFieldsMap(); @@ -396,7 +532,7 @@ private static void canonifyValue(StringBuilder builder, Value value) { builder.append(value.getBooleanValue()); break; case INTEGER_VALUE: - builder.append(value.getIntegerValue()); + builder.append(getIntegerValue(value)); break; case DOUBLE_VALUE: builder.append(value.getDoubleValue()); @@ -537,6 +673,80 @@ public static Value refValue(DatabaseId databaseId, DocumentKey key) { public static Value MIN_MAP = Value.newBuilder().setMapValue(MapValue.getDefaultInstance()).build(); + public static Value MIN_KEY_VALUE = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + RESERVED_MIN_KEY, + Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build())) + .build(); + + public static Value MAX_KEY_VALUE = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + RESERVED_MAX_KEY, + Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build())) + .build(); + + public static Value MIN_BSON_OBJECT_ID_VALUE = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields(RESERVED_OBJECT_ID_KEY, Value.newBuilder().setStringValue("").build())) + .build(); + + public static Value MIN_BSON_TIMESTAMP_VALUE = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + RESERVED_BSON_TIMESTAMP_KEY, + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + // Both seconds and increment are 32 bit unsigned integers + .putFields( + RESERVED_BSON_TIMESTAMP_SECONDS_KEY, + 
Value.newBuilder().setIntegerValue(0).build()) + .putFields( + RESERVED_BSON_TIMESTAMP_INCREMENT_KEY, + Value.newBuilder().setIntegerValue(0).build())) + .build())) + .build(); + + public static Value MIN_BSON_BINARY_VALUE = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + RESERVED_BSON_BINARY_KEY, + // bsonBinaryValue should have at least one byte as subtype + Value.newBuilder() + .setBytesValue(ByteString.copyFrom(new byte[] {0})) + .build())) + .build(); + + public static Value MIN_REGEX_VALUE = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + RESERVED_REGEX_KEY, + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + RESERVED_REGEX_PATTERN_KEY, + Value.newBuilder().setStringValue("").build()) + .putFields( + RESERVED_REGEX_OPTIONS_KEY, + Value.newBuilder().setStringValue("").build())) + .build())) + .build(); + /** Returns the lowest value for the given value type (inclusive). */ public static Value getLowerBound(Value value) { switch (value.getValueTypeCase()) { @@ -560,10 +770,27 @@ public static Value getLowerBound(Value value) { case ARRAY_VALUE: return MIN_ARRAY; case MAP_VALUE: + MapRepresentation mapType = detectMapRepresentation(value); // VectorValue sorts after ArrayValue and before an empty MapValue - if (isVectorValue(value)) { + if (mapType.equals(MapRepresentation.VECTOR)) { return MIN_VECTOR_VALUE; + } else if (mapType.equals(MapRepresentation.BSON_OBJECT_ID)) { + return MIN_BSON_OBJECT_ID_VALUE; + } else if (mapType.equals(MapRepresentation.BSON_TIMESTAMP)) { + return MIN_BSON_TIMESTAMP_VALUE; + } else if (mapType.equals(MapRepresentation.BSON_BINARY)) { + return MIN_BSON_BINARY_VALUE; + } else if (mapType.equals(MapRepresentation.REGEX)) { + return MIN_REGEX_VALUE; + } else if (mapType.equals(MapRepresentation.INT32)) { + // int32Value is treated the same as integerValue and doubleValue + return MIN_NUMBER; + } else if (mapType.equals(MapRepresentation.MIN_KEY)) { + 
return MIN_KEY_VALUE; + } else if (mapType.equals(MapRepresentation.MAX_KEY)) { + return MAX_KEY_VALUE; } + return MIN_MAP; default: throw new IllegalArgumentException("Unknown value type: " + value.getValueTypeCase()); @@ -574,42 +801,185 @@ public static Value getLowerBound(Value value) { public static Value getUpperBound(Value value) { switch (value.getValueTypeCase()) { case NULL_VALUE: - return MIN_BOOLEAN; + return MIN_KEY_VALUE; case BOOLEAN_VALUE: return MIN_NUMBER; case INTEGER_VALUE: case DOUBLE_VALUE: return MIN_TIMESTAMP; case TIMESTAMP_VALUE: - return MIN_STRING; + return MIN_BSON_TIMESTAMP_VALUE; case STRING_VALUE: return MIN_BYTES; case BYTES_VALUE: - return MIN_REFERENCE; + return MIN_BSON_BINARY_VALUE; case REFERENCE_VALUE: - return MIN_GEO_POINT; + return MIN_BSON_OBJECT_ID_VALUE; case GEO_POINT_VALUE: - return MIN_ARRAY; + return MIN_REGEX_VALUE; case ARRAY_VALUE: return MIN_VECTOR_VALUE; case MAP_VALUE: - // VectorValue sorts after ArrayValue and before an empty MapValue - if (isVectorValue(value)) { + MapRepresentation mapType = detectMapRepresentation(value); + if (mapType.equals(MapRepresentation.VECTOR)) { return MIN_MAP; } - return MAX_VALUE; + if (mapType.equals(MapRepresentation.BSON_OBJECT_ID)) { + return MIN_GEO_POINT; + } + if (mapType.equals(MapRepresentation.BSON_TIMESTAMP)) { + return MIN_STRING; + } + if (mapType.equals(MapRepresentation.BSON_BINARY)) { + return MIN_REFERENCE; + } + if (mapType.equals(MapRepresentation.REGEX)) { + return MIN_ARRAY; + } + if (mapType.equals(MapRepresentation.INT32)) { + // int32Value is treated the same as integerValue and doubleValue + return MIN_TIMESTAMP; + } + if (mapType.equals(MapRepresentation.MIN_KEY)) { + return MIN_BOOLEAN; + } + if (mapType.equals(MapRepresentation.MAX_KEY)) { + return INTERNAL_MAX_VALUE; + } + + return MAX_KEY_VALUE; default: throw new IllegalArgumentException("Unknown value type: " + value.getValueTypeCase()); } } - /** Returns true if the Value represents the canonical 
{@link #MAX_VALUE} . */ - public static boolean isMaxValue(Value value) { - return MAX_VALUE_TYPE.equals(value.getMapValue().getFieldsMap().get(TYPE_KEY)); + static boolean isMinKey(Map fields) { + return fields.size() == 1 + && fields.containsKey(RESERVED_MIN_KEY) + && fields.get(RESERVED_MIN_KEY).hasNullValue(); + } + + static boolean isMaxKey(Map fields) { + return fields.size() == 1 + && fields.containsKey(RESERVED_MAX_KEY) + && fields.get(RESERVED_MAX_KEY).hasNullValue(); + } + + static boolean isInt32Value(Map fields) { + return fields.size() == 1 + && fields.containsKey(RESERVED_INT32_KEY) + && fields.get(RESERVED_INT32_KEY).hasIntegerValue(); + } + + static boolean isBsonObjectId(Map fields) { + return fields.size() == 1 + && fields.containsKey(RESERVED_OBJECT_ID_KEY) + && fields.get(RESERVED_OBJECT_ID_KEY).hasStringValue(); + } + + static boolean isBsonBinaryData(Map fields) { + return fields.size() == 1 + && fields.containsKey(RESERVED_BSON_BINARY_KEY) + && fields.get(RESERVED_BSON_BINARY_KEY).hasBytesValue(); + } + + static boolean isRegexValue(Map fields) { + if (fields.size() == 1 + && fields.containsKey(RESERVED_REGEX_KEY) + && fields.get(RESERVED_REGEX_KEY).hasMapValue()) { + MapValue innerMapValue = fields.get(RESERVED_REGEX_KEY).getMapValue(); + Map values = innerMapValue.getFieldsMap(); + return innerMapValue.getFieldsCount() == 2 + && values.containsKey(RESERVED_REGEX_PATTERN_KEY) + && values.containsKey(RESERVED_REGEX_OPTIONS_KEY) + && values.get(RESERVED_REGEX_PATTERN_KEY).hasStringValue() + && values.get(RESERVED_REGEX_OPTIONS_KEY).hasStringValue(); + } + return false; + } + + static boolean isBsonTimestamp(Map fields) { + if (fields.size() == 1 + && fields.containsKey(RESERVED_BSON_TIMESTAMP_KEY) + && fields.get(RESERVED_BSON_TIMESTAMP_KEY).hasMapValue()) { + MapValue innerMapValue = fields.get(RESERVED_BSON_TIMESTAMP_KEY).getMapValue(); + Map values = innerMapValue.getFieldsMap(); + return innerMapValue.getFieldsCount() == 2 + && 
values.containsKey(RESERVED_BSON_TIMESTAMP_SECONDS_KEY) + && values.containsKey(RESERVED_BSON_TIMESTAMP_INCREMENT_KEY) + && values.get(RESERVED_BSON_TIMESTAMP_SECONDS_KEY).hasIntegerValue() + && values.get(RESERVED_BSON_TIMESTAMP_INCREMENT_KEY).hasIntegerValue(); + } + return false; } - /** Returns true if the Value represents a VectorValue . */ - public static boolean isVectorValue(Value value) { - return VECTOR_VALUE_TYPE.equals(value.getMapValue().getFieldsMap().get(TYPE_KEY)); + public enum MapRepresentation { + REGEX, + BSON_OBJECT_ID, + INT32, + BSON_TIMESTAMP, + BSON_BINARY, + MIN_KEY, + MAX_KEY, + INTERNAL_MAX, + VECTOR, + SERVER_TIMESTAMP, + REGULAR_MAP + } + + public static MapRepresentation detectMapRepresentation(Value value) { + if (value == null + || value.getMapValue() == null + || value.getMapValue().getFieldsMap() == null) { + return MapRepresentation.REGULAR_MAP; + } + + Map fields = value.getMapValue().getFieldsMap(); + + // Check for type-based mappings + if (fields.containsKey(TYPE_KEY)) { + String typeString = fields.get(TYPE_KEY).getStringValue(); + + if (typeString.equals(RESERVED_VECTOR_KEY)) { + return MapRepresentation.VECTOR; + } + if (typeString.equals(RESERVED_MAX_KEY)) { + return MapRepresentation.INTERNAL_MAX; + } + if (typeString.equals(RESERVED_SERVER_TIMESTAMP_KEY)) { + return MapRepresentation.SERVER_TIMESTAMP; + } + } + + if (fields.size() != 1) { + // All BSON types have 1 key in the map. To improve performance, we can + // return early if the number of keys in the map is not 1. 
+ return MapRepresentation.REGULAR_MAP; + } + + // Check for BSON-related mappings + if (isRegexValue(fields)) { + return MapRepresentation.REGEX; + } + if (isBsonObjectId(fields)) { + return MapRepresentation.BSON_OBJECT_ID; + } + if (isInt32Value(fields)) { + return MapRepresentation.INT32; + } + if (isBsonTimestamp(fields)) { + return MapRepresentation.BSON_TIMESTAMP; + } + if (isBsonBinaryData(fields)) { + return MapRepresentation.BSON_BINARY; + } + if (isMinKey(fields)) { + return MapRepresentation.MIN_KEY; + } + if (isMaxKey(fields)) { + return MapRepresentation.MAX_KEY; + } + + return MapRepresentation.REGULAR_MAP; } } diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/util/CustomClassMapper.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/util/CustomClassMapper.java index 6e0df1e6d4a..074e6ef6a25 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/util/CustomClassMapper.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/util/CustomClassMapper.java @@ -20,13 +20,20 @@ import android.net.Uri; import com.google.firebase.Timestamp; import com.google.firebase.firestore.Blob; +import com.google.firebase.firestore.BsonBinaryData; +import com.google.firebase.firestore.BsonObjectId; +import com.google.firebase.firestore.BsonTimestamp; import com.google.firebase.firestore.DocumentId; import com.google.firebase.firestore.DocumentReference; import com.google.firebase.firestore.Exclude; import com.google.firebase.firestore.FieldValue; import com.google.firebase.firestore.GeoPoint; import com.google.firebase.firestore.IgnoreExtraProperties; +import com.google.firebase.firestore.Int32Value; +import com.google.firebase.firestore.MaxKey; +import com.google.firebase.firestore.MinKey; import com.google.firebase.firestore.PropertyName; +import com.google.firebase.firestore.RegexValue; import com.google.firebase.firestore.ServerTimestamp; import 
com.google.firebase.firestore.ThrowOnExtraProperties; import com.google.firebase.firestore.VectorValue; @@ -175,7 +182,14 @@ private static Object serialize(T o, ErrorPath path) { || o instanceof Blob || o instanceof DocumentReference || o instanceof FieldValue - || o instanceof VectorValue) { + || o instanceof VectorValue + || o instanceof MinKey + || o instanceof MaxKey + || o instanceof RegexValue + || o instanceof Int32Value + || o instanceof BsonTimestamp + || o instanceof BsonObjectId + || o instanceof BsonBinaryData) { return o; } else if (o instanceof Uri || o instanceof URI || o instanceof URL) { return o.toString(); @@ -245,6 +259,20 @@ private static T deserializeToClass(Object o, Class clazz, DeserializeCon return (T) convertDocumentReference(o, context); } else if (VectorValue.class.isAssignableFrom(clazz)) { return (T) convertVectorValue(o, context); + } else if (Int32Value.class.isAssignableFrom(clazz)) { + return (T) convertInt32Value(o, context); + } else if (BsonTimestamp.class.isAssignableFrom(clazz)) { + return (T) convertBsonTimestamp(o, context); + } else if (BsonObjectId.class.isAssignableFrom(clazz)) { + return (T) convertBsonObjectId(o, context); + } else if (BsonBinaryData.class.isAssignableFrom(clazz)) { + return (T) convertBsonBinaryData(o, context); + } else if (MinKey.class.isAssignableFrom(clazz)) { + return (T) convertMinKey(o, context); + } else if (MaxKey.class.isAssignableFrom(clazz)) { + return (T) convertMaxKey(o, context); + } else if (RegexValue.class.isAssignableFrom(clazz)) { + return (T) convertRegexValue(o, context); } else if (clazz.isArray()) { throw deserializeError( context.errorPath, "Converting to Arrays is not supported, please use Lists instead"); @@ -542,6 +570,77 @@ private static VectorValue convertVectorValue(Object o, DeserializeContext conte } } + private static Int32Value convertInt32Value(Object o, DeserializeContext context) { + if (o instanceof Int32Value) { + return (Int32Value) o; + } else { + throw 
deserializeError( + context.errorPath, + "Failed to convert value of type " + o.getClass().getName() + " to Int32Value"); + } + } + + private static BsonTimestamp convertBsonTimestamp(Object o, DeserializeContext context) { + if (o instanceof BsonTimestamp) { + return (BsonTimestamp) o; + } else { + throw deserializeError( + context.errorPath, + "Failed to convert value of type " + o.getClass().getName() + " to BsonTimestamp"); + } + } + + private static BsonObjectId convertBsonObjectId(Object o, DeserializeContext context) { + if (o instanceof BsonObjectId) { + return (BsonObjectId) o; + } else { + throw deserializeError( + context.errorPath, + "Failed to convert value of type " + o.getClass().getName() + " to BsonObjectId"); + } + } + + private static BsonBinaryData convertBsonBinaryData(Object o, DeserializeContext context) { + + if (o instanceof BsonBinaryData) { + return (BsonBinaryData) o; + } else { + throw deserializeError( + context.errorPath, + "Failed to convert value of type " + o.getClass().getName() + " to BsonBinaryData"); + } + } + + private static RegexValue convertRegexValue(Object o, DeserializeContext context) { + if (o instanceof RegexValue) { + return (RegexValue) o; + } else { + throw deserializeError( + context.errorPath, + "Failed to convert value of type " + o.getClass().getName() + " to RegexValue"); + } + } + + private static MinKey convertMinKey(Object o, DeserializeContext context) { + if (o instanceof MinKey) { + return (MinKey) o; + } else { + throw deserializeError( + context.errorPath, + "Failed to convert value of type " + o.getClass().getName() + " to MinKey"); + } + } + + private static MaxKey convertMaxKey(Object o, DeserializeContext context) { + if (o instanceof MaxKey) { + return (MaxKey) o; + } else { + throw deserializeError( + context.errorPath, + "Failed to convert value of type " + o.getClass().getName() + " to MaxKey"); + } + } + private static DocumentReference convertDocumentReference(Object o, DeserializeContext 
context) { if (o instanceof DocumentReference) { return (DocumentReference) o; diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/BsonTypesTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/BsonTypesTest.java new file mode 100644 index 00000000000..34ae59306db --- /dev/null +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/BsonTypesTest.java @@ -0,0 +1,151 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package com.google.firebase.firestore; + +import static com.google.firebase.firestore.testutil.Assert.assertThrows; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.robolectric.RobolectricTestRunner; +import org.robolectric.annotation.Config; + +@RunWith(RobolectricTestRunner.class) +@Config(manifest = Config.NONE) +public class BsonTypesTest { + + @Test + public void testBsonObjectIdEquality() { + BsonObjectId bsonObjectId = new BsonObjectId("507f191e810c19729de860ea"); + BsonObjectId bsonObjectIdDup = new BsonObjectId("507f191e810c19729de860ea"); + BsonObjectId differentObjectId = new BsonObjectId("507f191e810c19729de860eb"); + + assertEquals(bsonObjectId, bsonObjectIdDup); + assertNotEquals(bsonObjectId, differentObjectId); + assertNotEquals(bsonObjectIdDup, differentObjectId); + + assertEquals(bsonObjectId.hashCode(), bsonObjectIdDup.hashCode()); + assertNotEquals(bsonObjectId.hashCode(), differentObjectId.hashCode()); + assertNotEquals(bsonObjectIdDup.hashCode(), differentObjectId.hashCode()); + } + + @Test + public void testBsonTimeStampEquality() { + BsonTimestamp bsonTimestamp = new BsonTimestamp(1, 2); + BsonTimestamp bsonTimestampDup = new BsonTimestamp(1, 2); + BsonTimestamp differentSecondsTimestamp = new BsonTimestamp(2, 2); + BsonTimestamp differentIncrementTimestamp = new BsonTimestamp(1, 3); + + assertEquals(bsonTimestamp, bsonTimestampDup); + assertNotEquals(bsonTimestamp, differentSecondsTimestamp); + assertNotEquals(bsonTimestamp, differentIncrementTimestamp); + assertNotEquals(bsonTimestampDup, differentSecondsTimestamp); + assertNotEquals(bsonTimestampDup, differentIncrementTimestamp); + + assertEquals(bsonTimestamp.hashCode(), bsonTimestampDup.hashCode()); + assertNotEquals(bsonTimestamp.hashCode(), differentSecondsTimestamp.hashCode()); + assertNotEquals(bsonTimestamp.hashCode(), differentIncrementTimestamp.hashCode()); + 
assertNotEquals(bsonTimestampDup.hashCode(), differentSecondsTimestamp.hashCode()); + assertNotEquals(bsonTimestampDup.hashCode(), differentIncrementTimestamp.hashCode()); + } + + @Test + public void testBsonBinaryDataEquality() { + BsonBinaryData bsonBinaryData = BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}); + BsonBinaryData bsonBinaryDataDup = BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}); + BsonBinaryData differentSubtypeBinaryData = BsonBinaryData.fromBytes(2, new byte[] {1, 2, 3}); + BsonBinaryData differentDataBinaryData = BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4}); + + assertEquals(bsonBinaryData, bsonBinaryDataDup); + assertNotEquals(bsonBinaryData, differentSubtypeBinaryData); + assertNotEquals(bsonBinaryData, differentDataBinaryData); + assertNotEquals(bsonBinaryDataDup, differentSubtypeBinaryData); + assertNotEquals(bsonBinaryDataDup, differentDataBinaryData); + + assertEquals(bsonBinaryData.hashCode(), bsonBinaryDataDup.hashCode()); + assertNotEquals(bsonBinaryData.hashCode(), differentSubtypeBinaryData.hashCode()); + assertNotEquals(bsonBinaryData.hashCode(), differentDataBinaryData.hashCode()); + assertNotEquals(bsonBinaryDataDup.hashCode(), differentSubtypeBinaryData.hashCode()); + assertNotEquals(bsonBinaryDataDup.hashCode(), differentDataBinaryData.hashCode()); + } + + @Test + public void testRegexEquality() { + RegexValue regex = new RegexValue("^foo", "i"); + RegexValue regexDup = new RegexValue("^foo", "i"); + RegexValue differentPatternRegex = new RegexValue("^bar", "i"); + RegexValue differentOptionsRegex = new RegexValue("^foo", "m"); + + assertEquals(regex, regexDup); + assertNotEquals(regex, differentPatternRegex); + assertNotEquals(regex, differentOptionsRegex); + assertNotEquals(regexDup, differentPatternRegex); + assertNotEquals(regexDup, differentOptionsRegex); + + assertEquals(regex.hashCode(), regexDup.hashCode()); + assertNotEquals(regex.hashCode(), differentPatternRegex.hashCode()); + 
assertNotEquals(regex.hashCode(), differentOptionsRegex.hashCode()); + assertNotEquals(regexDup.hashCode(), differentPatternRegex.hashCode()); + assertNotEquals(regexDup.hashCode(), differentOptionsRegex.hashCode()); + } + + @Test + public void testInt32Equality() { + Int32Value int32 = new Int32Value(1); + Int32Value int32Dup = new Int32Value(1); + Int32Value differentInt32 = new Int32Value(2); + + assertEquals(int32, int32Dup); + assertNotEquals(int32, differentInt32); + assertNotEquals(int32Dup, differentInt32); + + assertEquals(int32.hashCode(), int32Dup.hashCode()); + assertNotEquals(int32.hashCode(), differentInt32.hashCode()); + assertNotEquals(int32Dup.hashCode(), differentInt32.hashCode()); + } + + @Test + public void testMaxKeyIsSingleton() { + MaxKey maxKey = MaxKey.instance(); + MaxKey maxKeyDup = MaxKey.instance(); + assertEquals(maxKey, maxKeyDup); + assertEquals(maxKey.hashCode(), maxKeyDup.hashCode()); + } + + @Test + public void testMinKeyIsSingleton() { + MinKey minKey = MinKey.instance(); + MinKey minKeyDup = MinKey.instance(); + assertEquals(minKey, minKeyDup); + assertEquals(minKey.hashCode(), minKeyDup.hashCode()); + } + + @Test + public void testMinKeyMaxKeyNullNotEqual() { + MinKey minKey = MinKey.instance(); + MaxKey maxKey = MaxKey.instance(); + assertNotEquals(minKey, maxKey); + assertNotEquals(minKey, null); + assertNotEquals(maxKey, null); + assertNotEquals(minKey.hashCode(), maxKey.hashCode()); + } + + @Test + public void testThrows() { + assertThrows( + IllegalArgumentException.class, () -> BsonBinaryData.fromBytes(256, new byte[] {1})); + } +} diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/FieldValueTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/FieldValueTest.java index 7540c06d2e5..a8837e06b1c 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/FieldValueTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/FieldValueTest.java 
@@ -32,6 +32,21 @@ public void testEquals() { FieldValue deleteDup = FieldValue.delete(); FieldValue serverTimestamp = FieldValue.serverTimestamp(); FieldValue serverTimestampDup = FieldValue.serverTimestamp(); + RegexValue regex = new RegexValue("pattern", "options"); + RegexValue regexDup = new RegexValue("pattern", "options"); + Int32Value int32 = new Int32Value(1); + Int32Value int32Dup = new Int32Value(1); + BsonTimestamp bsonTimestamp = new BsonTimestamp(1, 2); + BsonTimestamp bsonTimestampDup = new BsonTimestamp(1, 2); + BsonObjectId bsonObjectId = new BsonObjectId("507f191e810c19729de860ea"); + BsonObjectId bsonObjectIdDup = new BsonObjectId("507f191e810c19729de860ea"); + BsonBinaryData bsonBinary = BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}); + BsonBinaryData bsonBinaryDup = BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}); + MinKey minKey = MinKey.instance(); + MinKey minKeyDup = MinKey.instance(); + MaxKey maxKey = MaxKey.instance(); + MaxKey maxKeyDup = MaxKey.instance(); + assertEquals(delete, deleteDup); assertEquals(serverTimestamp, serverTimestampDup); assertNotEquals(delete, serverTimestamp); @@ -39,5 +54,77 @@ public void testEquals() { assertEquals(delete.hashCode(), deleteDup.hashCode()); assertEquals(serverTimestamp.hashCode(), serverTimestampDup.hashCode()); assertNotEquals(delete.hashCode(), serverTimestamp.hashCode()); + + // BSON types + assertEquals(regex, regexDup); + assertEquals(int32, int32Dup); + assertEquals(bsonTimestamp, bsonTimestampDup); + assertEquals(bsonObjectId, bsonObjectIdDup); + assertEquals(bsonBinary, bsonBinaryDup); + assertEquals(minKey, minKeyDup); + assertEquals(maxKey, maxKeyDup); + assertNotEquals(delete, serverTimestamp); + + // BSON types are not equal to each other + assertNotEquals(regex, int32); + assertNotEquals(regex, bsonTimestamp); + assertNotEquals(regex, bsonObjectId); + assertNotEquals(regex, bsonBinary); + assertNotEquals(regex, minKey); + assertNotEquals(regex, maxKey); + + 
assertNotEquals(int32, bsonTimestamp); + assertNotEquals(int32, bsonObjectId); + assertNotEquals(int32, bsonBinary); + assertNotEquals(int32, minKey); + assertNotEquals(int32, maxKey); + + assertNotEquals(bsonTimestamp, bsonObjectId); + assertNotEquals(bsonTimestamp, bsonBinary); + assertNotEquals(bsonTimestamp, minKey); + assertNotEquals(bsonTimestamp, maxKey); + + assertNotEquals(bsonObjectId, bsonBinary); + assertNotEquals(bsonObjectId, minKey); + assertNotEquals(bsonObjectId, maxKey); + + assertNotEquals(minKey, maxKey); + + // BSON types hash codes + assertEquals(regex.hashCode(), regexDup.hashCode()); + assertEquals(int32.hashCode(), int32Dup.hashCode()); + assertEquals(bsonTimestamp.hashCode(), bsonTimestampDup.hashCode()); + assertEquals(bsonObjectId.hashCode(), bsonObjectIdDup.hashCode()); + assertEquals(bsonBinary.hashCode(), bsonBinaryDup.hashCode()); + assertEquals(minKey.hashCode(), minKeyDup.hashCode()); + assertEquals(maxKey.hashCode(), maxKeyDup.hashCode()); + + // BSON types hash codes are not equal to each other + assertNotEquals(regex.hashCode(), int32.hashCode()); + assertNotEquals(regex.hashCode(), bsonTimestamp.hashCode()); + assertNotEquals(regex.hashCode(), bsonObjectId.hashCode()); + assertNotEquals(regex.hashCode(), bsonBinary.hashCode()); + assertNotEquals(regex.hashCode(), minKey.hashCode()); + assertNotEquals(regex.hashCode(), maxKey.hashCode()); + + assertNotEquals(int32.hashCode(), bsonTimestamp.hashCode()); + assertNotEquals(int32.hashCode(), bsonObjectId.hashCode()); + assertNotEquals(int32.hashCode(), bsonBinary.hashCode()); + assertNotEquals(int32.hashCode(), minKey.hashCode()); + assertNotEquals(int32.hashCode(), maxKey.hashCode()); + + assertNotEquals(bsonTimestamp.hashCode(), bsonObjectId.hashCode()); + assertNotEquals(bsonTimestamp.hashCode(), bsonBinary.hashCode()); + assertNotEquals(bsonTimestamp.hashCode(), minKey.hashCode()); + assertNotEquals(bsonTimestamp.hashCode(), maxKey.hashCode()); + + 
assertNotEquals(bsonObjectId.hashCode(), bsonBinary.hashCode()); + assertNotEquals(bsonObjectId.hashCode(), minKey.hashCode()); + assertNotEquals(bsonObjectId.hashCode(), maxKey.hashCode()); + + assertNotEquals(bsonBinary.hashCode(), minKey.hashCode()); + assertNotEquals(bsonBinary.hashCode(), maxKey.hashCode()); + + assertNotEquals(minKey.hashCode(), maxKey.hashCode()); } } diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/UserDataWriterTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/UserDataWriterTest.java index a856f316ff1..e6a12ee3e95 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/UserDataWriterTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/UserDataWriterTest.java @@ -34,6 +34,7 @@ import com.google.firebase.firestore.model.Values; import com.google.firestore.v1.ArrayValue; import com.google.firestore.v1.Value; +import com.google.protobuf.ByteString; import java.util.Date; import java.util.HashMap; import java.util.List; @@ -220,6 +221,81 @@ public void testConvertsGeoPointValue() { } } + @Test + public void testConvertsBsonObjectIdValue() { + List testCases = asList(new BsonObjectId("foo"), new BsonObjectId("bar")); + for (BsonObjectId p : testCases) { + Value value = wrap(p); + Object convertedValue = convertValue(value); + assertEquals(p, convertedValue); + } + } + + @Test + public void testConvertsBsonTimestampValue() { + List testCases = asList(new BsonTimestamp(1, 2), new BsonTimestamp(3, 4)); + for (BsonTimestamp p : testCases) { + Value value = wrap(p); + Object convertedValue = convertValue(value); + assertEquals(p, convertedValue); + } + } + + @Test + public void testConvertsBsonBinaryValue() { + List testCases = + asList( + BsonBinaryData.fromBytes(1, new byte[] {1, 2}), + BsonBinaryData.fromByteString(1, ByteString.EMPTY), + BsonBinaryData.fromBytes(1, new byte[] {1, 2})); + for (BsonBinaryData p : testCases) { + Value value = wrap(p); + 
Object convertedValue = convertValue(value); + assertEquals(p, convertedValue); + } + } + + @Test + public void testConvertsRegexValue() { + List testCases = asList(new RegexValue("^foo", "i"), new RegexValue("^bar", "g")); + for (RegexValue p : testCases) { + Value value = wrap(p); + Object convertedValue = convertValue(value); + assertEquals(p, convertedValue); + } + } + + @Test + public void testConvertsInt32Value() { + List testCases = + asList(new Int32Value(1), new Int32Value(-1), new Int32Value(0), new Int32Value(123)); + for (Int32Value p : testCases) { + Value value = wrap(p); + Object convertedValue = convertValue(value); + assertEquals(p, convertedValue); + } + } + + @Test + public void testConvertsMinKey() { + List testCases = asList(MinKey.instance(), MinKey.instance()); + for (MinKey p : testCases) { + Value value = wrap(p); + Object convertedValue = convertValue(value); + assertEquals(p, convertedValue); + } + } + + @Test + public void testConvertsMaxKey() { + List testCases = asList(MaxKey.instance(), MaxKey.instance()); + for (MaxKey p : testCases) { + Value value = wrap(p); + Object convertedValue = convertValue(value); + assertEquals(p, convertedValue); + } + } + @Test public void testConvertsEmptyObjects() { assertEquals(wrapObject(), new ObjectValue()); diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/bundle/BundleSerializerTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/bundle/BundleSerializerTest.java index 355833a6586..6e0186adac8 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/bundle/BundleSerializerTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/bundle/BundleSerializerTest.java @@ -30,6 +30,7 @@ import com.google.firebase.firestore.model.ObjectValue; import com.google.firebase.firestore.model.ResourcePath; import com.google.firebase.firestore.model.SnapshotVersion; +import com.google.firebase.firestore.model.Values; import 
com.google.firebase.firestore.remote.RemoteSerializer; import com.google.firebase.firestore.testutil.TestUtil; import com.google.firestore.v1.ArrayValue; @@ -215,6 +216,109 @@ public void testDecodesReferenceValues() throws JSONException { assertDecodesValue(json, proto.build()); } + @Test + public void testDecodesBsonObjectIdValues() throws JSONException { + String json = "{ mapValue: { fields: { __oid__: { stringValue: 'foo' } } } }"; + Value.Builder proto = Value.newBuilder(); + proto.setMapValue( + MapValue.newBuilder() + .putFields( + Values.RESERVED_OBJECT_ID_KEY, Value.newBuilder().setStringValue("foo").build())); + + assertDecodesValue(json, proto.build()); + } + + @Test + public void testDecodesBsonTimestampValues() throws JSONException { + String json = + "{ mapValue: { fields: { __request_timestamp__: { mapValue: { fields: { seconds: { integerValue: 12345 }, increment: { integerValue: 67 } } } } } } }"; + Value.Builder proto = Value.newBuilder(); + proto.setMapValue( + MapValue.newBuilder() + .putFields( + Values.RESERVED_BSON_TIMESTAMP_KEY, + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + Values.RESERVED_BSON_TIMESTAMP_SECONDS_KEY, + Value.newBuilder().setIntegerValue(12345).build()) + .putFields( + Values.RESERVED_BSON_TIMESTAMP_INCREMENT_KEY, + Value.newBuilder().setIntegerValue(67).build())) + .build())); + assertDecodesValue(json, proto.build()); + } + + @Test + public void testDecodesBsonBinaryDataValues() throws JSONException { + String json = "{ mapValue: { fields: { __binary__: { bytesValue: 'AAECAw==' } } } }"; + Value.Builder proto = Value.newBuilder(); + proto.setMapValue( + MapValue.newBuilder() + .putFields( + Values.RESERVED_BSON_BINARY_KEY, + Value.newBuilder().setBytesValue(TestUtil.byteString(0, 1, 2, 3)).build())); + assertDecodesValue(json, proto.build()); + } + + @Test + public void testDecodesRegexValues() throws JSONException { + String json = + "{ mapValue: { fields: { __regex__: { mapValue: { fields: { 
pattern: { stringValue: '^foo' }, options: { stringValue: 'i' } } } } } } }"; + Value.Builder proto = Value.newBuilder(); + proto.setMapValue( + MapValue.newBuilder() + .putFields( + Values.RESERVED_REGEX_KEY, + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + Values.RESERVED_REGEX_PATTERN_KEY, + Value.newBuilder().setStringValue("^foo").build()) + .putFields( + Values.RESERVED_REGEX_OPTIONS_KEY, + Value.newBuilder().setStringValue("i").build())) + .build())); + assertDecodesValue(json, proto.build()); + } + + @Test + public void testDecodesInt32Values() throws JSONException { + String json = "{ mapValue: { fields: { __int__: { integerValue: 12345 } } } }"; + Value.Builder proto = Value.newBuilder(); + proto.setMapValue( + MapValue.newBuilder() + .putFields( + Values.RESERVED_INT32_KEY, Value.newBuilder().setIntegerValue(12345).build())); + assertDecodesValue(json, proto.build()); + } + + @Test + public void testDecodesMinKey() throws JSONException { + String json = "{ mapValue: { fields: { __min__: { nullValue: null } } } }"; + Value.Builder proto = Value.newBuilder(); + proto.setMapValue( + MapValue.newBuilder() + .putFields( + Values.RESERVED_MIN_KEY, + Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build())); + assertDecodesValue(json, proto.build()); + } + + @Test + public void testDecodesMaxKey() throws JSONException { + String json = "{ mapValue: { fields: { __max__: { nullValue: null } } } }"; + Value.Builder proto = Value.newBuilder(); + proto.setMapValue( + MapValue.newBuilder() + .putFields( + Values.RESERVED_MAX_KEY, + Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build())); + assertDecodesValue(json, proto.build()); + } + @Test public void testDecodesArrayValues() throws JSONException { String json = diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/core/QueryTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/core/QueryTest.java index cdc932bfa01..0f8e6c22389 
100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/core/QueryTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/core/QueryTest.java @@ -34,7 +34,14 @@ import com.google.firebase.Timestamp; import com.google.firebase.firestore.Blob; +import com.google.firebase.firestore.BsonBinaryData; +import com.google.firebase.firestore.BsonObjectId; +import com.google.firebase.firestore.BsonTimestamp; import com.google.firebase.firestore.GeoPoint; +import com.google.firebase.firestore.Int32Value; +import com.google.firebase.firestore.MaxKey; +import com.google.firebase.firestore.MinKey; +import com.google.firebase.firestore.RegexValue; import com.google.firebase.firestore.model.DocumentKey; import com.google.firebase.firestore.model.MutableDocument; import com.google.firebase.firestore.model.ResourcePath; @@ -839,6 +846,29 @@ public void testCanonicalIdsAreStable() { "collection|f:|ob:aasc__name__asc|ub:a:foo,[1,2,3]"); assertCanonicalId(baseQuery.limitToFirst(5), "collection|f:|ob:__name__asc|l:5"); assertCanonicalId(baseQuery.limitToLast(5), "collection|f:|ob:__name__desc|l:5"); + + // BSON types + assertCanonicalId( + baseQuery.filter(filter("a", "<=", new BsonObjectId("foo"))), + "collection|f:a<={__oid__:foo}|ob:aasc__name__asc"); + assertCanonicalId( + baseQuery.filter(filter("a", "<=", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))), + "collection|f:a<={__binary__:01010203}|ob:aasc__name__asc"); + assertCanonicalId( + baseQuery.filter(filter("a", "<=", new BsonTimestamp(1, 2))), + "collection|f:a<={__request_timestamp__:{increment:2,seconds:1}}|ob:aasc__name__asc"); + assertCanonicalId( + baseQuery.filter(filter("a", "<=", new RegexValue("^foo", "i"))), + "collection|f:a<={__regex__:{options:i,pattern:^foo}}|ob:aasc__name__asc"); + assertCanonicalId( + baseQuery.filter(filter("a", "<=", new Int32Value(1))), + "collection|f:a<={__int__:1}|ob:aasc__name__asc"); + assertCanonicalId( + baseQuery.filter(filter("a", "<=", 
MinKey.instance())), + "collection|f:a<={__min__:null}|ob:aasc__name__asc"); + assertCanonicalId( + baseQuery.filter(filter("a", "<=", MaxKey.instance())), + "collection|f:a<={__max__:null}|ob:aasc__name__asc"); } private void assertCanonicalId(Query query, String expectedCanonicalId) { diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/core/TargetTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/core/TargetTest.java index bad5ee427fa..f5d89ef55d8 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/core/TargetTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/core/TargetTest.java @@ -151,12 +151,12 @@ public void orderByQueryBound() { Bound lowerBound = target.getLowerBound(index); assertEquals(1, lowerBound.getPosition().size()); - assertTrue(Values.equals(lowerBound.getPosition().get(0), Values.MIN_VALUE)); + assertTrue(Values.equals(lowerBound.getPosition().get(0), Values.INTERNAL_MIN_VALUE)); assertTrue(lowerBound.isInclusive()); Bound upperBound = target.getUpperBound(index); assertEquals(1, upperBound.getPosition().size()); - assertTrue(Values.equals(upperBound.getPosition().get(0), Values.MAX_VALUE)); + assertTrue(Values.equals(upperBound.getPosition().get(0), Values.INTERNAL_MAX_VALUE)); assertTrue(upperBound.isInclusive()); } @@ -183,7 +183,7 @@ public void startAtQueryBound() { Bound upperBound = target.getUpperBound(index); assertEquals(1, upperBound.getPosition().size()); - assertTrue(Values.equals(upperBound.getPosition().get(0), Values.MAX_VALUE)); + assertTrue(Values.equals(upperBound.getPosition().get(0), Values.INTERNAL_MAX_VALUE)); assertTrue(upperBound.isInclusive()); } @@ -259,7 +259,7 @@ public void endAtQueryBound() { Bound lowerBound = target.getLowerBound(index); assertEquals(1, lowerBound.getPosition().size()); - assertTrue(Values.equals(lowerBound.getPosition().get(0), Values.MIN_VALUE)); + 
assertTrue(Values.equals(lowerBound.getPosition().get(0), Values.INTERNAL_MIN_VALUE)); assertTrue(lowerBound.isInclusive()); Bound upperBound = target.getUpperBound(index); diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/index/FirestoreIndexValueWriterTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/index/FirestoreIndexValueWriterTest.java index 6acb576666a..6f1eea8cbbb 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/index/FirestoreIndexValueWriterTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/index/FirestoreIndexValueWriterTest.java @@ -20,6 +20,7 @@ import com.google.firebase.firestore.model.DatabaseId; import com.google.firebase.firestore.model.FieldIndex; import com.google.firestore.v1.Value; +import com.google.protobuf.ByteString; import java.util.concurrent.ExecutionException; import org.junit.Assert; import org.junit.Test; @@ -100,4 +101,281 @@ public void writeIndexValueSupportsEmptyVector() { // Assert actual and expected encodings are equal Assert.assertArrayEquals(actualBytes, expectedBytes); } + + @Test + public void writeIndexValueSupportsBsonObjectId() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(new BsonObjectId("507f191e810c19729de860ea")); + + // Encode an actual ObjectIdValue + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_BSON_OBJECT_ID); // ObjectId type + 
expectedDirectionalEncoder.writeBytes( + ByteString.copyFrom("507f191e810c19729de860ea".getBytes())); // ObjectId value + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsBsonBinaryData() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})); + + // Encode an actual BSONBinaryDataValue + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_BSON_BINARY); // BSONBinaryData type + expectedDirectionalEncoder.writeBytes( + ByteString.copyFrom(new byte[] {1, 1, 2, 3})); // BSONBinaryData value + expectedDirectionalEncoder.writeLong(FirestoreIndexValueWriter.NOT_TRUNCATED); + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsBsonBinaryWithEmptyData() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(BsonBinaryData.fromBytes(1, new byte[] {})); + + // Encode an actual BSONBinaryDataValue + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] 
actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_BSON_BINARY); // BSONBinaryData type + expectedDirectionalEncoder.writeBytes( + ByteString.copyFrom(new byte[] {1})); // BSONBinaryData value + expectedDirectionalEncoder.writeLong(FirestoreIndexValueWriter.NOT_TRUNCATED); + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsBsonTimestamp() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(new BsonTimestamp(1, 2)); + + // Encode an actual BSONTimestampValue + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_BSON_TIMESTAMP); // BSONTimestamp type + expectedDirectionalEncoder.writeLong(1L << 32 | 2 & 0xFFFFFFFFL); // BSONTimestamp value + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsLargestBsonTimestamp() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = 
dataReader.parseQueryValue(new BsonTimestamp(4294967295L, 4294967295L)); + + // Encode an actual BSONTimestampValue + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_BSON_TIMESTAMP); // BSONTimestamp type + expectedDirectionalEncoder.writeLong( + 4294967295L << 32 | 4294967295L & 0xFFFFFFFFL); // BSONTimestamp value + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsSmallestBsonTimestamp() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(new BsonTimestamp(0, 0)); + + // Encode an actual BSONTimestampValue + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_BSON_TIMESTAMP); // BSONTimestamp type + expectedDirectionalEncoder.writeLong(0L << 32 | 0 & 0xFFFFFFFFL); // BSONTimestamp value + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + 
} + + @Test + public void writeIndexValueSupportsRegex() throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(new RegexValue("^foo", "i")); + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong(FirestoreIndexValueWriter.INDEX_TYPE_REGEX); // Regex type + expectedDirectionalEncoder.writeString("^foo"); // Regex pattern + expectedDirectionalEncoder.writeString("i"); // Regex options + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.NOT_TRUNCATED); // writeTruncationMarker + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsInt32() throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(new Int32Value(1)); + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_NUMBER); // Number type + expectedDirectionalEncoder.writeDouble(1); // Number value + 
expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsLargestInt32() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(new Int32Value(2147483647)); + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_NUMBER); // Number type + expectedDirectionalEncoder.writeDouble(2147483647); // Number value + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsSmallestInt32() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(new Int32Value(-2147483648)); + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_NUMBER); // Number type + expectedDirectionalEncoder.writeDouble(-2147483648); // Number value + 
expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsMinKey() throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(MinKey.instance()); + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_MIN_KEY); // MinKey type + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsMaxKey() throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(MaxKey.instance()); + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_MAX_KEY); // MaxKey type + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + Assert.assertArrayEquals(actualBytes, expectedBytes); + } } diff 
--git a/firebase-firestore/src/test/java/com/google/firebase/firestore/local/LocalStoreTestCase.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/local/LocalStoreTestCase.java index 21823b1af42..499a60a734a 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/local/LocalStoreTestCase.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/local/LocalStoreTestCase.java @@ -63,6 +63,7 @@ import com.google.firebase.firestore.core.Target; import com.google.firebase.firestore.model.Document; import com.google.firebase.firestore.model.DocumentKey; +import com.google.firebase.firestore.model.DocumentSet; import com.google.firebase.firestore.model.FieldIndex; import com.google.firebase.firestore.model.MutableDocument; import com.google.firebase.firestore.model.ResourcePath; @@ -111,7 +112,7 @@ public abstract class LocalStoreTestCase { private List batches; private @Nullable ImmutableSortedMap lastChanges; - private @Nullable QueryResult lastQueryResult; + private @Nullable DocumentSet lastQueryResult; private int lastTargetId; abstract Persistence getPersistence(); @@ -214,7 +215,11 @@ protected int allocateQuery(Query query) { protected void executeQuery(Query query) { resetPersistenceStats(); - lastQueryResult = localStore.executeQuery(query, /* usePreviousResults= */ true); + QueryResult queryResult = localStore.executeQuery(query, /* usePreviousResults= */ true); + lastQueryResult = DocumentSet.emptySet(query.comparator()); + for (Entry entry : queryResult.getDocuments()) { + lastQueryResult = lastQueryResult.add(entry.getValue()); + } } protected void setIndexAutoCreationEnabled(boolean isEnabled) { @@ -310,8 +315,12 @@ private void assertNotContains(String keyPathString) { protected void assertQueryReturned(String... 
keys) { assertNotNull(lastQueryResult); - ImmutableSortedMap documents = lastQueryResult.getDocuments(); - assertThat(keys(documents)).containsExactly(Arrays.stream(keys).map(TestUtil::key).toArray()); + assertEquals(lastQueryResult.size(), keys.length); + List expectedKeys = + Arrays.stream(keys).map(TestUtil::key).collect(Collectors.toList()); + List actualKeys = + lastQueryResult.toList().stream().map(Document::getKey).collect(Collectors.toList()); + assertEquals(expectedKeys, actualKeys); } private void assertQueryDocumentMapping(int targetId, DocumentKey... keys) { diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteIndexManagerTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteIndexManagerTest.java index 57b0fc36ae4..fce0404c342 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteIndexManagerTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteIndexManagerTest.java @@ -18,6 +18,7 @@ import static com.google.firebase.firestore.model.FieldIndex.IndexState; import static com.google.firebase.firestore.model.FieldIndex.Segment.Kind; import static com.google.firebase.firestore.testutil.TestUtil.andFilters; +import static com.google.firebase.firestore.testutil.TestUtil.blob; import static com.google.firebase.firestore.testutil.TestUtil.bound; import static com.google.firebase.firestore.testutil.TestUtil.deletedDoc; import static com.google.firebase.firestore.testutil.TestUtil.doc; @@ -30,12 +31,22 @@ import static com.google.firebase.firestore.testutil.TestUtil.orderBy; import static com.google.firebase.firestore.testutil.TestUtil.path; import static com.google.firebase.firestore.testutil.TestUtil.query; +import static com.google.firebase.firestore.testutil.TestUtil.ref; import static com.google.firebase.firestore.testutil.TestUtil.version; import static com.google.firebase.firestore.testutil.TestUtil.wrap; import static 
org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; +import com.google.firebase.Timestamp; +import com.google.firebase.firestore.BsonBinaryData; +import com.google.firebase.firestore.BsonObjectId; +import com.google.firebase.firestore.BsonTimestamp; +import com.google.firebase.firestore.GeoPoint; +import com.google.firebase.firestore.Int32Value; +import com.google.firebase.firestore.MaxKey; +import com.google.firebase.firestore.MinKey; +import com.google.firebase.firestore.RegexValue; import com.google.firebase.firestore.auth.User; import com.google.firebase.firestore.core.Filter; import com.google.firebase.firestore.core.Query; @@ -1233,6 +1244,352 @@ public void TestCreateTargetIndexesUpgradesPartialIndexToFullIndex() { validateIndexType(subQuery2, IndexManager.IndexType.NONE); } + @Test + public void testIndexesBsonObjectId() { + indexManager.addFieldIndex( + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); + + addDoc("coll/doc1", map("key", new BsonObjectId("507f191e810c19729de860ea"))); + addDoc("coll/doc2", map("key", new BsonObjectId("507f191e810c19729de860eb"))); + addDoc("coll/doc3", map("key", new BsonObjectId("507f191e810c19729de860ec"))); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + verifyResults(query, "coll/doc1", "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "==", new BsonObjectId("507f191e810c19729de860ea"))); + verifyResults(query, "coll/doc1"); + + query = query("coll").filter(filter("key", "!=", new BsonObjectId("507f191e810c19729de860ea"))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", ">=", new BsonObjectId("507f191e810c19729de860eb"))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "<=", new BsonObjectId("507f191e810c19729de860eb"))); + verifyResults(query, "coll/doc1", "coll/doc2"); + 
+ query = query("coll").filter(filter("key", ">", new BsonObjectId("507f191e810c19729de860eb"))); + verifyResults(query, "coll/doc3"); + + query = query("coll").filter(filter("key", "<", new BsonObjectId("507f191e810c19729de860eb"))); + verifyResults(query, "coll/doc1"); + + query = query("coll").filter(filter("key", ">", new BsonObjectId("507f191e810c19729de860ec"))); + verifyResults(query); + + query = query("coll").filter(filter("key", "<", new BsonObjectId("507f191e810c19729de860ea"))); + verifyResults(query); + } + + @Test + public void testIndexesBsonBinary() { + indexManager.addFieldIndex( + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); + + addDoc("coll/doc1", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); + addDoc("coll/doc2", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4}))); + addDoc("coll/doc3", map("key", BsonBinaryData.fromBytes(1, new byte[] {2, 1, 2}))); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + verifyResults(query, "coll/doc1", "coll/doc2", "coll/doc3"); + + query = + query("coll") + .filter(filter("key", "==", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); + verifyResults(query, "coll/doc1"); + + query = + query("coll") + .filter(filter("key", "!=", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = + query("coll") + .filter(filter("key", ">=", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4}))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = + query("coll") + .filter(filter("key", "<=", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4}))); + verifyResults(query, "coll/doc1", "coll/doc2"); + + query = + query("coll").filter(filter("key", ">", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4}))); + verifyResults(query, "coll/doc3"); + + query = + query("coll").filter(filter("key", "<", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4}))); + verifyResults(query, 
"coll/doc1"); + + query = + query("coll").filter(filter("key", ">", BsonBinaryData.fromBytes(1, new byte[] {2, 1, 2}))); + verifyResults(query); + + query = + query("coll").filter(filter("key", "<", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); + verifyResults(query); + } + + @Test + public void testIndexesBsonTimestamp() { + indexManager.addFieldIndex( + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); + + addDoc("coll/doc1", map("key", new BsonTimestamp(1, 1))); + addDoc("coll/doc2", map("key", new BsonTimestamp(1, 2))); + addDoc("coll/doc3", map("key", new BsonTimestamp(2, 1))); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + verifyResults(query, "coll/doc1", "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "==", new BsonTimestamp(1, 1))); + verifyResults(query, "coll/doc1"); + + query = query("coll").filter(filter("key", "!=", new BsonTimestamp(1, 1))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", ">=", new BsonTimestamp(1, 2))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "<=", new BsonTimestamp(1, 2))); + verifyResults(query, "coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", ">", new BsonTimestamp(1, 2))); + verifyResults(query, "coll/doc3"); + + query = query("coll").filter(filter("key", "<", new BsonTimestamp(1, 2))); + verifyResults(query, "coll/doc1"); + + query = query("coll").filter(filter("key", ">", new BsonTimestamp(2, 1))); + verifyResults(query); + + query = query("coll").filter(filter("key", "<", new BsonTimestamp(1, 1))); + verifyResults(query); + } + + @Test + public void testIndexesRegex() { + indexManager.addFieldIndex( + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); + + addDoc("coll/doc1", map("key", new RegexValue("a", "i"))); + addDoc("coll/doc2", map("key", new RegexValue("a", 
"m"))); + addDoc("coll/doc3", map("key", new RegexValue("b", "i"))); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + verifyResults(query, "coll/doc1", "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "==", new RegexValue("a", "i"))); + verifyResults(query, "coll/doc1"); + + query = query("coll").filter(filter("key", "!=", new RegexValue("a", "i"))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", ">=", new RegexValue("a", "m"))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "<=", new RegexValue("a", "m"))); + verifyResults(query, "coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", ">", new RegexValue("a", "m"))); + verifyResults(query, "coll/doc3"); + + query = query("coll").filter(filter("key", "<", new RegexValue("a", "m"))); + verifyResults(query, "coll/doc1"); + + query = query("coll").filter(filter("key", ">", new RegexValue("b", "i"))); + verifyResults(query); + + query = query("coll").filter(filter("key", "<", new RegexValue("a", "i"))); + verifyResults(query); + } + + @Test + public void testIndexesInt32() { + indexManager.addFieldIndex( + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); + + addDoc("coll/doc1", map("key", new Int32Value(1))); + addDoc("coll/doc2", map("key", new Int32Value(2))); + addDoc("coll/doc3", map("key", new Int32Value(3))); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + verifyResults(query, "coll/doc1", "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "==", new Int32Value(1))); + verifyResults(query, "coll/doc1"); + + query = query("coll").filter(filter("key", "!=", new Int32Value(1))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", ">=", new Int32Value(2))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = 
query("coll").filter(filter("key", "<=", new Int32Value(2))); + verifyResults(query, "coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", ">", new Int32Value(2))); + verifyResults(query, "coll/doc3"); + + query = query("coll").filter(filter("key", "<", new Int32Value(2))); + verifyResults(query, "coll/doc1"); + + query = query("coll").filter(filter("key", ">", new Int32Value(3))); + verifyResults(query); + + query = query("coll").filter(filter("key", "<", new Int32Value(1))); + verifyResults(query); + } + + @Test + public void testIndexesMinKey() { + indexManager.addFieldIndex( + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); + addDoc("coll/doc1", map("key", MinKey.instance())); + addDoc("coll/doc2", map("key", MinKey.instance())); + addDoc("coll/doc3", map("key", null)); + addDoc("coll/doc4", map("key", 1)); + addDoc("coll/doc5", map("key", MaxKey.instance())); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + verifyResults(query, "coll/doc3", "coll/doc1", "coll/doc2", "coll/doc4", "coll/doc5"); + + query = query("coll").filter(filter("key", "==", MinKey.instance())); + verifyResults(query, "coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", "!=", MinKey.instance())); + verifyResults(query, "coll/doc4", "coll/doc5"); + + query = query("coll").filter(filter("key", ">=", MinKey.instance())); + verifyResults(query, "coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", "<=", MinKey.instance())); + verifyResults(query, "coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", ">", MinKey.instance())); + verifyResults(query); + + query = query("coll").filter(filter("key", "<", MinKey.instance())); + verifyResults(query); + } + + @Test + public void testIndexesMaxKey() { + indexManager.addFieldIndex( + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); + addDoc("coll/doc1", map("key", 
MinKey.instance())); + addDoc("coll/doc2", map("key", 1)); + addDoc("coll/doc3", map("key", MaxKey.instance())); + addDoc("coll/doc4", map("key", MaxKey.instance())); + addDoc("coll/doc5", map("key", null)); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + verifyResults(query, "coll/doc5", "coll/doc1", "coll/doc2", "coll/doc3", "coll/doc4"); + + query = query("coll").filter(filter("key", "==", MaxKey.instance())); + verifyResults(query, "coll/doc3", "coll/doc4"); + + query = query("coll").filter(filter("key", "!=", MaxKey.instance())); + verifyResults(query, "coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", ">=", MaxKey.instance())); + verifyResults(query, "coll/doc3", "coll/doc4"); + + query = query("coll").filter(filter("key", "<=", MaxKey.instance())); + verifyResults(query, "coll/doc3", "coll/doc4"); + + query = query("coll").filter(filter("key", ">", MaxKey.instance())); + verifyResults(query); + + query = query("coll").filter(filter("key", "<", MaxKey.instance())); + verifyResults(query); + } + + @Test + public void testIndexFieldsOfBsonTypesTogether() { + indexManager.addFieldIndex(fieldIndex("coll", "key", Kind.DESCENDING)); + + addDoc("coll/doc1", map("key", MinKey.instance())); + addDoc("coll/doc2", map("key", new Int32Value(2))); + addDoc("coll/doc3", map("key", new Int32Value(1))); + addDoc("coll/doc4", map("key", new BsonTimestamp(1, 2))); + addDoc("coll/doc5", map("key", new BsonTimestamp(1, 1))); + addDoc("coll/doc6", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4}))); + addDoc("coll/doc7", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); + addDoc("coll/doc8", map("key", new BsonObjectId("507f191e810c19729de860eb"))); + addDoc("coll/doc9", map("key", new BsonObjectId("507f191e810c19729de860ea"))); + addDoc("coll/doc10", map("key", new RegexValue("a", "m"))); + addDoc("coll/doc11", map("key", new RegexValue("a", "i"))); + addDoc("coll/doc12", map("key", MaxKey.instance())); + + Query 
query = query("coll").orderBy(orderBy("key", "desc")); + verifyResults( + query, + "coll/doc12", // maxKey + "coll/doc10", // regex m + "coll/doc11", // regex i + "coll/doc8", // objectId eb + "coll/doc9", // objectId ea + "coll/doc6", // binary [1,2,4] + "coll/doc7", // binary [1,2,3] + "coll/doc4", // timestamp 1,2 + "coll/doc5", // timestamp 1,1 + "coll/doc2", // int32 2 + "coll/doc3", // int32 1 + "coll/doc1" // minKey + ); + } + + @Test + public void testIndexFieldsOfAllTypesTogether() { + indexManager.addFieldIndex(fieldIndex("coll", "key", Kind.DESCENDING)); + + addDoc("coll/a", map("key", null)); + addDoc("coll/b", map("key", MinKey.instance())); + addDoc("coll/c", map("key", true)); + addDoc("coll/d", map("key", Double.NaN)); + addDoc("coll/e", map("key", new Int32Value(1))); + addDoc("coll/f", map("key", 2.0)); + addDoc("coll/g", map("key", 3L)); + addDoc("coll/h", map("key", new Timestamp(100, 123456000))); + addDoc("coll/i", map("key", new BsonTimestamp(1, 2))); + addDoc("coll/j", map("key", "string")); + addDoc("coll/k", map("key", blob(1, 2, 3))); + addDoc("coll/l", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); + addDoc("coll/m", map("key", ref("foo/bar"))); + addDoc("coll/n", map("key", new BsonObjectId("507f191e810c19729de860ea"))); + addDoc("coll/o", map("key", new GeoPoint(0, 1))); + addDoc("coll/p", map("key", new RegexValue("^foo", "i"))); + addDoc("coll/q", map("key", Arrays.asList(1, 2))); + // Note: Vector type not available in Java SDK, skipping 'r' + addDoc("coll/s", map("key", map("a", 1))); + addDoc("coll/t", map("key", MaxKey.instance())); + + Query query = query("coll").orderBy(orderBy("key", "desc")); + verifyResults( + query, + "coll/t", // maxKey + "coll/s", // map + "coll/q", // array + "coll/p", // regex + "coll/o", // geopoint + "coll/n", // objectId + "coll/m", // reference + "coll/l", // bsonBinary + "coll/k", // bytes + "coll/j", // string + "coll/i", // bsonTimestamp + "coll/h", // timestamp + "coll/g", // 
long + "coll/f", // double + "coll/e", // int32 + "coll/d", // NaN + "coll/c", // boolean + "coll/b", // minKey + "coll/a" // null + ); + } + private void validateIndexType(Query query, IndexManager.IndexType expected) { IndexManager.IndexType indexType = indexManager.getIndexType(query.toTarget()); assertEquals(indexType, expected); diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteLocalStoreTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteLocalStoreTest.java index 63569e6dc85..1edbef0474c 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteLocalStoreTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteLocalStoreTest.java @@ -16,6 +16,7 @@ import static com.google.common.truth.Truth.assertThat; import static com.google.firebase.firestore.testutil.TestUtil.addedRemoteEvent; +import static com.google.firebase.firestore.testutil.TestUtil.blob; import static com.google.firebase.firestore.testutil.TestUtil.deleteMutation; import static com.google.firebase.firestore.testutil.TestUtil.deletedDoc; import static com.google.firebase.firestore.testutil.TestUtil.doc; @@ -27,6 +28,7 @@ import static com.google.firebase.firestore.testutil.TestUtil.orFilters; import static com.google.firebase.firestore.testutil.TestUtil.orderBy; import static com.google.firebase.firestore.testutil.TestUtil.query; +import static com.google.firebase.firestore.testutil.TestUtil.ref; import static com.google.firebase.firestore.testutil.TestUtil.setMutation; import static com.google.firebase.firestore.testutil.TestUtil.updateRemoteEvent; import static com.google.firebase.firestore.testutil.TestUtil.version; @@ -34,7 +36,15 @@ import static java.util.Collections.singletonList; import com.google.firebase.Timestamp; +import com.google.firebase.firestore.BsonBinaryData; +import com.google.firebase.firestore.BsonObjectId; +import 
com.google.firebase.firestore.BsonTimestamp; import com.google.firebase.firestore.FieldValue; +import com.google.firebase.firestore.GeoPoint; +import com.google.firebase.firestore.Int32Value; +import com.google.firebase.firestore.MaxKey; +import com.google.firebase.firestore.MinKey; +import com.google.firebase.firestore.RegexValue; import com.google.firebase.firestore.core.Query; import com.google.firebase.firestore.model.DocumentKey; import com.google.firebase.firestore.model.FieldIndex; @@ -367,6 +377,836 @@ public void testIndexesVectorValues() { assertQueryReturned("coll/doc4", "coll/doc3"); } + @Test + public void testIndexesBsonObjectId() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); + configureFieldIndexes(singletonList(index)); + + writeMutation( + setMutation("coll/doc1", map("key", new BsonObjectId("507f191e810c19729de860ea")))); + writeMutation( + setMutation("coll/doc2", map("key", new BsonObjectId("507f191e810c19729de860eb")))); + writeMutation( + setMutation("coll/doc3", map("key", new BsonObjectId("507f191e810c19729de860ec")))); + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "==", new BsonObjectId("507f191e810c19729de860ea"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + assertOverlayTypes(keyMap("coll/doc1", CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1"); + + query = query("coll").filter(filter("key", "!=", new BsonObjectId("507f191e810c19729de860ea"))); + executeQuery(query); + 
assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", ">=", new BsonObjectId("507f191e810c19729de860eb"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "<=", new BsonObjectId("507f191e810c19729de860eb"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", ">", new BsonObjectId("507f191e810c19729de860ec"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = query("coll").filter(filter("key", "<", new BsonObjectId("507f191e810c19729de860ea"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = + query("coll") + .filter( + filter( + "key", + "in", + Arrays.asList( + new BsonObjectId("507f191e810c19729de860ea"), + new BsonObjectId("507f191e810c19729de860eb")))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2"); + } + + @Test + public void testIndexesBsonTimestamp() { + 
FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); + configureFieldIndexes(singletonList(index)); + + writeMutation(setMutation("coll/doc1", map("key", new BsonTimestamp(1000, 1000)))); + writeMutation(setMutation("coll/doc2", map("key", new BsonTimestamp(1001, 1000)))); + writeMutation(setMutation("coll/doc3", map("key", new BsonTimestamp(1000, 1001)))); + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc3", "coll/doc2"); + + query = query("coll").filter(filter("key", "==", new BsonTimestamp(1000, 1000))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + assertOverlayTypes(keyMap("coll/doc1", CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1"); + + query = query("coll").filter(filter("key", "!=", new BsonTimestamp(1000, 1000))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc3", "coll/doc2"); + + query = query("coll").filter(filter("key", ">=", new BsonTimestamp(1000, 1001))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc3", "coll/doc2"); + + query = query("coll").filter(filter("key", "<=", new BsonTimestamp(1000, 1001))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 
2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc3"); + + query = query("coll").filter(filter("key", ">", new BsonTimestamp(1001, 1000))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = query("coll").filter(filter("key", "<", new BsonTimestamp(1000, 1000))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = + query("coll") + .filter( + filter( + "key", + "in", + Arrays.asList(new BsonTimestamp(1000, 1000), new BsonTimestamp(1000, 1001)))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc3"); + } + + @Test + public void testIndexesBsonBinary() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); + configureFieldIndexes(singletonList(index)); + + writeMutation( + setMutation("coll/doc1", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})))); + writeMutation( + setMutation("coll/doc2", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2})))); + writeMutation( + setMutation("coll/doc3", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4})))); + writeMutation( + setMutation("coll/doc4", map("key", BsonBinaryData.fromBytes(2, new byte[] {1, 2})))); + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 4, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + 
"coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set, + "coll/doc4", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc1", "coll/doc3", "coll/doc4"); + + query = + query("coll") + .filter(filter("key", "==", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + assertOverlayTypes(keyMap("coll/doc1", CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1"); + + query = + query("coll") + .filter(filter("key", "!=", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set, + "coll/doc4", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3", "coll/doc4"); + + query = + query("coll") + .filter(filter("key", ">=", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set, + "coll/doc4", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc3", "coll/doc4"); + + query = + query("coll") + .filter(filter("key", "<=", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc1"); + + query = + query("coll").filter(filter("key", ">", BsonBinaryData.fromBytes(2, new byte[] {1, 2}))); + executeQuery(query); + assertOverlaysRead(/* byKey= 
*/ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = + query("coll").filter(filter("key", "<", BsonBinaryData.fromBytes(1, new byte[] {1, 2}))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = + query("coll") + .filter( + filter( + "key", + "in", + Arrays.asList( + BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}), + BsonBinaryData.fromBytes(1, new byte[] {1, 2})))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2"); + } + + @Test + public void testIndexesRegex() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); + configureFieldIndexes(singletonList(index)); + + writeMutation(setMutation("coll/doc1", map("key", new RegexValue("^bar", "i")))); + writeMutation(setMutation("coll/doc2", map("key", new RegexValue("^bar", "m")))); + writeMutation(setMutation("coll/doc3", map("key", new RegexValue("^foo", "i")))); + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "==", new RegexValue("^bar", "i"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + assertOverlayTypes(keyMap("coll/doc1", CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1"); + + query = 
query("coll").filter(filter("key", "!=", new RegexValue("^bar", "i"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", ">", new RegexValue("^foo", "i"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = query("coll").filter(filter("key", "<", new RegexValue("^bar", "i"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = + query("coll") + .filter( + filter( + "key", + "in", + Arrays.asList(new RegexValue("^bar", "i"), new RegexValue("^foo", "i")))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc3"); + } + + @Test + public void testIndexesInt32() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); + configureFieldIndexes(singletonList(index)); + writeMutation(setMutation("coll/doc1", map("key", new Int32Value(-1)))); + writeMutation(setMutation("coll/doc2", map("key", new Int32Value(0)))); + writeMutation(setMutation("coll/doc3", map("key", new Int32Value(1)))); + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + 
CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "==", new Int32Value(-1))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + assertOverlayTypes(keyMap("coll/doc1", CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1"); + + query = query("coll").filter(filter("key", "!=", new Int32Value(-1))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", ">=", new Int32Value(0))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "<=", new Int32Value(0))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", ">", new Int32Value(1))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = query("coll").filter(filter("key", "<", new Int32Value(-1))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = + query("coll") + .filter(filter("key", "in", Arrays.asList(new Int32Value(-1), new Int32Value(0)))); + executeQuery(query); + 
assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2"); + } + + @Test + public void testIndexesMinKey() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); + configureFieldIndexes(singletonList(index)); + + writeMutation(setMutation("coll/doc1", map("key", null))); + writeMutation(setMutation("coll/doc2", map("key", MinKey.instance()))); + writeMutation(setMutation("coll/doc3", map("key", MinKey.instance()))); + writeMutation(setMutation("coll/doc4", map("key", 1))); + writeMutation(setMutation("coll/doc5", map("key", MaxKey.instance()))); + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 5, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set, + "coll/doc4", + CountingQueryEngine.OverlayType.Set, + "coll/doc5", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3", "coll/doc4", "coll/doc5"); + + query = query("coll").filter(filter("key", "==", MinKey.instance())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "!=", MinKey.instance())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc4", + CountingQueryEngine.OverlayType.Set, + "coll/doc5", + 
CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc4", "coll/doc5"); + + query = query("coll").filter(filter("key", ">=", MinKey.instance())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "<=", MinKey.instance())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", ">", MinKey.instance())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = query("coll").filter(filter("key", "<", MinKey.instance())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = + query("coll") + .filter(filter("key", "in", Arrays.asList(MinKey.instance(), MaxKey.instance()))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set, + "coll/doc5", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3", "coll/doc5"); + } + + @Test + public void testIndexesMaxKey() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); + configureFieldIndexes(singletonList(index)); + + writeMutation(setMutation("coll/doc1", map("key", null))); + writeMutation(setMutation("coll/doc2", map("key", 
MinKey.instance()))); + writeMutation(setMutation("coll/doc3", map("key", 1))); + writeMutation(setMutation("coll/doc4", map("key", MaxKey.instance()))); + writeMutation(setMutation("coll/doc5", map("key", MaxKey.instance()))); + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 5, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set, + "coll/doc4", + CountingQueryEngine.OverlayType.Set, + "coll/doc5", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3", "coll/doc4", "coll/doc5"); + + query = query("coll").filter(filter("key", "==", MaxKey.instance())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc4", + CountingQueryEngine.OverlayType.Set, + "coll/doc5", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc4", "coll/doc5"); + + query = query("coll").filter(filter("key", "!=", MaxKey.instance())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", ">=", MaxKey.instance())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc4", + CountingQueryEngine.OverlayType.Set, + "coll/doc5", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc4", "coll/doc5"); + + query = query("coll").filter(filter("key", "<=", MaxKey.instance())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + 
assertOverlayTypes( + keyMap( + "coll/doc4", + CountingQueryEngine.OverlayType.Set, + "coll/doc5", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc4", "coll/doc5"); + + query = query("coll").filter(filter("key", ">", MaxKey.instance())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = query("coll").filter(filter("key", "<", MaxKey.instance())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + } + + @Test + public void testIndexesAllBsonTypesTogether() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.DESCENDING); + configureFieldIndexes(singletonList(index)); + + writeMutation(setMutation("coll/doc1", map("key", MinKey.instance()))); + writeMutation(setMutation("coll/doc2", map("key", new Int32Value(2)))); + writeMutation(setMutation("coll/doc3", map("key", new Int32Value(1)))); + writeMutation(setMutation("coll/doc4", map("key", new BsonTimestamp(1000, 1001)))); + writeMutation(setMutation("coll/doc5", map("key", new BsonTimestamp(1000, 1000)))); + writeMutation( + setMutation("coll/doc6", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4})))); + writeMutation( + setMutation("coll/doc7", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})))); + writeMutation( + setMutation("coll/doc8", map("key", new BsonObjectId("507f191e810c19729de860eb")))); + writeMutation( + setMutation("coll/doc9", map("key", new BsonObjectId("507f191e810c19729de860ea")))); + writeMutation(setMutation("coll/doc10", map("key", new RegexValue("^bar", "m")))); + writeMutation(setMutation("coll/doc11", map("key", new RegexValue("^bar", "i")))); + writeMutation(setMutation("coll/doc12", map("key", MaxKey.instance()))); + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "desc")); + 
executeQuery(query); + assertOverlaysRead(/* byKey= */ 12, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set, + "coll/doc4", + CountingQueryEngine.OverlayType.Set, + "coll/doc5", + CountingQueryEngine.OverlayType.Set, + "coll/doc6", + CountingQueryEngine.OverlayType.Set, + "coll/doc7", + CountingQueryEngine.OverlayType.Set, + "coll/doc8", + CountingQueryEngine.OverlayType.Set, + "coll/doc9", + CountingQueryEngine.OverlayType.Set, + "coll/doc10", + CountingQueryEngine.OverlayType.Set, + "coll/doc11", + CountingQueryEngine.OverlayType.Set, + "coll/doc12", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned( + "coll/doc12", + "coll/doc10", + "coll/doc11", + "coll/doc8", + "coll/doc9", + "coll/doc6", + "coll/doc7", + "coll/doc4", + "coll/doc5", + "coll/doc2", + "coll/doc3", + "coll/doc1"); + } + + @Test + public void testIndexesAllTypesTogether() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); + configureFieldIndexes(singletonList(index)); + + writeMutation(setMutation("coll/doc1", map("key", null))); + writeMutation(setMutation("coll/doc2", map("key", MinKey.instance()))); + writeMutation(setMutation("coll/doc3", map("key", true))); + writeMutation(setMutation("coll/doc4", map("key", Double.NaN))); + writeMutation(setMutation("coll/doc5", map("key", new Int32Value(1)))); + writeMutation(setMutation("coll/doc6", map("key", 2.0))); + writeMutation(setMutation("coll/doc7", map("key", 3))); + writeMutation(setMutation("coll/doc8", map("key", new Timestamp(100, 123456000)))); + writeMutation(setMutation("coll/doc9", map("key", new BsonTimestamp(1, 2)))); + writeMutation(setMutation("coll/doc10", map("key", "string"))); + writeMutation(setMutation("coll/doc11", map("key", blob(1, 2, 3)))); + writeMutation( + 
setMutation("coll/doc12", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})))); + writeMutation(setMutation("coll/doc13", map("key", ref("foo/bar")))); + writeMutation( + setMutation("coll/doc14", map("key", new BsonObjectId("507f191e810c19729de860ea")))); + writeMutation(setMutation("coll/doc15", map("key", new GeoPoint(1, 2)))); + writeMutation(setMutation("coll/doc16", map("key", new RegexValue("^bar", "m")))); + writeMutation(setMutation("coll/doc17", map("key", Arrays.asList(2, "foo")))); + writeMutation(setMutation("coll/doc18", map("key", FieldValue.vector(new double[] {1, 2, 3})))); + writeMutation(setMutation("coll/doc19", map("key", map("bar", 1, "foo", 2)))); + writeMutation(setMutation("coll/doc20", map("key", MaxKey.instance()))); + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 20, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set, + "coll/doc4", + CountingQueryEngine.OverlayType.Set, + "coll/doc5", + CountingQueryEngine.OverlayType.Set, + "coll/doc6", + CountingQueryEngine.OverlayType.Set, + "coll/doc7", + CountingQueryEngine.OverlayType.Set, + "coll/doc8", + CountingQueryEngine.OverlayType.Set, + "coll/doc9", + CountingQueryEngine.OverlayType.Set, + "coll/doc10", + CountingQueryEngine.OverlayType.Set, + "coll/doc11", + CountingQueryEngine.OverlayType.Set, + "coll/doc12", + CountingQueryEngine.OverlayType.Set, + "coll/doc13", + CountingQueryEngine.OverlayType.Set, + "coll/doc14", + CountingQueryEngine.OverlayType.Set, + "coll/doc15", + CountingQueryEngine.OverlayType.Set, + "coll/doc16", + CountingQueryEngine.OverlayType.Set, + "coll/doc17", + CountingQueryEngine.OverlayType.Set, + "coll/doc18", + CountingQueryEngine.OverlayType.Set, + "coll/doc19", + 
CountingQueryEngine.OverlayType.Set, + "coll/doc20", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned( + "coll/doc1", + "coll/doc2", + "coll/doc3", + "coll/doc4", + "coll/doc5", + "coll/doc6", + "coll/doc7", + "coll/doc8", + "coll/doc9", + "coll/doc10", + "coll/doc11", + "coll/doc12", + "coll/doc13", + "coll/doc14", + "coll/doc15", + "coll/doc16", + "coll/doc17", + "coll/doc18", + "coll/doc19", + "coll/doc20"); + } + @Test public void testIndexesServerTimestamps() { FieldIndex index = @@ -493,7 +1333,7 @@ public void testCanAutoCreateIndexesWorksWithOrQuery() { // Full matched index should be created. executeQuery(query); assertRemoteDocumentsRead(/* byKey= */ 0, /* byCollection= */ 2); - assertQueryReturned("coll/e", "coll/a"); + assertQueryReturned("coll/a", "coll/e"); backfillIndexes(); @@ -501,7 +1341,7 @@ public void testCanAutoCreateIndexesWorksWithOrQuery() { executeQuery(query); assertRemoteDocumentsRead(/* byKey= */ 2, /* byCollection= */ 1); - assertQueryReturned("coll/f", "coll/e", "coll/a"); + assertQueryReturned("coll/a", "coll/e", "coll/f"); } @Test @@ -521,7 +1361,7 @@ public void testDoesNotAutoCreateIndexesForSmallCollections() { // SDK will not create indexes since collection size is too small. 
executeQuery(query); assertRemoteDocumentsRead(/* byKey= */ 0, /* byCollection= */ 2); - assertQueryReturned("coll/a", "coll/e"); + assertQueryReturned("coll/e", "coll/a"); backfillIndexes(); @@ -529,7 +1369,7 @@ public void testDoesNotAutoCreateIndexesForSmallCollections() { executeQuery(query); assertRemoteDocumentsRead(/* byKey= */ 0, /* byCollection= */ 3); - assertQueryReturned("coll/a", "coll/e", "coll/f"); + assertQueryReturned("coll/e", "coll/f", "coll/a"); } @Test @@ -598,7 +1438,7 @@ public void testIndexAutoCreationWorksWhenBackfillerRunsHalfway() { executeQuery(query); assertRemoteDocumentsRead(/* byKey= */ 1, /* byCollection= */ 2); - assertQueryReturned("coll/a", "coll/e", "coll/f"); + assertQueryReturned("coll/a", "coll/f", "coll/e"); } @Test @@ -621,7 +1461,7 @@ public void testIndexCreatedByIndexAutoCreationExistsAfterTurnOffAutoCreation() // Full matched index should be created. executeQuery(query); assertRemoteDocumentsRead(/* byKey= */ 0, /* byCollection= */ 2); - assertQueryReturned("coll/a", "coll/e"); + assertQueryReturned("coll/e", "coll/a"); setIndexAutoCreationEnabled(false); @@ -631,7 +1471,7 @@ public void testIndexCreatedByIndexAutoCreationExistsAfterTurnOffAutoCreation() executeQuery(query); assertRemoteDocumentsRead(/* byKey= */ 2, /* byCollection= */ 1); - assertQueryReturned("coll/a", "coll/e", "coll/f"); + assertQueryReturned("coll/e", "coll/a", "coll/f"); } @Test diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/model/ValuesTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/model/ValuesTest.java index 6a7dbe9c259..66b0ff937d7 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/model/ValuesTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/model/ValuesTest.java @@ -23,14 +23,25 @@ import static com.google.firebase.firestore.testutil.TestUtil.ref; import static com.google.firebase.firestore.testutil.TestUtil.wrapRef; import static 
org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; import com.google.common.testing.EqualsTester; import com.google.firebase.Timestamp; +import com.google.firebase.firestore.BsonBinaryData; +import com.google.firebase.firestore.BsonObjectId; +import com.google.firebase.firestore.BsonTimestamp; import com.google.firebase.firestore.FieldValue; import com.google.firebase.firestore.GeoPoint; +import com.google.firebase.firestore.Int32Value; +import com.google.firebase.firestore.MaxKey; +import com.google.firebase.firestore.MinKey; +import com.google.firebase.firestore.RegexValue; +import com.google.firebase.firestore.model.Values.MapRepresentation; import com.google.firebase.firestore.testutil.ComparatorTester; import com.google.firebase.firestore.testutil.TestUtil; import com.google.firestore.v1.Value; +import com.google.protobuf.ByteString; import java.util.Arrays; import java.util.Calendar; import java.util.Collections; @@ -64,6 +75,28 @@ public void testValueEquality() { GeoPoint geoPoint2 = new GeoPoint(0, 2); Timestamp timestamp1 = new Timestamp(date1); Timestamp timestamp2 = new Timestamp(date2); + + BsonObjectId objectId1 = new BsonObjectId("507f191e810c19729de860ea"); + BsonObjectId objectId2 = new BsonObjectId("507f191e810c19729de860eb"); + + BsonBinaryData binaryData1 = BsonBinaryData.fromBytes(1, new byte[] {1, 2}); + BsonBinaryData binaryData2 = BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}); + BsonBinaryData binaryData3 = BsonBinaryData.fromBytes(2, new byte[] {1, 2}); + + BsonTimestamp bsonTimestamp1 = new BsonTimestamp(1, 2); + BsonTimestamp bsonTimestamp2 = new BsonTimestamp(1, 3); + BsonTimestamp bsonTimestamp3 = new BsonTimestamp(2, 2); + + Int32Value int32Value1 = new Int32Value(1); + Int32Value int32Value2 = new Int32Value(2); + + RegexValue regexValue1 = new RegexValue("^foo", "i"); + RegexValue regexValue2 = new RegexValue("^foo", "m"); + RegexValue regexValue3 = new 
RegexValue("^bar", "i"); + + MinKey minKey = MinKey.instance(); + MaxKey maxKey = MaxKey.instance(); + new EqualsTester() .addEqualityGroup(wrap(true), wrap(true)) .addEqualityGroup(wrap(false), wrap(false)) @@ -108,6 +141,22 @@ public void testValueEquality() { .addEqualityGroup(wrap(map("bar", 2, "foo", 1))) .addEqualityGroup(wrap(map("bar", 1))) .addEqualityGroup(wrap(map("foo", 1))) + .addEqualityGroup(wrap(new BsonObjectId("507f191e810c19729de860ea")), wrap(objectId1)) + .addEqualityGroup(wrap(new BsonObjectId("507f191e810c19729de860eb")), wrap(objectId2)) + .addEqualityGroup(wrap(BsonBinaryData.fromBytes(1, new byte[] {1, 2})), wrap(binaryData1)) + .addEqualityGroup( + wrap(BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})), wrap(binaryData2)) + .addEqualityGroup(wrap(BsonBinaryData.fromBytes(2, new byte[] {1, 2})), wrap(binaryData3)) + .addEqualityGroup(wrap(new BsonTimestamp(1, 2)), wrap(bsonTimestamp1)) + .addEqualityGroup(wrap(new BsonTimestamp(1, 3)), wrap(bsonTimestamp2)) + .addEqualityGroup(wrap(new BsonTimestamp(2, 2)), wrap(bsonTimestamp3)) + .addEqualityGroup(wrap(new Int32Value(1)), wrap(int32Value1)) + .addEqualityGroup(wrap(new Int32Value(2)), wrap(int32Value2)) + .addEqualityGroup(wrap(new RegexValue("^foo", "i")), wrap(regexValue1)) + .addEqualityGroup(wrap(new RegexValue("^foo", "m")), wrap(regexValue2)) + .addEqualityGroup(wrap(new RegexValue("^bar", "i")), wrap(regexValue3)) + .addEqualityGroup(wrap(MinKey.instance()), wrap(minKey)) + .addEqualityGroup(wrap(MaxKey.instance()), wrap(maxKey)) .testEquals(); } @@ -120,27 +169,32 @@ public void testValueOrdering() { // null first .addEqualityGroup(wrap((Object) null)) + // MinKey is after null + .addEqualityGroup(wrap(MinKey.instance())) + // booleans .addEqualityGroup(wrap(false)) .addEqualityGroup(wrap(true)) - // numbers + // 64-bit and 32-bit numbers order together numerically. 
.addEqualityGroup(wrap(Double.NaN)) .addEqualityGroup(wrap(Double.NEGATIVE_INFINITY)) .addEqualityGroup(wrap(-Double.MAX_VALUE)) .addEqualityGroup(wrap(Long.MIN_VALUE)) + .addEqualityGroup(wrap(new Int32Value(-2147483648)), wrap(Integer.MIN_VALUE)) .addEqualityGroup(wrap(-1.1)) .addEqualityGroup(wrap(-1.0)) .addEqualityGroup(wrap(-Double.MIN_NORMAL)) .addEqualityGroup(wrap(-Double.MIN_VALUE)) // Zeros all compare the same. - .addEqualityGroup(wrap(-0.0), wrap(0.0), wrap(0L)) + .addEqualityGroup(wrap(-0.0), wrap(0.0), wrap(0L), wrap(new Int32Value(0))) .addEqualityGroup(wrap(Double.MIN_VALUE)) .addEqualityGroup(wrap(Double.MIN_NORMAL)) .addEqualityGroup(wrap(0.1)) - // Doubles and Longs compareTo() the same. - .addEqualityGroup(wrap(1.0), wrap(1L)) + // Doubles, Longs, Int32Values compareTo() the same. + .addEqualityGroup(wrap(1.0), wrap(1L), wrap(new Int32Value(1))) .addEqualityGroup(wrap(1.1)) + .addEqualityGroup(wrap(new Int32Value(2147483647)), wrap(Integer.MAX_VALUE)) .addEqualityGroup(wrap(Long.MAX_VALUE)) .addEqualityGroup(wrap(Double.MAX_VALUE)) .addEqualityGroup(wrap(Double.POSITIVE_INFINITY)) @@ -149,6 +203,11 @@ public void testValueOrdering() { .addEqualityGroup(wrap(date1)) .addEqualityGroup(wrap(date2)) + // bson timestamps + .addEqualityGroup(wrap(new BsonTimestamp(123, 4))) + .addEqualityGroup(wrap(new BsonTimestamp(123, 5))) + .addEqualityGroup(wrap(new BsonTimestamp(124, 0))) + // server timestamps come after all concrete timestamps. 
.addEqualityGroup(wrap(ServerTimestamps.valueOf(new Timestamp(date1), null))) .addEqualityGroup(wrap(ServerTimestamps.valueOf(new Timestamp(date2), null))) @@ -172,6 +231,15 @@ public void testValueOrdering() { .addEqualityGroup(wrap(blob(0, 1, 2, 4, 3))) .addEqualityGroup(wrap(blob(255))) + // bson binary data + .addEqualityGroup( + wrap(BsonBinaryData.fromBytes(1, new byte[] {})), + wrap(BsonBinaryData.fromByteString(1, ByteString.EMPTY))) + .addEqualityGroup(wrap(BsonBinaryData.fromBytes(1, new byte[] {0}))) + .addEqualityGroup(wrap(BsonBinaryData.fromBytes(5, new byte[] {1, 2}))) + .addEqualityGroup(wrap(BsonBinaryData.fromBytes(5, new byte[] {1, 2, 3}))) + .addEqualityGroup(wrap(BsonBinaryData.fromBytes(7, new byte[] {1}))) + // resource names .addEqualityGroup(wrap(wrapRef(dbId("p1", "d1"), key("c1/doc1")))) .addEqualityGroup(wrap(wrapRef(dbId("p1", "d1"), key("c1/doc2")))) @@ -180,6 +248,15 @@ public void testValueOrdering() { .addEqualityGroup(wrap(wrapRef(dbId("p1", "d2"), key("c1/doc1")))) .addEqualityGroup(wrap(wrapRef(dbId("p2", "d1"), key("c1/doc1")))) + // bson object id + .addEqualityGroup(wrap(new BsonObjectId("507f191e810c19729de860ea"))) + .addEqualityGroup(wrap(new BsonObjectId("507f191e810c19729de860eb"))) + // latin small letter e + combining acute accent + latin small letter b + .addEqualityGroup(wrap(new BsonObjectId("e\u0301b"))) + .addEqualityGroup(wrap(new BsonObjectId("æ"))) + // latin small letter e with acute accent + latin small letter a + .addEqualityGroup(wrap(new BsonObjectId("\u00e9a"))) + // geo points .addEqualityGroup(wrap(new GeoPoint(-90, -180))) .addEqualityGroup(wrap(new GeoPoint(-90, 0))) @@ -194,6 +271,16 @@ public void testValueOrdering() { .addEqualityGroup(wrap(new GeoPoint(90, 0))) .addEqualityGroup(wrap(new GeoPoint(90, 180))) + // regex + .addEqualityGroup(wrap(new RegexValue("^foo", "i"))) + .addEqualityGroup(wrap(new RegexValue("^foo", "m"))) + .addEqualityGroup(wrap(new RegexValue("^zoo", "i"))) + // latin small 
letter e + combining acute accent + latin small letter b + .addEqualityGroup(wrap(new RegexValue("e\u0301b", "i"))) + .addEqualityGroup(wrap(new RegexValue("æ", "i"))) + // latin small letter e with acute accent + latin small letter a + .addEqualityGroup(wrap(new RegexValue("\u00e9a", "i"))) + // arrays .addEqualityGroup(wrap(Arrays.asList("bar"))) .addEqualityGroup(wrap(Arrays.asList("foo", 1))) @@ -212,21 +299,31 @@ public void testValueOrdering() { .addEqualityGroup(wrap(map("foo", 1))) .addEqualityGroup(wrap(map("foo", 2))) .addEqualityGroup(wrap(map("foo", "0"))) + + // MaxKey is last + .addEqualityGroup(wrap(MaxKey.instance())) .testCompare(); } @Test public void testLowerBound() { new ComparatorTester() - // null first + // lower bound of null is null .addEqualityGroup(wrap(getLowerBound(TestUtil.wrap((Object) null))), wrap((Object) null)) + // lower bound of MinKey is MinKey + .addEqualityGroup( + wrap(getLowerBound(TestUtil.wrap(MinKey.instance()))), wrap(MinKey.instance())) + // booleans .addEqualityGroup(wrap(false), wrap(getLowerBound(TestUtil.wrap(true)))) .addEqualityGroup(wrap(true)) // numbers - .addEqualityGroup(wrap(getLowerBound(TestUtil.wrap(1.0))), wrap(Double.NaN)) + .addEqualityGroup( + wrap(getLowerBound(TestUtil.wrap(1.0))), + wrap(Double.NaN), + wrap(getLowerBound(TestUtil.wrap(new Int32Value(1))))) .addEqualityGroup(wrap(Double.NEGATIVE_INFINITY)) .addEqualityGroup(wrap(Long.MIN_VALUE)) @@ -234,6 +331,12 @@ public void testLowerBound() { .addEqualityGroup(wrap(getLowerBound(TestUtil.wrap(date1)))) .addEqualityGroup(wrap(date1)) + // bson timestamps + .addEqualityGroup( + wrap(getLowerBound(TestUtil.wrap(new BsonTimestamp(4294967295L, 4294967295L)))), + wrap(new BsonTimestamp(0, 0))) + .addEqualityGroup(wrap(new BsonTimestamp(1, 1))) + // strings .addEqualityGroup(wrap(getLowerBound(TestUtil.wrap("foo"))), wrap("")) .addEqualityGroup(wrap("\000")) @@ -242,17 +345,35 @@ public void testLowerBound() { 
.addEqualityGroup(wrap(getLowerBound(TestUtil.wrap(blob(1, 2, 3)))), wrap(blob())) .addEqualityGroup(wrap(blob(0))) + // bson binary data + .addEqualityGroup( + wrap(getLowerBound(TestUtil.wrap(BsonBinaryData.fromBytes(128, new byte[] {1, 2, 3})))), + wrap(BsonBinaryData.fromBytes(0, new byte[] {})), + wrap(BsonBinaryData.fromByteString((byte) 0, ByteString.EMPTY))) + .addEqualityGroup(wrap(BsonBinaryData.fromBytes(0, new byte[] {0}))) + // resource names .addEqualityGroup( wrap(getLowerBound(wrapRef(dbId("foo", "bar"), key("x/y")))), wrap(wrapRef(dbId("", ""), key("")))) .addEqualityGroup(wrap(wrapRef(dbId("", ""), key("a/a")))) + // bson object ids + .addEqualityGroup( + wrap(getLowerBound(TestUtil.wrap(new BsonObjectId("zzz")))), wrap(new BsonObjectId(""))) + .addEqualityGroup(wrap(new BsonObjectId("a"))) + // geo points .addEqualityGroup( wrap(getLowerBound(TestUtil.wrap(new GeoPoint(-90, 0)))), wrap(new GeoPoint(-90, -180))) .addEqualityGroup(wrap(new GeoPoint(-90, 0))) + // regular expressions + .addEqualityGroup( + wrap(getLowerBound(TestUtil.wrap(new RegexValue("^foo", "i")))), + wrap(new RegexValue("", ""))) + .addEqualityGroup(wrap(new RegexValue("^foo", "i"))) + // arrays .addEqualityGroup( wrap(getLowerBound(TestUtil.wrap(Collections.singletonList(false)))), @@ -271,6 +392,9 @@ public void testLowerBound() { // objects .addEqualityGroup(wrap(getLowerBound(TestUtil.wrap(map("foo", "bar")))), wrap(map())) + + // maxKey + .addEqualityGroup(wrap(MaxKey.instance())) .testCompare(); } @@ -279,13 +403,20 @@ public void testUpperBound() { new ComparatorTester() // null first .addEqualityGroup(wrap((Object) null)) - .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap((Object) null)))) + + // upper value of null is MinKey + .addEqualityGroup( + wrap(getUpperBound(TestUtil.wrap((Object) null))), wrap(MinKey.instance())) + + // upper value of MinKey is boolean `false` + .addEqualityGroup(wrap(false), wrap(getUpperBound(TestUtil.wrap(MinKey.instance())))) // booleans 
.addEqualityGroup(wrap(true)) .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(false)))) // numbers + .addEqualityGroup(wrap(new Int32Value(2147483647))) // largest int32 value .addEqualityGroup(wrap(Long.MAX_VALUE)) .addEqualityGroup(wrap(Double.POSITIVE_INFINITY)) .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(1.0)))) @@ -294,6 +425,11 @@ public void testUpperBound() { .addEqualityGroup(wrap(date1)) .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(date1)))) + // bson timestamps + .addEqualityGroup( + wrap(new BsonTimestamp(4294967295L, 4294967295L))) // largest bson timestamp value + .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(new BsonTimestamp(1, 1))))) + // strings .addEqualityGroup(wrap("\000")) .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap("\000")))) @@ -302,14 +438,27 @@ public void testUpperBound() { .addEqualityGroup(wrap(blob(255))) .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(blob(255))))) + // bson binary data + .addEqualityGroup(wrap(BsonBinaryData.fromBytes(128, new byte[] {1, 2}))) + .addEqualityGroup( + wrap(getUpperBound(TestUtil.wrap(BsonBinaryData.fromBytes(0, new byte[] {}))))) + // resource names .addEqualityGroup(wrap(wrapRef(dbId("", ""), key("a/a")))) .addEqualityGroup(wrap(getUpperBound(wrapRef(dbId("", ""), key("a/a"))))) + // bson object ids + .addEqualityGroup(wrap(new BsonObjectId("zzz"))) + .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(new BsonObjectId("a"))))) + // geo points .addEqualityGroup(wrap(new GeoPoint(90, 180))) .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(new GeoPoint(90, 180))))) + // regular expressions + .addEqualityGroup(wrap(new RegexValue("^foo", "i"))) + .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(new RegexValue("", ""))))) + // arrays .addEqualityGroup(wrap(Collections.singletonList(false))) .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(Collections.singletonList(false))))) @@ -325,7 +474,10 @@ public void testUpperBound() { // objects .addEqualityGroup(wrap(map("a", 
"b"))) - .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(map("a", "b"))))) + + // upper value of objects is MaxKey + .addEqualityGroup( + wrap(getUpperBound(TestUtil.wrap(map("a", "b")))), wrap(MaxKey.instance())) .testCompare(); } @@ -346,6 +498,20 @@ public void testCanonicalIds() { assertCanonicalId( TestUtil.wrap(map("a", Arrays.asList("b", map("c", new GeoPoint(30, 60))))), "{a:[b,{c:geo(30.0,60.0)}]}"); + + assertCanonicalId(TestUtil.wrap(new RegexValue("a", "b")), "{__regex__:{options:b,pattern:a}}"); + + assertCanonicalId(TestUtil.wrap(new BsonObjectId("foo")), "{__oid__:foo}"); + assertCanonicalId( + TestUtil.wrap(new BsonTimestamp(1, 2)), "{__request_timestamp__:{increment:2,seconds:1}}"); + assertCanonicalId((TestUtil.wrap(new Int32Value(1))), "{__int__:1}"); + assertCanonicalId( + TestUtil.wrap(BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})), "{__binary__:01010203}"); + assertCanonicalId( + TestUtil.wrap(BsonBinaryData.fromBytes(128, new byte[] {1, 2, 3})), + "{__binary__:80010203}"); + assertCanonicalId(TestUtil.wrap(MinKey.instance()), "{__min__:null}"); + assertCanonicalId(TestUtil.wrap(MaxKey.instance()), "{__max__:null}"); } @Test @@ -358,6 +524,94 @@ private void assertCanonicalId(Value proto, String expectedCanonicalId) { assertEquals(expectedCanonicalId, Values.canonicalId(proto)); } + @Test + public void DetectsBsonTypesCorrectly() { + Value minKeyValue = TestUtil.wrap(MinKey.instance()); + Value maxKeyValue = TestUtil.wrap(MaxKey.instance()); + Value int32Value = TestUtil.wrap(new Int32Value(1)); + Value regexValue = TestUtil.wrap(new RegexValue("^foo", "i")); + Value bsonTimestampValue = TestUtil.wrap(new BsonTimestamp(1, 2)); + Value bsonObjectIdValue = TestUtil.wrap(new BsonObjectId("foo")); + Value bsonBinaryDataValue1 = TestUtil.wrap(BsonBinaryData.fromBytes(1, new byte[] {})); + Value bsonBinaryDataValue2 = TestUtil.wrap(BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4})); + + 
assertTrue(Values.isMinKey(minKeyValue.getMapValue().getFieldsMap())); + assertFalse(Values.isMinKey(maxKeyValue.getMapValue().getFieldsMap())); + assertFalse(Values.isMinKey(int32Value.getMapValue().getFieldsMap())); + assertFalse(Values.isMinKey(regexValue.getMapValue().getFieldsMap())); + assertFalse(Values.isMinKey(bsonTimestampValue.getMapValue().getFieldsMap())); + assertFalse(Values.isMinKey(bsonObjectIdValue.getMapValue().getFieldsMap())); + assertFalse(Values.isMinKey(bsonBinaryDataValue1.getMapValue().getFieldsMap())); + assertFalse(Values.isMinKey(bsonBinaryDataValue2.getMapValue().getFieldsMap())); + + assertFalse(Values.isMaxKey(minKeyValue.getMapValue().getFieldsMap())); + assertTrue(Values.isMaxKey(maxKeyValue.getMapValue().getFieldsMap())); + assertFalse(Values.isMaxKey(int32Value.getMapValue().getFieldsMap())); + assertFalse(Values.isMaxKey(regexValue.getMapValue().getFieldsMap())); + assertFalse(Values.isMaxKey(bsonTimestampValue.getMapValue().getFieldsMap())); + assertFalse(Values.isMaxKey(bsonObjectIdValue.getMapValue().getFieldsMap())); + assertFalse(Values.isMaxKey(bsonBinaryDataValue1.getMapValue().getFieldsMap())); + assertFalse(Values.isMaxKey(bsonBinaryDataValue2.getMapValue().getFieldsMap())); + + assertFalse(Values.isInt32Value(minKeyValue.getMapValue().getFieldsMap())); + assertFalse(Values.isInt32Value(maxKeyValue.getMapValue().getFieldsMap())); + assertTrue(Values.isInt32Value(int32Value.getMapValue().getFieldsMap())); + assertFalse(Values.isInt32Value(regexValue.getMapValue().getFieldsMap())); + assertFalse(Values.isInt32Value(bsonTimestampValue.getMapValue().getFieldsMap())); + assertFalse(Values.isInt32Value(bsonObjectIdValue.getMapValue().getFieldsMap())); + assertFalse(Values.isInt32Value(bsonBinaryDataValue1.getMapValue().getFieldsMap())); + assertFalse(Values.isInt32Value(bsonBinaryDataValue2.getMapValue().getFieldsMap())); + + assertFalse(Values.isRegexValue(minKeyValue.getMapValue().getFieldsMap())); + 
assertFalse(Values.isRegexValue(maxKeyValue.getMapValue().getFieldsMap())); + assertFalse(Values.isRegexValue(int32Value.getMapValue().getFieldsMap())); + assertTrue(Values.isRegexValue(regexValue.getMapValue().getFieldsMap())); + assertFalse(Values.isRegexValue(bsonTimestampValue.getMapValue().getFieldsMap())); + assertFalse(Values.isRegexValue(bsonObjectIdValue.getMapValue().getFieldsMap())); + assertFalse(Values.isRegexValue(bsonBinaryDataValue1.getMapValue().getFieldsMap())); + assertFalse(Values.isRegexValue(bsonBinaryDataValue2.getMapValue().getFieldsMap())); + + assertFalse(Values.isBsonTimestamp(minKeyValue.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonTimestamp(maxKeyValue.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonTimestamp(int32Value.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonTimestamp(regexValue.getMapValue().getFieldsMap())); + assertTrue(Values.isBsonTimestamp(bsonTimestampValue.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonTimestamp(bsonObjectIdValue.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonTimestamp(bsonBinaryDataValue1.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonTimestamp(bsonBinaryDataValue2.getMapValue().getFieldsMap())); + + assertFalse(Values.isBsonObjectId(minKeyValue.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonObjectId(maxKeyValue.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonObjectId(int32Value.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonObjectId(regexValue.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonObjectId(bsonTimestampValue.getMapValue().getFieldsMap())); + assertTrue(Values.isBsonObjectId(bsonObjectIdValue.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonObjectId(bsonBinaryDataValue1.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonObjectId(bsonBinaryDataValue2.getMapValue().getFieldsMap())); + + 
assertFalse(Values.isBsonBinaryData(minKeyValue.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonBinaryData(maxKeyValue.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonBinaryData(int32Value.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonBinaryData(regexValue.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonBinaryData(bsonTimestampValue.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonBinaryData(bsonObjectIdValue.getMapValue().getFieldsMap())); + assertTrue(Values.isBsonBinaryData(bsonBinaryDataValue1.getMapValue().getFieldsMap())); + assertTrue(Values.isBsonBinaryData(bsonBinaryDataValue2.getMapValue().getFieldsMap())); + + assertEquals(Values.detectMapRepresentation(minKeyValue), MapRepresentation.MIN_KEY); + assertEquals(Values.detectMapRepresentation(maxKeyValue), MapRepresentation.MAX_KEY); + assertEquals(Values.detectMapRepresentation(int32Value), MapRepresentation.INT32); + assertEquals(Values.detectMapRepresentation(regexValue), MapRepresentation.REGEX); + assertEquals( + Values.detectMapRepresentation(bsonTimestampValue), MapRepresentation.BSON_TIMESTAMP); + assertEquals( + Values.detectMapRepresentation(bsonObjectIdValue), MapRepresentation.BSON_OBJECT_ID); + assertEquals( + Values.detectMapRepresentation(bsonBinaryDataValue1), MapRepresentation.BSON_BINARY); + assertEquals( + Values.detectMapRepresentation(bsonBinaryDataValue2), MapRepresentation.BSON_BINARY); + } + /** Small helper class that uses ProtoValues for equals() and compareTo(). 
*/ static class EqualsWrapper implements Comparable { final Value proto; diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/remote/RemoteSerializerTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/remote/RemoteSerializerTest.java index 52eec0ac4cd..26f665b38f2 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/remote/RemoteSerializerTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/remote/RemoteSerializerTest.java @@ -39,9 +39,15 @@ import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; +import com.google.firebase.firestore.BsonBinaryData; +import com.google.firebase.firestore.BsonObjectId; +import com.google.firebase.firestore.BsonTimestamp; import com.google.firebase.firestore.DocumentReference; import com.google.firebase.firestore.FieldValue; import com.google.firebase.firestore.GeoPoint; +import com.google.firebase.firestore.MaxKey; +import com.google.firebase.firestore.MinKey; +import com.google.firebase.firestore.RegexValue; import com.google.firebase.firestore.core.ArrayContainsAnyFilter; import com.google.firebase.firestore.core.FieldFilter; import com.google.firebase.firestore.core.InFilter; @@ -329,6 +335,138 @@ public void testEncodesVectorValue() { assertRoundTrip(model, proto, Value.ValueTypeCase.MAP_VALUE); } + @Test + public void testEncodesBsonObjectIds() { + Value model = wrap(new BsonObjectId("foo")); + + Value proto = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields("__oid__", Value.newBuilder().setStringValue("foo").build()) + .build()) + .build(); + + assertRoundTrip(model, proto, Value.ValueTypeCase.MAP_VALUE); + } + + @Test + public void testEncodesBsonTimestamps() { + Value model = wrap(new BsonTimestamp(12345, 67)); + + Value proto = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + "__request_timestamp__", + Value.newBuilder() + .setMapValue( + 
MapValue.newBuilder() + .putFields( + "seconds", + Value.newBuilder().setIntegerValue(12345).build()) + .putFields( + "increment", Value.newBuilder().setIntegerValue(67).build()) + .build()) + .build()) + .build()) + .build(); + + assertRoundTrip(model, proto, Value.ValueTypeCase.MAP_VALUE); + } + + @Test + public void testEncodesBsonBinaryData() { + Value model = wrap(BsonBinaryData.fromBytes(127, new byte[] {1, 2, 3})); + + Value proto = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + "__binary__", + Value.newBuilder() + .setBytesValue(ByteString.copyFrom(new byte[] {127, 1, 2, 3})) + .build()) + .build()) + .build(); + + assertRoundTrip(model, proto, Value.ValueTypeCase.MAP_VALUE); + } + + @Test + public void testEncodesRegexValues() { + Value model = wrap(new RegexValue("^foo", "i")); + Value proto = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + "__regex__", + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + "pattern", + Value.newBuilder().setStringValue("^foo").build()) + .putFields( + "options", Value.newBuilder().setStringValue("i").build()) + .build()) + .build()) + .build()) + .build(); + + assertRoundTrip(model, proto, Value.ValueTypeCase.MAP_VALUE); + } + + @Test + public void testEncodesInt32Values() { + Value model = wrap(new com.google.firebase.firestore.Int32Value(12345)); + + Value proto = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields("__int__", Value.newBuilder().setIntegerValue(12345).build()) + .build()) + .build(); + + assertRoundTrip(model, proto, Value.ValueTypeCase.MAP_VALUE); + } + + @Test + public void testEncodesMinKey() { + Value model = wrap(MinKey.instance()); + + Value proto = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + "__min__", Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build()) + .build()) + .build(); + + assertRoundTrip(model, proto, Value.ValueTypeCase.MAP_VALUE); + } + + 
@Test + public void testEncodesMaxKey() { + Value model = wrap(MaxKey.instance()); + + Value proto = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + "__max__", Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build()) + .build()) + .build(); + + assertRoundTrip(model, proto, Value.ValueTypeCase.MAP_VALUE); + } + @Test public void testEncodeDeleteMutation() { Mutation mutation = deleteMutation("docs/1");