diff --git a/firebase-firestore/CHANGELOG.md b/firebase-firestore/CHANGELOG.md index 29416bcf9a4..7800f5b1c4a 100644 --- a/firebase-firestore/CHANGELOG.md +++ b/firebase-firestore/CHANGELOG.md @@ -1,5 +1,5 @@ # Unreleased - +* [feature] Added support for the following new types: `MinKey`, `MaxKey`, `RegexValue`, `Int32Value`, `Decimal128Value`, `BsonObjectId`, `BsonTimestamp`, and `BsonBinaryData`. [#6928](//github.com/firebase/firebase-android-sdk/pull/6928) # 25.1.4 * [fixed] Fixed the `null` value handling in `whereNotEqualTo` and `whereNotIn` filters. diff --git a/firebase-firestore/api.txt b/firebase-firestore/api.txt index e3a55cf729c..f71721ba3f5 100644 --- a/firebase-firestore/api.txt +++ b/firebase-firestore/api.txt @@ -47,6 +47,25 @@ package com.google.firebase.firestore { method public byte[] toBytes(); } + public final class BsonBinaryData { + method public com.google.protobuf.ByteString dataAsByteString(); + method public byte[] dataAsBytes(); + method public static com.google.firebase.firestore.BsonBinaryData fromByteString(int, com.google.protobuf.ByteString); + method public static com.google.firebase.firestore.BsonBinaryData fromBytes(int, byte[]); + method public int subtype(); + } + + public final class BsonObjectId { + ctor public BsonObjectId(String); + field public final String! value; + } + + public final class BsonTimestamp { + ctor public BsonTimestamp(long, long); + field public final long increment; + field public final long seconds; + } + public class CollectionReference extends com.google.firebase.firestore.Query { method public com.google.android.gms.tasks.Task add(Object); method public com.google.firebase.firestore.DocumentReference document(); @@ -56,6 +75,11 @@ package com.google.firebase.firestore { method public String getPath(); } + public final class Decimal128Value { + ctor public Decimal128Value(String); + field public final String! stringValue; + } + public class DocumentChange { method public com.google.firebase.firestore.QueryDocumentSnapshot getDocument(); method public int getNewIndex(); @@ -109,17 +133,25 @@ package com.google.firebase.firestore { method public T? get(String, Class, com.google.firebase.firestore.DocumentSnapshot.ServerTimestampBehavior); method public com.google.firebase.firestore.Blob? getBlob(String); method public Boolean? getBoolean(String); + method public com.google.firebase.firestore.BsonBinaryData? getBsonBinaryData(String); + method public com.google.firebase.firestore.BsonObjectId? getBsonObjectId(String); + method public com.google.firebase.firestore.BsonTimestamp? getBsonTimestamp(String); method public java.util.Map? getData(); method public java.util.Map? getData(com.google.firebase.firestore.DocumentSnapshot.ServerTimestampBehavior); method public java.util.Date? getDate(String); method public java.util.Date? getDate(String, com.google.firebase.firestore.DocumentSnapshot.ServerTimestampBehavior); + method public com.google.firebase.firestore.Decimal128Value? getDecimal128Value(String); method public com.google.firebase.firestore.DocumentReference? getDocumentReference(String); method public Double? getDouble(String); method public com.google.firebase.firestore.GeoPoint? getGeoPoint(String); method public String getId(); + method public com.google.firebase.firestore.Int32Value? getInt32Value(String); method public Long? getLong(String); + method public com.google.firebase.firestore.MaxKey? getMaxKey(String); method public com.google.firebase.firestore.SnapshotMetadata getMetadata(); + method public com.google.firebase.firestore.MinKey?
getMinKey(String); method public com.google.firebase.firestore.DocumentReference getReference(); + method public com.google.firebase.firestore.RegexValue? getRegexValue(String); method public String? getString(String); method public com.google.firebase.Timestamp? getTimestamp(String); method public com.google.firebase.Timestamp? getTimestamp(String, com.google.firebase.firestore.DocumentSnapshot.ServerTimestampBehavior); @@ -303,6 +335,11 @@ package com.google.firebase.firestore { @java.lang.annotation.Retention(java.lang.annotation.RetentionPolicy.RUNTIME) @java.lang.annotation.Target({java.lang.annotation.ElementType.TYPE}) public @interface IgnoreExtraProperties { } + public final class Int32Value { + ctor public Int32Value(int); + field public final int value; + } + public enum ListenSource { enum_constant public static final com.google.firebase.firestore.ListenSource CACHE; enum_constant public static final com.google.firebase.firestore.ListenSource DEFAULT; @@ -360,6 +397,10 @@ package com.google.firebase.firestore { public interface LocalCacheSettings { } + public final class MaxKey { + method public static com.google.firebase.firestore.MaxKey instance(); + } + public final class MemoryCacheSettings implements com.google.firebase.firestore.LocalCacheSettings { method public com.google.firebase.firestore.MemoryGarbageCollectorSettings getGarbageCollectorSettings(); method public static com.google.firebase.firestore.MemoryCacheSettings.Builder newBuilder(); @@ -396,6 +437,10 @@ package com.google.firebase.firestore { enum_constant public static final com.google.firebase.firestore.MetadataChanges INCLUDE; } + public final class MinKey { + method public static com.google.firebase.firestore.MinKey instance(); + } + public interface OnProgressListener { method public void onProgress(ProgressT); } @@ -491,6 +536,12 @@ package com.google.firebase.firestore { method public java.util.List toObjects(Class, com.google.firebase.firestore.DocumentSnapshot.ServerTimestampBehavior); } + public final class RegexValue { + ctor public RegexValue(String, String); + field public final String! options; + field public final String! pattern; + } + @java.lang.annotation.Retention(java.lang.annotation.RetentionPolicy.RUNTIME) @java.lang.annotation.Target({java.lang.annotation.ElementType.METHOD, java.lang.annotation.ElementType.FIELD}) public @interface ServerTimestamp { } diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java new file mode 100644 index 00000000000..badbbc7b0d5 --- /dev/null +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java @@ -0,0 +1,781 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
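Editorial aside, not part of this diff: the api.txt changes above add the new public value types and the typed `DocumentSnapshot` getters, and the tests below exercise them end to end. A minimal usage sketch follows; the collection path and field names are hypothetical, while the constructors, factory methods, and getters mirror the API surface shown in api.txt.

```java
// Illustrative sketch only (not part of the PR). Field and collection names are made up;
// the types and accessors correspond to the additions in api.txt.
import com.google.firebase.firestore.BsonBinaryData;
import com.google.firebase.firestore.BsonObjectId;
import com.google.firebase.firestore.BsonTimestamp;
import com.google.firebase.firestore.Decimal128Value;
import com.google.firebase.firestore.DocumentReference;
import com.google.firebase.firestore.FirebaseFirestore;
import com.google.firebase.firestore.Int32Value;
import com.google.firebase.firestore.MaxKey;
import com.google.firebase.firestore.MinKey;
import com.google.firebase.firestore.RegexValue;
import java.util.HashMap;
import java.util.Map;

public class BsonTypesUsageSketch {
  static void writeAndReadBack(FirebaseFirestore db) {
    DocumentReference docRef = db.collection("demo").document("bson-types");

    // Build a document containing each of the new value types.
    Map<String, Object> data = new HashMap<>();
    data.put("objectId", new BsonObjectId("507f191e810c19729de860ea"));
    data.put("regex", new RegexValue("^foo", "i"));
    data.put("timestamp", new BsonTimestamp(1, 2));
    data.put("binary", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}));
    data.put("int32", new Int32Value(1));
    data.put("decimal128", new Decimal128Value("1.2e3"));
    data.put("min", MinKey.instance());
    data.put("max", MaxKey.instance());

    // Write the document, then read it back with the new typed getters on DocumentSnapshot.
    docRef.set(data).addOnSuccessListener(unused ->
        docRef.get().addOnSuccessListener(snapshot -> {
          BsonObjectId objectId = snapshot.getBsonObjectId("objectId");
          RegexValue regex = snapshot.getRegexValue("regex");
          BsonTimestamp ts = snapshot.getBsonTimestamp("timestamp");
          BsonBinaryData binary = snapshot.getBsonBinaryData("binary");
          Int32Value int32 = snapshot.getInt32Value("int32");
          Decimal128Value decimal = snapshot.getDecimal128Value("decimal128");
        }));
  }
}
```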
+ +package com.google.firebase.firestore; + +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.assertSDKQueryResultsConsistentWithBackend; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testCollectionOnNightly; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testCollectionWithDocsOnNightly; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.waitFor; +import static com.google.firebase.firestore.testutil.TestUtil.map; +import static java.lang.Double.NaN; +import static java.lang.Double.POSITIVE_INFINITY; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import androidx.test.ext.junit.runners.AndroidJUnit4; +import com.google.firebase.firestore.Query.Direction; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Semaphore; +import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(AndroidJUnit4.class) +public class BsonTypesTest { + + @Test + public void writeAndReadBsonTypes() throws ExecutionException, InterruptedException { + Map expected = new HashMap<>(); + + DocumentReference docRef = + waitFor( + testCollectionOnNightly() + .add( + map( + "bsonObjectId", new BsonObjectId("507f191e810c19729de860ea"), + "regex", new RegexValue("^foo", "i"), + "bsonTimestamp", new BsonTimestamp(1, 2), + "bsonBinary", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}), + "int32", new Int32Value(1), + "decimal128", new Decimal128Value("1.2e3"), + "minKey", MinKey.instance(), + "maxKey", MaxKey.instance()))); + + waitFor( + docRef.set( + map( + "bsonObjectId", + new BsonObjectId("507f191e810c19729de860eb"), + "regex", + new RegexValue("^foo", "m"), + "bsonTimestamp", + new BsonTimestamp(1, 3)), + SetOptions.merge())); + + waitFor(docRef.update(map("int32", new Int32Value(2)))); + + expected.put("bsonObjectId", new BsonObjectId("507f191e810c19729de860eb")); + expected.put("regex", new RegexValue("^foo", "m")); + expected.put("bsonTimestamp", new BsonTimestamp(1, 3)); + expected.put("bsonBinary", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})); + expected.put("int32", new Int32Value(2)); + expected.put("decimal128", new Decimal128Value("1.2e3")); + expected.put("minKey", MinKey.instance()); + expected.put("maxKey", MaxKey.instance()); + + DocumentSnapshot actual = waitFor(docRef.get()); + + assertTrue(actual.get("bsonObjectId") instanceof BsonObjectId); + assertTrue(actual.get("regex") instanceof RegexValue); + assertTrue(actual.get("bsonTimestamp") instanceof BsonTimestamp); + assertTrue(actual.get("bsonBinary") instanceof BsonBinaryData); + assertTrue(actual.get("int32") instanceof Int32Value); + assertTrue(actual.get("decimal128") instanceof Decimal128Value); + assertTrue(actual.get("minKey") instanceof MinKey); + assertTrue(actual.get("maxKey") instanceof MaxKey); + assertEquals(expected, actual.getData()); + } + + @Test + public void writeAndReadBsonTypeOffline() throws ExecutionException, InterruptedException { + CollectionReference randomColl = testCollectionOnNightly(); + DocumentReference docRef = randomColl.document(); + + waitFor(randomColl.getFirestore().disableNetwork()); + + // Adding docs to cache, do not wait for promise to resolve. 
+ Map expected = new HashMap<>(); + docRef.set( + map( + "bsonObjectId", + new BsonObjectId("507f191e810c19729de860ea"), + "regex", + new RegexValue("^foo", "i"), + "bsonTimestamp", + new BsonTimestamp(1, 2), + "bsonBinary", + BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}), + "int32", + new Int32Value(1), + "decimal128", + new Decimal128Value("1.2e3"), + "minKey", + MinKey.instance(), + "maxKey", + MaxKey.instance())); + + docRef.update( + map( + "bsonObjectId", + new BsonObjectId("507f191e810c19729de860eb"), + "regex", + new RegexValue("^foo", "m"), + "bsonTimestamp", + new BsonTimestamp(1, 3))); + + expected.put("bsonObjectId", new BsonObjectId("507f191e810c19729de860eb")); + expected.put("regex", new RegexValue("^foo", "m")); + expected.put("bsonTimestamp", new BsonTimestamp(1, 3)); + expected.put("bsonBinary", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})); + expected.put("int32", new Int32Value(1)); + expected.put("decimal128", new Decimal128Value("1.2e3")); + expected.put("minKey", MinKey.instance()); + expected.put("maxKey", MaxKey.instance()); + + DocumentSnapshot actual = waitFor(docRef.get()); + + assertTrue(actual.get("bsonObjectId") instanceof BsonObjectId); + assertTrue(actual.get("regex") instanceof RegexValue); + assertTrue(actual.get("bsonTimestamp") instanceof BsonTimestamp); + assertTrue(actual.get("bsonBinary") instanceof BsonBinaryData); + assertTrue(actual.get("int32") instanceof Int32Value); + assertTrue(actual.get("decimal128") instanceof Decimal128Value); + assertTrue(actual.get("minKey") instanceof MinKey); + assertTrue(actual.get("maxKey") instanceof MaxKey); + assertEquals(expected, actual.getData()); + } + + @Test + public void listenToDocumentsWithBsonTypes() throws Throwable { + final Semaphore semaphore = new Semaphore(0); + ListenerRegistration registration = null; + CollectionReference randomColl = testCollectionOnNightly(); + DocumentReference ref = randomColl.document(); + AtomicReference failureMessage = new AtomicReference(null); + int totalPermits = 5; + + try { + registration = + randomColl + .whereEqualTo("purpose", "Bson types tests") + .addSnapshotListener( + (value, error) -> { + try { + DocumentSnapshot docSnap = + value.isEmpty() ? 
null : value.getDocuments().get(0); + + switch (semaphore.availablePermits()) { + case 0: + assertNull(docSnap); + ref.set( + map( + "purpose", + "Bson types tests", + "bsonObjectId", + new BsonObjectId("507f191e810c19729de860ea"), + "regex", + new RegexValue("^foo", "i"), + "bsonTimestamp", + new BsonTimestamp(1, 2), + "bsonBinary", + BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}), + "int32", + new Int32Value(1), + "decimal128", + new Decimal128Value("1.2e3"), + "minKey", + MinKey.instance(), + "maxKey", + MaxKey.instance())); + break; + case 1: + assertNotNull(docSnap); + + assertEquals( + docSnap.getBsonBinaryData("bsonBinary"), + BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})); + assertEquals( + docSnap.getBsonObjectId("bsonObjectId"), + new BsonObjectId("507f191e810c19729de860ea")); + assertEquals(docSnap.getRegexValue("regex"), new RegexValue("^foo", "i")); + assertEquals( + docSnap.getBsonTimestamp("bsonTimestamp"), new BsonTimestamp(1, 2)); + assertEquals(docSnap.getInt32Value("int32"), new Int32Value(1)); + assertEquals( + docSnap.getDecimal128Value("decimal128"), + new Decimal128Value("1.2e3")); + assertEquals(docSnap.getMinKey("minKey"), MinKey.instance()); + assertEquals(docSnap.getMaxKey("maxKey"), MaxKey.instance()); + + ref.set( + map( + "purpose", + "Bson types tests", + "bsonObjectId", + new BsonObjectId("507f191e810c19729de860eb"), + "regex", + new RegexValue("^foo", "m"), + "bsonTimestamp", + new BsonTimestamp(1, 3)), + SetOptions.merge()); + break; + case 2: + assertNotNull(docSnap); + + assertEquals( + docSnap.getBsonObjectId("bsonObjectId"), + new BsonObjectId("507f191e810c19729de860eb")); + assertEquals(docSnap.getRegexValue("regex"), new RegexValue("^foo", "m")); + assertEquals( + docSnap.getBsonTimestamp("bsonTimestamp"), new BsonTimestamp(1, 3)); + + ref.update(map("int32", new Int32Value(2))); + break; + case 3: + assertNotNull(docSnap); + + assertEquals(docSnap.getInt32Value("int32"), new Int32Value(2)); + + ref.delete(); + break; + case 4: + assertNull(docSnap); + break; + } + } catch (Throwable t) { + failureMessage.set(t); + semaphore.release(totalPermits); + } + + semaphore.release(); + }); + + semaphore.acquire(totalPermits); + } finally { + if (registration != null) { + registration.remove(); + } + + if (failureMessage.get() != null) { + throw failureMessage.get(); + } + } + } + + @Test + public void filterAndOrderBsonObjectIds() throws Exception { + Map> docs = + map( + "a", + map("key", new BsonObjectId("507f191e810c19729de860ea")), + "b", + map("key", new BsonObjectId("507f191e810c19729de860eb")), + "c", + map("key", new BsonObjectId("507f191e810c19729de860ec"))); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereGreaterThan("key", new BsonObjectId("507f191e810c19729de860ea")); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "b")); + + orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereNotEqualTo("key", new BsonObjectId("507f191e810c19729de860eb")); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "a")); + } + + @Test + public void filterAndOrderBsonTimestamps() throws Exception { + Map> docs = + map( + "a", + map("key", new BsonTimestamp(1, 1)), + "b", + map("key", new BsonTimestamp(1, 2)), + "c", + map("key", new BsonTimestamp(2, 1))); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + 
Query orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereGreaterThan("key", new BsonTimestamp(1, 1)); + + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "b")); + + orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereNotEqualTo("key", new BsonTimestamp(1, 2)); + + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "a")); + } + + @Test + public void filterAndOrderBsonBinaryData() throws Exception { + Map> docs = + map( + "a", + map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})), + "b", + map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4})), + "c", + map("key", BsonBinaryData.fromBytes(2, new byte[] {1, 2, 2}))); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereGreaterThan("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})); + + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "b")); + + orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereNotEqualTo("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4})); + + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "a")); + } + + @Test + public void filterAndOrderRegex() throws Exception { + Map> docs = + map( + "a", map("key", new RegexValue("^bar", "i")), + "b", map("key", new RegexValue("^bar", "m")), + "c", map("key", new RegexValue("^baz", "i"))); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereGreaterThan("key", new RegexValue("^bar", "i")); + + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "b")); + + orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereNotEqualTo("key", new RegexValue("^bar", "m")); + + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "a")); + } + + @Test + public void filterAndOrderInt32() throws Exception { + Map> docs = + map( + "a", map("key", new Int32Value(-1)), + "b", map("key", new Int32Value(1)), + "c", map("key", new Int32Value(2))); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query orderedQuery = + randomColl.orderBy("key", Direction.DESCENDING).whereGreaterThan("key", new Int32Value(-1)); + + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "b")); + + orderedQuery = + randomColl.orderBy("key", Direction.DESCENDING).whereNotEqualTo("key", new Int32Value(1)); + + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "a")); + } + + @Test + public void filterAndOrderDecimal128() throws Exception { + Map> docs = + map( + "a", + map("key", new Decimal128Value("-1.2e3")), + "b", + map("key", new Decimal128Value("0")), + "c", + map("key", new Decimal128Value("1.2e3")), + "d", + map("key", new Decimal128Value("NaN")), + "e", + map("key", new Decimal128Value("-Infinity")), + "f", + map("key", new Decimal128Value("Infinity"))); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereGreaterThan("key", new Decimal128Value("-1.2e3")); + 
assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("f", "c", "b")); + + orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereGreaterThan("key", new Decimal128Value("-1.2e-3")); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("f", "c", "b")); + + orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereNotEqualTo("key", new Decimal128Value("0.0")); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("f", "c", "a", "e", "d")); + + orderedQuery = randomColl.whereNotEqualTo("key", new Decimal128Value("NaN")); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("e", "a", "b", "c", "f")); + + orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereEqualTo("key", new Decimal128Value("1.2e3")); + assertSDKQueryResultsConsistentWithBackend(randomColl, orderedQuery, docs, Arrays.asList("c")); + + orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereNotEqualTo("key", new Decimal128Value("1.2e3")); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("f", "b", "a", "e", "d")); + + // Note: server is sending NaN incorrectly, but the SDK NotInFilter.matches gracefully handles + // it and removes the incorrect doc "d". + orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereNotIn( + "key", + Arrays.asList( + new Decimal128Value("1.2e3"), + new Decimal128Value("Infinity"), + new Decimal128Value("NaN"))); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("b", "a", "e")); + } + + @Test + public void filterAndOrderMinKey() throws Exception { + Map> docs = + map( + "a", map("key", MinKey.instance()), + "b", map("key", MinKey.instance()), + "c", map("key", null), + "d", map("key", 1L), + "e", map("key", MaxKey.instance())); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query query = + randomColl + .orderBy( + "key", + Direction + .DESCENDING) // minKeys are equal, would sort by documentId as secondary order + .whereEqualTo("key", MinKey.instance()); + + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("b", "a")); + + query = randomColl.whereNotEqualTo("key", MinKey.instance()); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("d", "e")); + + query = randomColl.whereGreaterThanOrEqualTo("key", MinKey.instance()); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("a", "b")); + + query = randomColl.whereLessThanOrEqualTo("key", MinKey.instance()); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("a", "b")); + + query = randomColl.whereGreaterThan("key", MinKey.instance()); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList()); + + query = randomColl.whereLessThan("key", MinKey.instance()); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList()); + } + + @Test + public void filterAndOrderMaxKey() throws Exception { + Map> docs = + map( + "a", map("key", MinKey.instance()), + "b", map("key", 1L), + "c", map("key", MaxKey.instance()), + "d", map("key", MaxKey.instance()), + "e", map("key", null)); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query query = + randomColl + .orderBy( + "key", +
Direction + .DESCENDING) // maxKeys are equal, would sort by documentId as secondary order + .whereEqualTo("key", MaxKey.instance()); + + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("d", "c")); + + query = randomColl.whereNotEqualTo("key", MaxKey.instance()); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("a", "b")); + + query = randomColl.whereGreaterThanOrEqualTo("key", MaxKey.instance()); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("c", "d")); + + query = randomColl.whereLessThanOrEqualTo("key", MaxKey.instance()); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("c", "d")); + + query = randomColl.whereLessThan("key", MaxKey.instance()); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList()); + + query = randomColl.whereGreaterThan("key", MaxKey.instance()); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList()); + } + + @Test + public void filterNullValueWithBsonTypes() throws Exception { + Map> docs = + map( + "a", map("key", MinKey.instance()), + "b", map("key", null), + "c", map("key", null), + "d", map("key", 1L), + "e", map("key", MaxKey.instance())); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query query = randomColl.whereEqualTo("key", null); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("b", "c")); + + query = randomColl.whereNotEqualTo("key", null); + assertSDKQueryResultsConsistentWithBackend( + randomColl, query, docs, Arrays.asList("a", "d", "e")); + } + + @Test + public void filterAndOrderNumericalValues() throws Exception { + Map> docs = + map( + "a", + map("key", new Decimal128Value("-1.2e3")), // -1200 + "b", + map("key", new Int32Value(0)), + "c", + map("key", new Decimal128Value("1")), + "d", + map("key", new Int32Value(1)), + "e", + map("key", 1L), + "f", + map("key", 1.0), + "g", + map("key", new Decimal128Value("1.2e-3")), // 0.0012 + "h", + map("key", new Int32Value(2)), + "i", + map("key", new Decimal128Value("NaN")), + "j", + map("key", new Decimal128Value("-Infinity")), + "k", + map("key", NaN), + "l", + map("key", POSITIVE_INFINITY)); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query orderedQuery = randomColl.orderBy("key", Direction.DESCENDING); + assertSDKQueryResultsConsistentWithBackend( + randomColl, + orderedQuery, + docs, + Arrays.asList( + "l", // Infinity + "h", // 2 + "f", // 1.0 + "e", // 1 + "d", // 1 + "c", // 1 + "g", // 0.0012 + "b", // 0 + "a", // -1200 + "j", // -Infinity + "k", // NaN + "i" // NaN + )); + + orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereNotEqualTo("key", new Decimal128Value("1.0")); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("l", "h", "g", "b", "a", "j", "k", "i")); + + orderedQuery = randomColl.orderBy("key", Direction.DESCENDING).whereEqualTo("key", 1); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("f", "e", "d", "c")); + } + + @Test + public void decimal128ValuesWithNo2sComplementRepresentation() throws Exception { + // For decimal128 values with no 2's complement representation, it is considered not equal to + // a double with the same value, e.g, 1.1. 
+ Map> docs = + map( + "a", + map("key", new Decimal128Value("-1.1e-3")), // -0.0011 + "b", + map("key", new Decimal128Value("1.1")), + "c", + map("key", 1.1), + "d", + map("key", 1.0), + "e", + map("key", new Decimal128Value("1.1e-3")) // 0.0011 + ); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query orderedQuery = randomColl.whereEqualTo("key", new Decimal128Value("1.1")); + assertSDKQueryResultsConsistentWithBackend(randomColl, orderedQuery, docs, Arrays.asList("b")); + + orderedQuery = randomColl.whereNotEqualTo("key", new Decimal128Value("1.1")); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("a", "e", "d", "c")); + + orderedQuery = randomColl.whereEqualTo("key", 1.1); + assertSDKQueryResultsConsistentWithBackend(randomColl, orderedQuery, docs, Arrays.asList("c")); + + orderedQuery = randomColl.whereNotEqualTo("key", 1.1); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("a", "e", "d", "b")); + } + + @Test + public void orderBsonTypesTogether() throws Exception { + Map> docs = + map( + "bsonObjectId1", + map("key", new BsonObjectId("507f191e810c19729de860ea")), + "bsonObjectId2", + map("key", new BsonObjectId("507f191e810c19729de860eb")), + "bsonObjectId3", + map("key", new BsonObjectId("407f191e810c19729de860ea")), + "regex1", + map("key", new RegexValue("^bar", "m")), + "regex2", + map("key", new RegexValue("^bar", "i")), + "regex3", + map("key", new RegexValue("^baz", "i")), + "bsonTimestamp1", + map("key", new BsonTimestamp(2, 0)), + "bsonTimestamp2", + map("key", new BsonTimestamp(1, 2)), + "bsonTimestamp3", + map("key", new BsonTimestamp(1, 1)), + "bsonBinary1", + map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})), + "bsonBinary2", + map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4})), + "bsonBinary3", + map("key", BsonBinaryData.fromBytes(2, new byte[] {1, 2, 2})), + "int32Value1", + map("key", new Int32Value(-1)), + "int32Value2", + map("key", new Int32Value(1)), + "int32Value3", + map("key", new Int32Value(0)), + "decimal128Value1", + map("key", new Decimal128Value("-1.2e3")), + "decimal128Value2", + map("key", new Decimal128Value("-0.0")), + "decimal128Value3", + map("key", new Decimal128Value("1.2e3")), + "minKey1", + map("key", MinKey.instance()), + "minKey2", + map("key", MinKey.instance()), + "maxKey1", + map("key", MaxKey.instance()), + "maxKey2", + map("key", MaxKey.instance())); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query orderedQuery = randomColl.orderBy("key", Direction.DESCENDING); + List expectedDocs = + Arrays.asList( + "maxKey2", + "maxKey1", + "regex3", + "regex1", + "regex2", + "bsonObjectId2", + "bsonObjectId1", + "bsonObjectId3", + "bsonBinary3", + "bsonBinary2", + "bsonBinary1", + "bsonTimestamp1", + "bsonTimestamp2", + "bsonTimestamp3", + // Int32Value and Decimal128Value are sorted together + "decimal128Value3", + "int32Value2", + // Int32Value of 0 equals to Decimal128Value of 0, and falls to document key as second + // order + "int32Value3", + "decimal128Value2", + "int32Value1", + "decimal128Value1", + "minKey2", + "minKey1"); + + assertSDKQueryResultsConsistentWithBackend(randomColl, orderedQuery, docs, expectedDocs); + } + + @Test + public void canRunTransactionsOnDocumentsWithBsonTypes() throws Exception { + Map> docs = + map( + "a", map("key", new BsonObjectId("507f191e810c19729de860ea")), + "b", map("key", new RegexValue("^foo", "i")), + "c", map("key", 
BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + waitFor( + randomColl.firestore.runTransaction( + transaction -> { + DocumentSnapshot docSnap = transaction.get(randomColl.document("a")); + assertEquals( + docSnap.getBsonObjectId("key"), new BsonObjectId("507f191e810c19729de860ea")); + transaction.update(randomColl.document("b"), "key", new RegexValue("^bar", "i")); + transaction.delete(randomColl.document("c")); + return null; + })); + + QuerySnapshot getSnapshot = waitFor(randomColl.get()); + + List getSnapshotDocIds = + getSnapshot.getDocuments().stream().map(ds -> ds.getId()).collect(Collectors.toList()); + + assertTrue(getSnapshotDocIds.equals(Arrays.asList("a", "b"))); + assertEquals( + getSnapshot.getDocuments().get(0).getBsonObjectId("key"), + new BsonObjectId("507f191e810c19729de860ea")); + assertEquals( + getSnapshot.getDocuments().get(1).getRegexValue("key"), new RegexValue("^bar", "i")); + } +} diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/FirestoreTest.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/FirestoreTest.java index 6afbd54b60f..84f0e0f3000 100644 --- a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/FirestoreTest.java +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/FirestoreTest.java @@ -15,6 +15,7 @@ package com.google.firebase.firestore; import static com.google.firebase.firestore.AccessHelper.getAsyncQueue; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.assertSDKQueryResultsConsistentWithBackend; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.checkOnlineAndOfflineResultsMatch; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.isRunningAgainstEmulator; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.newTestSettings; @@ -64,7 +65,6 @@ import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Semaphore; -import java.util.stream.Collectors; import org.junit.After; import org.junit.Test; import org.junit.runner.RunWith; @@ -1497,28 +1497,27 @@ public void testCanGetSameOrDifferentPersistentCacheIndexManager() { } @Test - public void snapshotListenerSortsQueryByDocumentIdsSameAsGetQuery() { + public void snapshotListenerSortsQueryByDocumentIdsSameAsGetQuery() throws Exception { Map> testDocs = map( - "A", map("a", 1), - "a", map("a", 1), - "Aa", map("a", 1), - "7", map("a", 1), - "12", map("a", 1), - "__id7__", map("a", 1), - "__id12__", map("a", 1), - "__id-2__", map("a", 1), - "__id1_", map("a", 1), - "_id1__", map("a", 1), - "__id", map("a", 1), - "__id9223372036854775807__", map("a", 1), - "__id-9223372036854775808__", map("a", 1)); + "A", map("a", 1L), + "a", map("a", 1L), + "Aa", map("a", 1L), + "7", map("a", 1L), + "12", map("a", 1L), + "__id7__", map("a", 1L), + "__id12__", map("a", 1L), + "__id-2__", map("a", 1L), + "__id1_", map("a", 1L), + "_id1__", map("a", 1L), + "__id", map("a", 1L), + "__id9223372036854775807__", map("a", 1L), + "__id-9223372036854775808__", map("a", 1L)); CollectionReference colRef = testCollectionWithDocs(testDocs); - - // Run get query Query orderedQuery = colRef.orderBy(FieldPath.documentId()); - List expectedDocIds = + + List expectedDocs = Arrays.asList( "__id-9223372036854775808__", "__id-2__", @@ -1534,106 +1533,61 @@ public void snapshotListenerSortsQueryByDocumentIdsSameAsGetQuery() { "_id1__", "a"); 
- QuerySnapshot getSnapshot = waitFor(orderedQuery.get()); - List getSnapshotDocIds = - getSnapshot.getDocuments().stream().map(ds -> ds.getId()).collect(Collectors.toList()); - - // Run query with snapshot listener - EventAccumulator eventAccumulator = new EventAccumulator(); - ListenerRegistration registration = - orderedQuery.addSnapshotListener(eventAccumulator.listener()); - - List watchSnapshotDocIds = new ArrayList<>(); - try { - QuerySnapshot watchSnapshot = eventAccumulator.await(); - watchSnapshotDocIds = - watchSnapshot.getDocuments().stream() - .map(documentSnapshot -> documentSnapshot.getId()) - .collect(Collectors.toList()); - } finally { - registration.remove(); - } - - // Assert that get and snapshot listener requests sort docs in the same, expected order - assertTrue(getSnapshotDocIds.equals(expectedDocIds)); - assertTrue(watchSnapshotDocIds.equals(expectedDocIds)); + assertSDKQueryResultsConsistentWithBackend(colRef, orderedQuery, testDocs, expectedDocs); } @Test - public void snapshotListenerSortsFilteredQueryByDocumentIdsSameAsGetQuery() { + public void snapshotListenerSortsFilteredQueryByDocumentIdsSameAsGetQuery() throws Exception { Map> testDocs = map( - "A", map("a", 1), - "a", map("a", 1), - "Aa", map("a", 1), - "7", map("a", 1), - "12", map("a", 1), - "__id7__", map("a", 1), - "__id12__", map("a", 1), - "__id-2__", map("a", 1), - "__id1_", map("a", 1), - "_id1__", map("a", 1), - "__id", map("a", 1), - "__id9223372036854775807__", map("a", 1), - "__id-9223372036854775808__", map("a", 1)); + "A", map("a", 1L), + "a", map("a", 1L), + "Aa", map("a", 1L), + "7", map("a", 1L), + "12", map("a", 1L), + "__id7__", map("a", 1L), + "__id12__", map("a", 1L), + "__id-2__", map("a", 1L), + "__id1_", map("a", 1L), + "_id1__", map("a", 1L), + "__id", map("a", 1L), + "__id9223372036854775807__", map("a", 1L), + "__id-9223372036854775808__", map("a", 1L)); CollectionReference colRef = testCollectionWithDocs(testDocs); - - // Run get query Query filteredQuery = colRef .whereGreaterThan(FieldPath.documentId(), "__id7__") .whereLessThanOrEqualTo(FieldPath.documentId(), "A") .orderBy(FieldPath.documentId()); - List expectedDocIds = - Arrays.asList("__id12__", "__id9223372036854775807__", "12", "7", "A"); - - QuerySnapshot getSnapshot = waitFor(filteredQuery.get()); - List getSnapshotDocIds = - getSnapshot.getDocuments().stream().map(ds -> ds.getId()).collect(Collectors.toList()); - // Run query with snapshot listener - EventAccumulator eventAccumulator = new EventAccumulator(); - ListenerRegistration registration = - filteredQuery.addSnapshotListener(eventAccumulator.listener()); - - List watchSnapshotDocIds = new ArrayList<>(); - try { - QuerySnapshot watchSnapshot = eventAccumulator.await(); - watchSnapshotDocIds = - watchSnapshot.getDocuments().stream() - .map(documentSnapshot -> documentSnapshot.getId()) - .collect(Collectors.toList()); - } finally { - registration.remove(); - } + List expectedDocs = + Arrays.asList("__id12__", "__id9223372036854775807__", "12", "7", "A"); - // Assert that get and snapshot listener requests sort docs in the same, expected order - assertTrue(getSnapshotDocIds.equals(expectedDocIds)); - assertTrue(watchSnapshotDocIds.equals(expectedDocIds)); + assertSDKQueryResultsConsistentWithBackend(colRef, filteredQuery, testDocs, expectedDocs); } @Test public void sdkOrdersQueryByDocumentIdTheSameWayOnlineAndOffline() { Map> testDocs = map( - "A", map("a", 1), - "a", map("a", 1), - "Aa", map("a", 1), - "7", map("a", 1), - "12", map("a", 1), - "__id7__", map("a", 
1), - "__id12__", map("a", 1), - "__id-2__", map("a", 1), - "__id1_", map("a", 1), - "_id1__", map("a", 1), - "__id", map("a", 1), - "__id9223372036854775807__", map("a", 1), - "__id-9223372036854775808__", map("a", 1)); + "A", map("a", 1L), + "a", map("a", 1L), + "Aa", map("a", 1L), + "7", map("a", 1L), + "12", map("a", 1L), + "__id7__", map("a", 1L), + "__id12__", map("a", 1L), + "__id-2__", map("a", 1L), + "__id1_", map("a", 1L), + "_id1__", map("a", 1L), + "__id", map("a", 1L), + "__id9223372036854775807__", map("a", 1L), + "__id-9223372036854775808__", map("a", 1L)); CollectionReference colRef = testCollectionWithDocs(testDocs); - // Test query Query orderedQuery = colRef.orderBy(FieldPath.documentId()); + List expectedDocIds = Arrays.asList( "__id-9223372036854775808__", @@ -1655,7 +1609,7 @@ public void sdkOrdersQueryByDocumentIdTheSameWayOnlineAndOffline() { } @Test - public void snapshotListenerSortsUnicodeStringsAsServer() { + public void snapshotListenerSortsUnicodeStringsAsServer() throws Exception { Map> testDocs = map( "a", @@ -1683,36 +1637,15 @@ public void snapshotListenerSortsUnicodeStringsAsServer() { CollectionReference colRef = testCollectionWithDocs(testDocs); Query orderedQuery = colRef.orderBy("value"); + List expectedDocIds = Arrays.asList("b", "a", "h", "i", "c", "f", "e", "d", "g", "k", "j"); - QuerySnapshot getSnapshot = waitFor(orderedQuery.get()); - List getSnapshotDocIds = - getSnapshot.getDocuments().stream().map(ds -> ds.getId()).collect(Collectors.toList()); - - EventAccumulator eventAccumulator = new EventAccumulator(); - ListenerRegistration registration = - orderedQuery.addSnapshotListener(eventAccumulator.listener()); - - List watchSnapshotDocIds = new ArrayList<>(); - try { - QuerySnapshot watchSnapshot = eventAccumulator.await(); - watchSnapshotDocIds = - watchSnapshot.getDocuments().stream() - .map(documentSnapshot -> documentSnapshot.getId()) - .collect(Collectors.toList()); - } finally { - registration.remove(); - } - - assertTrue(getSnapshotDocIds.equals(expectedDocIds)); - assertTrue(watchSnapshotDocIds.equals(expectedDocIds)); - - checkOnlineAndOfflineResultsMatch(colRef, orderedQuery, expectedDocIds.toArray(new String[0])); + assertSDKQueryResultsConsistentWithBackend(colRef, orderedQuery, testDocs, expectedDocIds); } @Test - public void snapshotListenerSortsUnicodeStringsInArrayAsServer() { + public void snapshotListenerSortsUnicodeStringsInArrayAsServer() throws Exception { Map> testDocs = map( "a", @@ -1740,36 +1673,15 @@ public void snapshotListenerSortsUnicodeStringsInArrayAsServer() { CollectionReference colRef = testCollectionWithDocs(testDocs); Query orderedQuery = colRef.orderBy("value"); + List expectedDocIds = Arrays.asList("b", "a", "h", "i", "c", "f", "e", "d", "g", "k", "j"); - QuerySnapshot getSnapshot = waitFor(orderedQuery.get()); - List getSnapshotDocIds = - getSnapshot.getDocuments().stream().map(ds -> ds.getId()).collect(Collectors.toList()); - - EventAccumulator eventAccumulator = new EventAccumulator(); - ListenerRegistration registration = - orderedQuery.addSnapshotListener(eventAccumulator.listener()); - - List watchSnapshotDocIds = new ArrayList<>(); - try { - QuerySnapshot watchSnapshot = eventAccumulator.await(); - watchSnapshotDocIds = - watchSnapshot.getDocuments().stream() - .map(documentSnapshot -> documentSnapshot.getId()) - .collect(Collectors.toList()); - } finally { - registration.remove(); - } - - assertTrue(getSnapshotDocIds.equals(expectedDocIds)); - assertTrue(watchSnapshotDocIds.equals(expectedDocIds)); 
- - checkOnlineAndOfflineResultsMatch(colRef, orderedQuery, expectedDocIds.toArray(new String[0])); + assertSDKQueryResultsConsistentWithBackend(colRef, orderedQuery, testDocs, expectedDocIds); } @Test - public void snapshotListenerSortsUnicodeStringsInMapAsServer() { + public void snapshotListenerSortsUnicodeStringsInMapAsServer() throws Exception { Map> testDocs = map( "a", @@ -1797,36 +1709,15 @@ public void snapshotListenerSortsUnicodeStringsInMapAsServer() { CollectionReference colRef = testCollectionWithDocs(testDocs); Query orderedQuery = colRef.orderBy("value"); + List expectedDocIds = Arrays.asList("b", "a", "h", "i", "c", "f", "e", "d", "g", "k", "j"); - QuerySnapshot getSnapshot = waitFor(orderedQuery.get()); - List getSnapshotDocIds = - getSnapshot.getDocuments().stream().map(ds -> ds.getId()).collect(Collectors.toList()); - - EventAccumulator eventAccumulator = new EventAccumulator(); - ListenerRegistration registration = - orderedQuery.addSnapshotListener(eventAccumulator.listener()); - - List watchSnapshotDocIds = new ArrayList<>(); - try { - QuerySnapshot watchSnapshot = eventAccumulator.await(); - watchSnapshotDocIds = - watchSnapshot.getDocuments().stream() - .map(documentSnapshot -> documentSnapshot.getId()) - .collect(Collectors.toList()); - } finally { - registration.remove(); - } - - assertTrue(getSnapshotDocIds.equals(expectedDocIds)); - assertTrue(watchSnapshotDocIds.equals(expectedDocIds)); - - checkOnlineAndOfflineResultsMatch(colRef, orderedQuery, expectedDocIds.toArray(new String[0])); + assertSDKQueryResultsConsistentWithBackend(colRef, orderedQuery, testDocs, expectedDocIds); } @Test - public void snapshotListenerSortsUnicodeStringsInMapKeyAsServer() { + public void snapshotListenerSortsUnicodeStringsInMapKeyAsServer() throws Exception { Map> testDocs = map( "a", @@ -1854,36 +1745,15 @@ public void snapshotListenerSortsUnicodeStringsInMapKeyAsServer() { CollectionReference colRef = testCollectionWithDocs(testDocs); Query orderedQuery = colRef.orderBy("value"); + List expectedDocIds = Arrays.asList("b", "a", "h", "i", "c", "f", "e", "d", "g", "k", "j"); - QuerySnapshot getSnapshot = waitFor(orderedQuery.get()); - List getSnapshotDocIds = - getSnapshot.getDocuments().stream().map(ds -> ds.getId()).collect(Collectors.toList()); - - EventAccumulator eventAccumulator = new EventAccumulator(); - ListenerRegistration registration = - orderedQuery.addSnapshotListener(eventAccumulator.listener()); - - List watchSnapshotDocIds = new ArrayList<>(); - try { - QuerySnapshot watchSnapshot = eventAccumulator.await(); - watchSnapshotDocIds = - watchSnapshot.getDocuments().stream() - .map(documentSnapshot -> documentSnapshot.getId()) - .collect(Collectors.toList()); - } finally { - registration.remove(); - } - - assertTrue(getSnapshotDocIds.equals(expectedDocIds)); - assertTrue(watchSnapshotDocIds.equals(expectedDocIds)); - - checkOnlineAndOfflineResultsMatch(colRef, orderedQuery, expectedDocIds.toArray(new String[0])); + assertSDKQueryResultsConsistentWithBackend(colRef, orderedQuery, testDocs, expectedDocIds); } @Test - public void snapshotListenerSortsUnicodeStringsInDocumentKeyAsServer() { + public void snapshotListenerSortsUnicodeStringsInDocumentKeyAsServer() throws Exception { Map> testDocs = map( "Łukasiewicz", @@ -1911,38 +1781,16 @@ public void snapshotListenerSortsUnicodeStringsInDocumentKeyAsServer() { CollectionReference colRef = testCollectionWithDocs(testDocs); Query orderedQuery = colRef.orderBy(FieldPath.documentId()); + List expectedDocIds = Arrays.asList( 
"Sierpiński", "Łukasiewicz", "你好", "你顥", "岩澤", "︒", "P", "🄟", "🐵", "😀", "😁"); - QuerySnapshot getSnapshot = waitFor(orderedQuery.get()); - List getSnapshotDocIds = - getSnapshot.getDocuments().stream().map(ds -> ds.getId()).collect(Collectors.toList()); - - EventAccumulator eventAccumulator = new EventAccumulator(); - ListenerRegistration registration = - orderedQuery.addSnapshotListener(eventAccumulator.listener()); - - List watchSnapshotDocIds = new ArrayList<>(); - try { - QuerySnapshot watchSnapshot = eventAccumulator.await(); - watchSnapshotDocIds = - watchSnapshot.getDocuments().stream() - .map(documentSnapshot -> documentSnapshot.getId()) - .collect(Collectors.toList()); - } finally { - registration.remove(); - } - - assertTrue(getSnapshotDocIds.equals(expectedDocIds)); - assertTrue(watchSnapshotDocIds.equals(expectedDocIds)); - - checkOnlineAndOfflineResultsMatch(colRef, orderedQuery, expectedDocIds.toArray(new String[0])); + assertSDKQueryResultsConsistentWithBackend(colRef, orderedQuery, testDocs, expectedDocIds); } @Test - public void snapshotListenerSortsInvalidUnicodeStringsAsServer() { - // Note: Protocol Buffer converts any invalid surrogates to "?". + public void snapshotListenerSortsInvalidUnicodeStringsAsServer() throws Exception { Map> testDocs = map( "a", @@ -1962,30 +1810,27 @@ public void snapshotListenerSortsInvalidUnicodeStringsAsServer() { CollectionReference colRef = testCollectionWithDocs(testDocs); Query orderedQuery = colRef.orderBy("value"); - List expectedDocIds = Arrays.asList("a", "d", "e", "f", "g", "b", "c"); - - QuerySnapshot getSnapshot = waitFor(orderedQuery.get()); - List getSnapshotDocIds = - getSnapshot.getDocuments().stream().map(ds -> ds.getId()).collect(Collectors.toList()); - EventAccumulator eventAccumulator = new EventAccumulator(); - ListenerRegistration registration = - orderedQuery.addSnapshotListener(eventAccumulator.listener()); - - List watchSnapshotDocIds = new ArrayList<>(); - try { - QuerySnapshot watchSnapshot = eventAccumulator.await(); - watchSnapshotDocIds = - watchSnapshot.getDocuments().stream() - .map(documentSnapshot -> documentSnapshot.getId()) - .collect(Collectors.toList()); - } finally { - registration.remove(); - } + List expectedDocIds = Arrays.asList("a", "d", "e", "f", "g", "b", "c"); - assertTrue(getSnapshotDocIds.equals(expectedDocIds)); - assertTrue(watchSnapshotDocIds.equals(expectedDocIds)); + // Note: Protocol Buffer converts any invalid surrogates to "?". 
+ Map> actualDocs = + map( + "a", + map("value", "Z"), + "b", + map("value", "你好"), + "c", + map("value", "😀"), + "d", + map("value", "ab?"), + "e", + map("value", "ab?"), + "f", + map("value", "ab??"), + "g", + map("value", "ab??")); - checkOnlineAndOfflineResultsMatch(colRef, orderedQuery, expectedDocIds.toArray(new String[0])); + assertSDKQueryResultsConsistentWithBackend(colRef, orderedQuery, actualDocs, expectedDocIds); } } diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/POJOTest.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/POJOTest.java index 8cceddb7188..cd463f457a1 100644 --- a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/POJOTest.java +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/POJOTest.java @@ -15,7 +15,9 @@ package com.google.firebase.firestore; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testCollection; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testCollectionOnNightly; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testDocument; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testDocumentOnNightly; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.waitFor; import static com.google.firebase.firestore.testutil.TestUtil.expectError; import static com.google.firebase.firestore.testutil.TestUtil.map; @@ -47,6 +49,14 @@ public static final class POJO { Blob blob; GeoPoint geoPoint; DocumentReference documentReference; + BsonObjectId bsonObjectId; + BsonBinaryData bsonBinaryData; + BsonTimestamp bsonTimestamp; + RegexValue regexValue; + Int32Value int32Value; + Decimal128Value decimal128Value; + MinKey minKey; + MaxKey maxKey; public POJO() {} @@ -60,6 +70,14 @@ public POJO(double number, String str, DocumentReference documentReference) { this.timestamp = new Timestamp(123, 123456000); this.blob = Blob.fromBytes(new byte[] {3, 1, 4, 1, 5}); this.geoPoint = new GeoPoint(3.1415, 9.2653); + this.bsonObjectId = new BsonObjectId("507f191e810c19729de860ea"); + this.bsonBinaryData = BsonBinaryData.fromBytes(1, new byte[] {3, 1, 4, 1, 5}); + this.bsonTimestamp = new BsonTimestamp(1, 2); + this.regexValue = new RegexValue("^foo", "i"); + this.int32Value = new Int32Value(1); + this.decimal128Value = new Decimal128Value("1.2e3"); + this.minKey = MinKey.instance(); + this.maxKey = MaxKey.instance(); } public double getNumber() { @@ -118,6 +136,70 @@ public void setDocumentReference(DocumentReference documentReference) { this.documentReference = documentReference; } + public BsonObjectId getBsonObjectId() { + return bsonObjectId; + } + + public void setBsonObjectId(BsonObjectId bsonObjectId) { + this.bsonObjectId = bsonObjectId; + } + + public BsonBinaryData getBsonBinaryData() { + return bsonBinaryData; + } + + public void setBsonBinaryData(BsonBinaryData bsonBinaryData) { + this.bsonBinaryData = bsonBinaryData; + } + + public BsonTimestamp getBsonTimestamp() { + return bsonTimestamp; + } + + public void setBsonTimestamp(BsonTimestamp bsonTimestamp) { + this.bsonTimestamp = bsonTimestamp; + } + + public RegexValue getRegexValue() { + return regexValue; + } + + public void setRegexValue(RegexValue regexValue) { + this.regexValue = regexValue; + } + + public Int32Value getInt32Value() { + return int32Value; + } + + public void setInt32Value(Int32Value int32Value) { + this.int32Value = int32Value; + } + + public Decimal128Value 
getDecimal128Value() { + return decimal128Value; + } + + public void setDecimal128Value(Decimal128Value decimal128Value) { + this.decimal128Value = decimal128Value; + } + + public MinKey getMinKey() { + return minKey; + } + + public void setMinKey(MinKey minKey) { + this.minKey = minKey; + } + + public MaxKey getMaxKey() { + return maxKey; + } + + public void setMaxKey(MaxKey maxKey) { + this.maxKey = maxKey; + } + @Override public boolean equals(Object o) { if (this == o) { @@ -147,6 +229,30 @@ public boolean equals(Object o) { if (!geoPoint.equals(pojo.geoPoint)) { return false; } + if (!bsonBinaryData.equals(pojo.bsonBinaryData)) { + return false; + } + if (!bsonTimestamp.equals(pojo.bsonTimestamp)) { + return false; + } + if (!bsonObjectId.equals(pojo.bsonObjectId)) { + return false; + } + if (!regexValue.equals(pojo.regexValue)) { + return false; + } + if (!int32Value.equals(pojo.int32Value)) { + return false; + } + if (!decimal128Value.equals(pojo.decimal128Value)) { + return false; + } + if (!minKey.equals(pojo.minKey)) { + return false; + } + if (!maxKey.equals(pojo.maxKey)) { + return false; + } // TODO: Implement proper equality on DocumentReference. return documentReference.getPath().equals(pojo.documentReference.getPath()); @@ -164,6 +270,14 @@ public int hashCode() { result = 31 * result + blob.hashCode(); result = 31 * result + geoPoint.hashCode(); result = 31 * result + documentReference.getPath().hashCode(); + result = 31 * result + bsonObjectId.hashCode(); + result = 31 * result + bsonBinaryData.hashCode(); + result = 31 * result + bsonTimestamp.hashCode(); + result = 31 * result + regexValue.hashCode(); + result = 31 * result + int32Value.hashCode(); + result = 31 * result + decimal128Value.hashCode(); + result = 31 * result + minKey.hashCode(); + result = 31 * result + maxKey.hashCode(); return result; } } @@ -236,7 +350,7 @@ public void tearDown() { @Test public void testWriteAndRead() { - CollectionReference collection = testCollection(); + CollectionReference collection = testCollectionOnNightly(); POJO data = new POJO(1.0, "a", collection.document()); DocumentReference reference = waitFor(collection.add(data)); DocumentSnapshot doc = waitFor(reference.get()); @@ -260,7 +374,7 @@ public void testDocumentIdAnnotation() { @Test public void testSetMerge() { - CollectionReference collection = testCollection(); + CollectionReference collection = testCollectionOnNightly(); POJO data = new POJO(1.0, "a", collection.document()); DocumentReference reference = waitFor(collection.add(data)); DocumentSnapshot doc = waitFor(reference.get()); @@ -277,7 +391,7 @@ public void testSetMerge() { // General smoke test that makes sure APIs accept POJOs. @Test public void testAPIsAcceptPOJOsForFields() { - DocumentReference ref = testDocument(); + DocumentReference ref = testDocumentOnNightly(); ArrayList> tasks = new ArrayList<>(); // as Map<> entries in a set() call. @@ -296,7 +410,7 @@ public void testAPIsAcceptPOJOsForFields() { // as Query parameters. 
data.setBlob(null); // blobs are broken, see b/117680212 - tasks.add(testCollection().whereEqualTo("field", data).get()); + tasks.add(testCollectionOnNightly().whereEqualTo("field", data).get()); waitFor(Tasks.whenAll(tasks)); } diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/TypeTest.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/TypeTest.java index 664c2207843..5d618549f6c 100644 --- a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/TypeTest.java +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/TypeTest.java @@ -14,17 +14,23 @@ package com.google.firebase.firestore; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.assertSDKQueryResultsConsistentWithBackend; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testCollection; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testCollectionOnNightly; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testDocumentOnNightly; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.waitFor; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.writeTestDocsOnCollection; import static com.google.firebase.firestore.testutil.TestUtil.blob; import static com.google.firebase.firestore.testutil.TestUtil.map; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import androidx.test.ext.junit.runners.AndroidJUnit4; import com.google.firebase.Timestamp; import com.google.firebase.firestore.testutil.IntegrationTestUtil; +import com.google.protobuf.ByteString; import java.util.Arrays; import java.util.Collections; import java.util.Date; @@ -87,9 +93,267 @@ public void testCanReadAndWriteDates() { verifySuccessfulWriteReadCycle(map("date", new Timestamp(date)), testDoc()); } + @Test + public void testCanReadAndWriteDocumentReferences() { + DocumentReference docRef = testDoc(); + Map data = map("a", 42L, "ref", docRef); + verifySuccessfulWriteReadCycle(data, docRef); + } + + @Test + public void testCanReadAndWriteDocumentReferencesInLists() { + DocumentReference docRef = testDoc(); + List refs = Collections.singletonList(docRef); + Map data = map("a", 42L, "refs", refs); + verifySuccessfulWriteReadCycle(data, docRef); + } + + @Test + public void testCanReadAndWriteMinKey() { + verifySuccessfulWriteReadCycle(map("minKey", MinKey.instance()), testDocumentOnNightly()); + } + + @Test + public void testCanReadAndWriteMaxKey() { + verifySuccessfulWriteReadCycle(map("maxKey", MaxKey.instance()), testDocumentOnNightly()); + } + + @Test + public void testCanReadAndWriteRegexValue() { + verifySuccessfulWriteReadCycle( + map("regex", new RegexValue("^foo", "i")), testDocumentOnNightly()); + } + + @Test + public void testCanReadAndWriteInt32Value() { + verifySuccessfulWriteReadCycle(map("int32", new Int32Value(1)), testDocumentOnNightly()); + } + + @Test + public void testCanReadAndWriteDecimal128Value() { + Map decimal128Values = + map( + "decimalSciPositive", new Decimal128Value("1.2e3"), + "decimalSciNegative", new Decimal128Value("-1.2e3"), + "decimalSciNegativeExponent", new Decimal128Value("1.2e-3"), + "decimalSciNegativeValueAndExponent", new Decimal128Value("-1.2e-3"), + "decimalSciExplicitPositiveExponent", new Decimal128Value("1.2e+3"), + "decimalFloatPositive", new Decimal128Value("1.1"), + 
"decimalIntNegative", new Decimal128Value("-1"), + "decimalZeroNegative", new Decimal128Value("-0"), + "decimalZeroInt", new Decimal128Value("0"), + "decimalZeroFloat", new Decimal128Value("0.0"), + "decimalNaN", new Decimal128Value("NaN"), + "decimalInfinityPositive", new Decimal128Value("Infinity"), + "decimalInfinityNegative", new Decimal128Value("-Infinity")); + verifySuccessfulWriteReadCycle(decimal128Values, testDocumentOnNightly()); + } + + @Test + public void testCanReadAndWriteBsonTimestampValue() { + verifySuccessfulWriteReadCycle( + map("bsonTimestamp", new BsonTimestamp(1, 2)), testDocumentOnNightly()); + } + + @Test + public void testCanReadAndWriteBsonObjectIdValue() { + verifySuccessfulWriteReadCycle( + map("bsonObjectId", new BsonObjectId("507f191e810c19729de860ea")), testDocumentOnNightly()); + } + + @Test + public void testCanReadAndWriteBsonBinaryValue() { + verifySuccessfulWriteReadCycle( + map("bsonBinary", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})), + testDocumentOnNightly()); + + verifySuccessfulWriteReadCycle( + map("bsonBinary", BsonBinaryData.fromBytes(128, new byte[] {1, 2, 3})), + testDocumentOnNightly()); + + verifySuccessfulWriteReadCycle( + map("bsonBinary", BsonBinaryData.fromByteString(255, ByteString.EMPTY)), + testDocumentOnNightly()); + } + + @Test + public void testCanReadAndWriteBsonTypesInLists() { + List data = + Arrays.asList( + new BsonObjectId("507f191e810c19729de860ea"), + new RegexValue("^foo", "i"), + new BsonTimestamp(1, 2), + BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}), + new Int32Value(1), + new Decimal128Value("1.2e3"), + MinKey.instance(), + MaxKey.instance()); + + verifySuccessfulWriteReadCycle(map("BsonTypes", data), testDocumentOnNightly()); + } + + @Test + public void testCanReadAndWriteBsonTypesInMaps() { + Map data = + map( + "bsonObjectId", + new BsonObjectId("507f191e810c19729de860ea"), + "regex", + new RegexValue("^foo", "i"), + "bsonTimestamp", + new BsonTimestamp(1, 2), + "bsonBinary", + BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}), + "int32", + new Int32Value(1), + "decimal128", + new Decimal128Value("1.2e3"), + "minKey", + MinKey.instance(), + "maxKey", + MaxKey.instance()); + + verifySuccessfulWriteReadCycle(map("BsonTypes", data), testDocumentOnNightly()); + } + + @Test + public void invalidRegexGetsRejected() throws Exception { + Exception error = null; + try { + waitFor(testDocumentOnNightly().set(map("key", new RegexValue("foo", "a")))); + } catch (Exception e) { + error = e; + } + assertNotNull(error); + assertTrue( + error + .getMessage() + .contains( + "Invalid regex option 'a'. 
Supported options are 'i', 'm', 's', 'u', and 'x'")); + } + + @Test + public void invalidDecimal128ValueGetsRejected() throws Exception { + Exception error = null; + try { + waitFor(testDocumentOnNightly().set(map("key", new Decimal128Value("")))); + } catch (Exception e) { + error = e; + } + assertNotNull(error); + assertTrue(error.getMessage().contains("Invalid number")); + + try { + waitFor(testDocumentOnNightly().set(map("key", new Decimal128Value("abc")))); + } catch (Exception e) { + error = e; + } + assertNotNull(error); + assertTrue(error.getMessage().contains("Invalid number")); + + try { + waitFor(testDocumentOnNightly().set(map("key", new Decimal128Value("1 23.45")))); + } catch (Exception e) { + error = e; + } + assertNotNull(error); + assertTrue(error.getMessage().contains("Invalid number")); + + try { + waitFor(testDocumentOnNightly().set(map("key", new Decimal128Value("1e1234567890")))); + } catch (Exception e) { + error = e; + } + assertNotNull(error); + assertTrue(error.getMessage().contains("Exponent too large")); + } + + @Test + public void invalidBsonObjectIdGetsRejected() throws Exception { + Exception error = null; + try { + // bsonObjectId with length not equal to 24 gets rejected + waitFor(testDocumentOnNightly().set(map("key", new BsonObjectId("foobar")))); + } catch (Exception e) { + error = e; + } + assertNotNull(error); + assertTrue(error.getMessage().contains("Object ID hex string has incorrect length.")); + } + + @Test + public void invalidBsonBinaryDataGetsRejected() throws Exception { + Exception error = null; + try { + waitFor( + testDocumentOnNightly() + .set(map("key", BsonBinaryData.fromBytes(1234, new byte[] {1, 2, 3})))); + } catch (Exception e) { + error = e; + } + assertNotNull(error); + assertTrue( + error + .getMessage() + .contains( + "The subtype for BsonBinaryData must be a value in the inclusive [0, 255] range.")); + } + + @Test + public void invalidBsonTimestampDataGetsRejected() throws Exception { + Exception error = null; + try { + waitFor(testDocumentOnNightly().set(map("key", new BsonTimestamp(-1, 1)))); + } catch (Exception e) { + error = e; + } + assertNotNull(error); + assertTrue( + error + .getMessage() + .contains( + "The field 'seconds' value (-1) does not represent an unsigned 32-bit integer.")); + + try { + waitFor(testDocumentOnNightly().set(map("key", new BsonTimestamp(4294967296L, 1)))); + } catch (Exception e) { + error = e; + } + assertNotNull(error); + assertTrue( + error + .getMessage() + .contains( + "The field 'seconds' value (4294967296) does not represent an unsigned 32-bit integer.")); + + try { + waitFor(testDocumentOnNightly().set(map("key", new BsonTimestamp(1, -1)))); + } catch (Exception e) { + error = e; + } + assertNotNull(error); + assertTrue( + error + .getMessage() + .contains( + "The field 'increment' value (-1) does not represent an unsigned 32-bit integer.")); + + try { + waitFor(testDocumentOnNightly().set(map("key", new BsonTimestamp(1, 4294967296L)))); + } catch (Exception e) { + error = e; + } + assertNotNull(error); + assertTrue( + error + .getMessage() + .contains( + "The field 'increment' value (4294967296) does not represent an unsigned 32-bit integer.")); + } + @Test public void testCanUseTypedAccessors() { - DocumentReference doc = testDoc(); + DocumentReference doc = testDocumentOnNightly(); Map data = map( "null", @@ -111,7 +375,29 @@ public void testCanUseTypedAccessors() { "timestamp", new Timestamp(100, 123000000), "reference", - doc); + doc, + "array", + Arrays.asList(1.0, "foo", 
map("nested", true), null), + "map", + map("key", true), + "vector", + FieldValue.vector(new double[] {1, 2, 3}), + "regex", + new RegexValue("^foo", "i"), + "int32", + new Int32Value(1), + "decimal128", + new Decimal128Value("1.2e3"), + "bsonTimestamp", + new BsonTimestamp(1, 2), + "bsonObjectId", + new BsonObjectId("507f191e810c19729de860ea"), + "bsonBinary", + BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}), + "minKey", + MinKey.instance(), + "maxKey", + MaxKey.instance()); waitFor(doc.set(data)); DocumentSnapshot snapshot = waitFor(doc.get()); @@ -132,11 +418,22 @@ public void testCanUseTypedAccessors() { assertEquals(timestamp.toDate(), snapshot.getDate("timestamp")); assertTrue(data.get("reference") instanceof DocumentReference); assertEquals(((DocumentReference) data.get("reference")).getPath(), doc.getPath()); + assertEquals(data.get("array"), snapshot.get("array")); + assertEquals(data.get("map"), snapshot.get("map")); + assertEquals(data.get("vector"), snapshot.getVectorValue("vector")); + assertEquals(data.get("regex"), snapshot.getRegexValue("regex")); + assertEquals(data.get("int32"), snapshot.getInt32Value("int32")); + assertEquals(data.get("decimal128"), snapshot.getDecimal128Value("decimal128")); + assertEquals(data.get("bsonTimestamp"), snapshot.getBsonTimestamp("bsonTimestamp")); + assertEquals(data.get("bsonObjectId"), snapshot.getBsonObjectId("bsonObjectId")); + assertEquals(data.get("bsonBinary"), snapshot.getBsonBinaryData("bsonBinary")); + assertEquals(data.get("minKey"), snapshot.getMinKey("minKey")); + assertEquals(data.get("maxKey"), snapshot.getMaxKey("maxKey")); } @Test public void testTypeAccessorsCanReturnNull() { - DocumentReference doc = testDoc(); + DocumentReference doc = testDocumentOnNightly(); Map data = map(); waitFor(doc.set(data)); @@ -153,20 +450,97 @@ public void testTypeAccessorsCanReturnNull() { assertNull(snapshot.getDate("missing")); assertNull(snapshot.getTimestamp("missing")); assertNull(snapshot.getDocumentReference("missing")); + assertNull(snapshot.getVectorValue("missing")); + assertNull(snapshot.getRegexValue("missing")); + assertNull(snapshot.getInt32Value("missing")); + assertNull(snapshot.getDecimal128Value("missing")); + assertNull(snapshot.getBsonTimestamp("missing")); + assertNull(snapshot.getBsonObjectId("missing")); + assertNull(snapshot.getBsonBinaryData("missing")); + assertNull(snapshot.getMinKey("missing")); + assertNull(snapshot.getMaxKey("missing")); } @Test - public void testCanReadAndWriteDocumentReferences() { - DocumentReference docRef = testDoc(); - Map data = map("a", 42L, "ref", docRef); - verifySuccessfulWriteReadCycle(data, docRef); - } + public void snapshotListenerSortsDifferentTypesSameAsServer() throws Exception { + CollectionReference colRef = testCollectionOnNightly(); + // Document reference needs to be created first to make sure it is using the same firestore + // instance in creation + DocumentReference docRef = colRef.document("testDocRef"); - @Test - public void testCanReadAndWriteDocumentReferencesInLists() { - DocumentReference docRef = testDoc(); - List refs = Collections.singletonList(docRef); - Map data = map("a", 42L, "refs", refs); - verifySuccessfulWriteReadCycle(data, docRef); + Map> testDocs = + map( + "null", + map("value", null), + "min", + map("value", MinKey.instance()), + "boolean", + map("value", true), + "nan", + map("value", Double.NaN), + "int32", + map("value", new Int32Value(1)), + "decimal128", + map("value", new Decimal128Value("1.2e3")), + "double", + map("value", 1.0), + "int", 
+ map("value", 1L), + "timestamp", + map("value", new Timestamp(100, 123000000)), + "bsonTimestamp", + map("value", new BsonTimestamp(1, 2)), + "string", + map("value", "a"), + "bytes", + map("value", blob(1, 2, 3)), + "bsonBinary", + map("value", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})), + "reference", + map("value", docRef), + "bsonObjectId", + map("value", new BsonObjectId("507f191e810c19729de860ea")), + "geoPoint", + map("value", new GeoPoint(1.23, 4.56)), + "regex", + map("value", new RegexValue("^foo", "i")), + "array", + map("value", Arrays.asList(1.0, "foo", map("key", true), null)), + "vector", + map("value", FieldValue.vector(new double[] {1, 2, 3})), + "map", + map("value", map("key", true)), + "max", + map("value", MaxKey.instance())); + + writeTestDocsOnCollection(colRef, testDocs); + + Query orderedQuery = colRef.orderBy("value"); + List expectedDocs = + Arrays.asList( + "null", + "min", + "boolean", + "nan", + "double", + "int", + "int32", + "decimal128", + "timestamp", + "bsonTimestamp", + "string", + "bytes", + "bsonBinary", + "reference", + "bsonObjectId", + "geoPoint", + "regex", + "array", + "vector", + "map", + "max"); + + // Assert that get and snapshot listener requests sort docs in the same, expected order + assertSDKQueryResultsConsistentWithBackend(colRef, orderedQuery, testDocs, expectedDocs); } } diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/testutil/IntegrationTestUtil.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/testutil/IntegrationTestUtil.java index dd676b5f0ab..be27c7a5200 100644 --- a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/testutil/IntegrationTestUtil.java +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/testutil/IntegrationTestUtil.java @@ -18,7 +18,9 @@ import static com.google.firebase.firestore.util.Util.autoId; import static java.util.Arrays.asList; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; import android.content.Context; import android.os.StrictMode; @@ -205,6 +207,36 @@ public static FirebaseFirestore testFirestore() { return testFirestore(newTestSettings()); } + public static FirebaseFirestore testFirestoreOnNightly() { + FirebaseFirestoreSettings settings = + new FirebaseFirestoreSettings.Builder() + .setHost("test-firestore.sandbox.googleapis.com") + .setSslEnabled(true) + .build(); + + DatabaseId databaseId = DatabaseId.forDatabase("firestore-sdk-nightly", "(default)"); + + String persistenceKey = "db" + firestoreStatus.size(); + + return testFirestore(databaseId, Level.DEBUG, settings, persistenceKey); + } + + public static CollectionReference testCollectionOnNightly() { + return testFirestoreOnNightly().collection(autoId()); + } + + public static DocumentReference testDocumentOnNightly() { + return testCollectionOnNightly().document(); + } + + public static CollectionReference testCollectionWithDocsOnNightly( + Map> docs) { + CollectionReference collection = testCollectionOnNightly(); + CollectionReference writer = testFirestoreOnNightly().collection(collection.getId()); + writeAllDocs(writer, docs); + return collection; + } + /** * Initializes a new Firestore instance that uses the default project, customized with the * provided settings. 
@@ -366,6 +398,12 @@ public static CollectionReference testCollectionWithDocs(Map> docs) { + CollectionReference writer = testFirestoreOnNightly().collection(collection.getId()); + writeAllDocs(writer, docs); + } + public static void writeAllDocs( CollectionReference collection, Map> docs) { WriteBatch writeBatch = null; @@ -561,4 +599,69 @@ public static void checkOnlineAndOfflineResultsMatch( assertEquals(expectedDocIds, querySnapshotToIds(docsFromServer)); } } + + // Asserts that the given query produces the expected result for all of the + // following scenarios: + // 1. Performing the given query using source=server, compare with expected result and populate + // cache. + // 2. Performing the given query using source=cache, compare with server result and expected + // result. + // 3. Using a snapshot listener to raise snapshots from cache and server, compare them with + // expected result. + public static void assertSDKQueryResultsConsistentWithBackend( + Query collection, + Query query, + Map> allData, + List expectedDocIds) + throws Exception { + // Check the cache round trip first to make sure cache is properly populated, otherwise the + // snapshot listener below will return partial results from previous + // "assertSDKQueryResultsConsistentWithBackend" calls if it is called multiple times in one test + checkOnlineAndOfflineResultsMatch(collection, query, expectedDocIds.toArray(new String[0])); + + EventAccumulator eventAccumulator = new EventAccumulator<>(); + ListenerRegistration registration = + query.addSnapshotListener(MetadataChanges.INCLUDE, eventAccumulator.listener()); + List watchSnapshots; + try { + watchSnapshots = eventAccumulator.await(2); + } finally { + registration.remove(); + } + assertTrue(watchSnapshots.get(0).getMetadata().isFromCache()); + verifySnapshot(watchSnapshots.get(0), allData, expectedDocIds); + assertFalse(watchSnapshots.get(1).getMetadata().isFromCache()); + verifySnapshot(watchSnapshots.get(1), allData, expectedDocIds); + } + + public static void verifySnapshot( + QuerySnapshot snapshot, + Map> allData, + List expectedDocIds) { + List snapshotDocIds = querySnapshotToIds(snapshot); + assertEquals( + String.format( + "Did not get the same document size. Expected doc size: %d, Actual doc size: %d ", + expectedDocIds.size(), snapshotDocIds.size()), + expectedDocIds.size(), + snapshotDocIds.size()); + assertTrue( + String.format( + "Did not get the expected document IDs. Expected doc IDs: %s, Actual doc IDs: %s ", + expectedDocIds, snapshotDocIds), + expectedDocIds.equals(snapshotDocIds)); + + Map actualDocs = toDataMap(snapshot); + + for (String docId : expectedDocIds) { + Map expectedDoc = allData.get(docId); + Map actualDoc = (Map) actualDocs.get(docId); + + assertTrue( + String.format( + "Did not get the expected document content. Expected doc: %s, Actual doc: %s ", + expectedDoc, actualDoc), + expectedDoc.equals(actualDoc)); + } + } } diff --git a/firebase-firestore/src/main/java/com/google/cloud/datastore/core/number/NumberComparisonHelper.java b/firebase-firestore/src/main/java/com/google/cloud/datastore/core/number/NumberComparisonHelper.java index 6af2ea76995..acb2ba06da5 100644 --- a/firebase-firestore/src/main/java/com/google/cloud/datastore/core/number/NumberComparisonHelper.java +++ b/firebase-firestore/src/main/java/com/google/cloud/datastore/core/number/NumberComparisonHelper.java @@ -14,6 +14,8 @@ package com.google.cloud.datastore.core.number; +import com.google.firebase.firestore.Quadruple; + /** A utility class for comparing numbers. 
*/ public final class NumberComparisonHelper { @@ -95,5 +97,28 @@ public static int firestoreCompareDoubles(double leftDouble, double rightDouble) } } + /** + * Compares Quadruples with Firestore query semantics: NaN precedes all other numbers and equals + * itself, all zeroes are equal. + * + * @return a negative integer, zero, or a positive integer as the first argument is less than, + * equal to, or greater than the second. + */ + public static int firestoreCompareQuadruples(Quadruple left, Quadruple right) { + // For the purposes of comparison, Firestore considers -0 and +0 to be equal. + if ((left.isZero() && right.isZero())) { + return 0; + } + + // NaN sorts equal to itself and before any other number. + if (left.isNaN()) { + return right.isNaN() ? 0 : -1; + } else if (right.isNaN()) { + return 1; + } + + return left.compareTo(right); + } + private NumberComparisonHelper() {} } diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/BsonBinaryData.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/BsonBinaryData.java new file mode 100644 index 00000000000..19e64892012 --- /dev/null +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/BsonBinaryData.java @@ -0,0 +1,118 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.firebase.firestore; + +import com.google.protobuf.ByteString; +import java.util.Objects; +import javax.annotation.Nonnull; + +/** Represents a BSON Binary data type in Firestore documents. */ +public final class BsonBinaryData { + private final int subtype; + private final ByteString data; + + private BsonBinaryData(int subtype, @Nonnull ByteString data) { + // By definition the subtype should be 1 byte and should therefore + // have a value between 0 and 255 + if (subtype < 0 || subtype > 255) { + throw new IllegalArgumentException( + "The subtype for BsonBinaryData must be a value in the inclusive [0, 255] range."); + } + this.subtype = subtype; + this.data = data; + } + + /** + * Creates a new BsonBinaryData instance from the provided ByteString and subtype. + * + * @param subtype The subtype to use for this instance. + * @param byteString The byteString to use for this instance. + * @return The new BsonBinaryData instance + */ + @Nonnull + public static BsonBinaryData fromByteString(int subtype, @Nonnull ByteString byteString) { + return new BsonBinaryData(subtype, byteString); + } + + /** + * Creates a new BsonBinaryData instance from the provided bytes and subtype. Makes a copy of the + * bytes passed in. + * + * @param subtype The subtype to use for this instance. + * @param bytes The bytes to use for this instance. + * @return The new BsonBinaryData instance + */ + @Nonnull + public static BsonBinaryData fromBytes(int subtype, @Nonnull byte[] bytes) { + return new BsonBinaryData(subtype, ByteString.copyFrom(bytes)); + } + + /** + * Returns the underlying data as a ByteString. + * + * @return The data as a ByteString. 
+ */ + @Nonnull + public ByteString dataAsByteString() { + return data; + } + + /** + * Returns a copy of the underlying data as a byte[] array. + * + * @return The data as a byte[] array. + */ + @Nonnull + public byte[] dataAsBytes() { + return data.toByteArray(); + } + + /** + * Returns the subtype of this binary data. + * + * @return The subtype of the binary data. + */ + public int subtype() { + return subtype; + } + + /** + * Returns true if this BsonBinaryData is equal to the provided object. + * + * @param obj The object to compare against. + * @return Whether this BsonBinaryData is equal to the provided object. + */ + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof BsonBinaryData)) { + return false; + } + BsonBinaryData other = (BsonBinaryData) obj; + return subtype == other.subtype && data.equals(other.data); + } + + @Override + public int hashCode() { + return Objects.hash(subtype, data); + } + + @Override + public String toString() { + return "BsonBinaryData{subtype=" + subtype + ", data=" + data + "}"; + } +} diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/BsonObjectId.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/BsonObjectId.java new file mode 100644 index 00000000000..2e7e12c3ad8 --- /dev/null +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/BsonObjectId.java @@ -0,0 +1,59 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.firebase.firestore; + +import androidx.annotation.NonNull; + +/** Represents a BSON ObjectId type in Firestore documents. */ +public final class BsonObjectId { + public final String value; + + /** + * Constructor that creates a new BSON ObjectId value with the given value. + * + * @param oid The 24-character hex string representing the ObjectId. + */ + public BsonObjectId(@NonNull String oid) { + this.value = oid; + } + + /** + * Returns true if this BsonObjectId is equal to the provided object. + * + * @param obj The object to compare against. + * @return Whether this BsonObjectId is equal to the provided object. 
+ */ + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof BsonObjectId)) { + return false; + } + BsonObjectId other = (BsonObjectId) obj; + return value.equals(other.value); + } + + @Override + public int hashCode() { + return value.hashCode(); + } + + @Override + public String toString() { + return "BsonObjectId{value='" + value + "'}"; + } +} diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/BsonTimestamp.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/BsonTimestamp.java new file mode 100644 index 00000000000..394a46cab84 --- /dev/null +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/BsonTimestamp.java @@ -0,0 +1,72 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.firebase.firestore; + +/** Represents a BSON Timestamp type in Firestore documents. */ +public final class BsonTimestamp { + public final long seconds; + public final long increment; + + /** + * Constructor that creates a new BSON Timestamp value with the given values. + * + * @param seconds An unsigned 32-bit integer value stored as long representing the seconds. + * @param increment An unsigned 32-bit integer value stored as long representing the increment. + */ + public BsonTimestamp(long seconds, long increment) { + if (seconds < 0 || seconds > 4294967295L) { + throw new IllegalArgumentException( + String.format( + "The field 'seconds' value (%s) does not represent an unsigned 32-bit integer.", + seconds)); + } + if (increment < 0 || increment > 4294967295L) { + throw new IllegalArgumentException( + String.format( + "The field 'increment' value (%s) does not represent an unsigned 32-bit integer.", + increment)); + } + this.seconds = seconds; + this.increment = increment; + } + + /** + * Returns true if this BsonTimestampValue is equal to the provided object. + * + * @param obj The object to compare against. + * @return Whether this BsonTimestampValue is equal to the provided object. 
+ */ + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof BsonTimestamp)) { + return false; + } + BsonTimestamp other = (BsonTimestamp) obj; + return seconds == other.seconds && increment == other.increment; + } + + @Override + public int hashCode() { + return (int) (31 * seconds + increment); + } + + @Override + public String toString() { + return "BsonTimestampValue{seconds=" + seconds + ", increment=" + increment + "}"; + } +} diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/Decimal128Value.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/Decimal128Value.java new file mode 100644 index 00000000000..173a917969f --- /dev/null +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/Decimal128Value.java @@ -0,0 +1,68 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.firebase.firestore; + +import androidx.annotation.NonNull; +import java.util.Objects; + +/** Represents a 128-bit decimal type in Firestore documents. */ +public final class Decimal128Value { + public final String stringValue; + final Quadruple value; + + public Decimal128Value(@NonNull String val) { + this.stringValue = val; + this.value = Quadruple.fromString(val); + } + + /** + * Returns true if this Decimal128Value is equal to the provided object. + * + * @param obj The object to compare against. + * @return Whether this Decimal128Value is equal to the provided object. + */ + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + Quadruple otherValue = ((Decimal128Value) obj).value; + + // Firestore considers +0 and -0 to be equal. + if (this.value.isZero() && otherValue.isZero()) { + return true; + } + return this.value.compareTo(otherValue) == 0; + } + + @Override + public int hashCode() { + // Since +0 and -0 are considered equal, they should have the same hash code. + if (this.value.isZero()) { + return Objects.hash(Quadruple.POSITIVE_ZERO); + } + return this.value.hashCode(); + } + + @Override + public String toString() { + return "Decimal128Value{value=" + this.stringValue + "}"; + } +} diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/DocumentSnapshot.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/DocumentSnapshot.java index 4540608fc48..1e09299675c 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/DocumentSnapshot.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/DocumentSnapshot.java @@ -497,6 +497,102 @@ public VectorValue getVectorValue(@NonNull String field) { return (VectorValue) get(field); } + /** + * Returns the value of the field as a MinKey. + * + * @param field The path to the field. + * @throws RuntimeException if the value is not a MinKey. + * @return The value of the field. 
+ */ + @Nullable + public MinKey getMinKey(@NonNull String field) { + return (MinKey) get(field); + } + + /** + * Returns the value of the field as a MaxKey. + * + * @param field The path to the field. + * @throws RuntimeException if the value is not a MaxKey. + * @return The value of the field. + */ + @Nullable + public MaxKey getMaxKey(@NonNull String field) { + return (MaxKey) get(field); + } + + /** + * Returns the value of the field as a RegexValue. + * + * @param field The path to the field. + * @throws RuntimeException if the value is not a RegexValue. + * @return The value of the field. + */ + @Nullable + public RegexValue getRegexValue(@NonNull String field) { + return (RegexValue) get(field); + } + + /** + * Returns the value of the field as a 32-bit integer. + * + * @param field The path to the field. + * @throws RuntimeException if the value is not a Int32Value. + * @return The value of the field. + */ + @Nullable + public Int32Value getInt32Value(@NonNull String field) { + return (Int32Value) get(field); + } + + /** + * Returns the value of the field as a 128-bit decimal. + * + * @param field The path to the field. + * @throws RuntimeException if the value is not a Decimal128Value. + * @return The value of the field. + */ + @Nullable + public Decimal128Value getDecimal128Value(@NonNull String field) { + return (Decimal128Value) get(field); + } + + /** + * Returns the value of the field as a BsonObjectId. + * + * @param field The path to the field. + * @throws RuntimeException if the value is not a BsonObjectId. + * @return The value of the field. + */ + @Nullable + public BsonObjectId getBsonObjectId(@NonNull String field) { + return (BsonObjectId) get(field); + } + + /** + * Returns the value of the field as a BsonTimestampValue. + * + * @param field The path to the field. + * @throws RuntimeException if the value is not a BsonTimestampValue. + * @return The value of the field. + */ + @Nullable + public BsonTimestamp getBsonTimestamp(@NonNull String field) { + return (BsonTimestamp) get(field); + } + + /** + * Returns the value of the field as a BsonBinaryData. + * + * @param field The path to the field. + * @throws RuntimeException if the value is not a BsonBinaryData. + * @return The value of the field. + */ + @Nullable + public BsonBinaryData getBsonBinaryData(@NonNull String field) { + return (BsonBinaryData) get(field); + } + @Nullable private T getTypedValue(String field, Class clazz) { checkNotNull(field, "Provided field must not be null."); diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/FieldValue.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/FieldValue.java index 48f67e50e12..f899457acdb 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/FieldValue.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/FieldValue.java @@ -48,7 +48,7 @@ String getMethodName() { } } - /* {@code FieldValue} class for {@link #arrayUnion()} transforms. */ + /** {@code FieldValue} class for {@link #arrayUnion()} transforms. */ static class ArrayUnionFieldValue extends FieldValue { private final List elements; @@ -66,7 +66,7 @@ List getElements() { } } - /* {@code FieldValue} class for {@link #arrayRemove()} transforms. */ + /** {@code FieldValue} class for {@link #arrayRemove()} transforms. */ static class ArrayRemoveFieldValue extends FieldValue { private final List elements; @@ -84,7 +84,7 @@ List getElements() { } } - /* {@code FieldValue} class for {@link #increment()} transforms. 
*/ + /** {@code FieldValue} class for {@link #increment()} transforms. */ static class NumericIncrementFieldValue extends FieldValue { private final Number operand; diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/FirebaseFirestore.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/FirebaseFirestore.java index c1218829b8a..2b067e28d1f 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/FirebaseFirestore.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/FirebaseFirestore.java @@ -691,7 +691,6 @@ private Task clearPersistence(Executor executor) { }); return source.getTask(); } - ; /** * Attaches a listener for a snapshots-in-sync event. The snapshots-in-sync event indicates that diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/Int32Value.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/Int32Value.java new file mode 100644 index 00000000000..edcc47c3964 --- /dev/null +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/Int32Value.java @@ -0,0 +1,52 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.firebase.firestore; + +/** Represents a 32-bit integer type in Firestore documents. */ +public final class Int32Value { + public final int value; + + public Int32Value(int value) { + this.value = value; + } + + /** + * Returns true if this Int32Value is equal to the provided object. + * + * @param obj The object to compare against. + * @return Whether this Int32Value is equal to the provided object. + */ + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof Int32Value)) { + return false; + } + Int32Value other = (Int32Value) obj; + return value == other.value; + } + + @Override + public int hashCode() { + return value; + } + + @Override + public String toString() { + return "Int32Value{value=" + value + "}"; + } +} diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/MaxKey.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/MaxKey.java new file mode 100644 index 00000000000..4d43dae7f1d --- /dev/null +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/MaxKey.java @@ -0,0 +1,45 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
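A short sketch of how the MinKey/MaxKey singletons pair with the new DocumentSnapshot accessors (again assuming the testDocumentOnNightly, waitFor, and map helpers from the integration-test utilities; not taken verbatim from the diff):

DocumentReference doc = testDocumentOnNightly();
waitFor(doc.set(map("low", MinKey.instance(), "high", MaxKey.instance())));
DocumentSnapshot snap = waitFor(doc.get());
// MinKey and MaxKey have private constructors, so the accessors can only ever hand back
// the singletons returned by instance(), and equals() reduces to identity.
assertEquals(MinKey.instance(), snap.getMinKey("low"));
assertEquals(MaxKey.instance(), snap.getMaxKey("high"));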
+ +package com.google.firebase.firestore; + +import androidx.annotation.NonNull; + +/** Represents the Firestore "Max Key" data type. */ +public final class MaxKey { + private static final MaxKey INSTANCE = new MaxKey(); + + private MaxKey() {} + + @NonNull + public static MaxKey instance() { + return INSTANCE; + } + + /** + * Returns true if this MaxKey is equal to the provided object. + * + * @param obj The object to compare against. + * @return Whether this MaxKey is equal to the provided object. + */ + @Override + public boolean equals(Object obj) { + return obj == INSTANCE; + } + + @Override + public int hashCode() { + return MaxKey.class.getName().hashCode(); + } +} diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/MinKey.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/MinKey.java new file mode 100644 index 00000000000..e815d1fd3e1 --- /dev/null +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/MinKey.java @@ -0,0 +1,45 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.firebase.firestore; + +import androidx.annotation.NonNull; + +/** Represents the Firestore "Min Key" data type. */ +public final class MinKey { + private static final MinKey INSTANCE = new MinKey(); + + private MinKey() {} + + @NonNull + public static MinKey instance() { + return INSTANCE; + } + + /** + * Returns true if this MinKey is equal to the provided object. + * + * @param obj The object to compare against. + * @return Whether this MinKey is equal to the provided object. + */ + @Override + public boolean equals(Object obj) { + return obj == INSTANCE; + } + + @Override + public int hashCode() { + return MinKey.class.getName().hashCode(); + } +} diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/Quadruple.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/Quadruple.java new file mode 100644 index 00000000000..5ab0c13c1c9 --- /dev/null +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/Quadruple.java @@ -0,0 +1,309 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.firebase.firestore; + +import static com.google.firebase.firestore.QuadrupleBuilder.EXPONENT_OF_INFINITY; + +/** + * A 128-bit binary floating point number which supports comparisons and creation from long, double + * and string. + * + * @param negative the sign of the number. 
+ * @param biasedExponent the unsigned and biased (by 0x7FFF_FFFF) binary exponent. + * @param mantHi the unsigned high 64 bits of the mantissa (leading 1 omitted). + * @param mantLo the unsigned low 64 bits of the mantissa. + * + * This class is for internal usage only and should not be exposed externally. + * @hide + */ +public final class Quadruple implements Comparable { + public static final Quadruple POSITIVE_ZERO = new Quadruple(false, 0, 0, 0); + public static final Quadruple NEGATIVE_ZERO = new Quadruple(true, 0, 0, 0); + public static final Quadruple NaN = new Quadruple(false, (int) EXPONENT_OF_INFINITY, 1L << 63, 0); + public static final Quadruple NEGATIVE_INFINITY = + new Quadruple(true, (int) EXPONENT_OF_INFINITY, 0, 0); + public static final Quadruple POSITIVE_INFINITY = + new Quadruple(false, (int) EXPONENT_OF_INFINITY, 0, 0); + private static final Quadruple MIN_LONG = new Quadruple(true, bias(63), 0, 0); + private static final Quadruple POSITIVE_ONE = new Quadruple(false, bias(0), 0, 0); + private static final Quadruple NEGATIVE_ONE = new Quadruple(true, bias(0), 0, 0); + private final boolean negative; + private final int biasedExponent; + private final long mantHi; + private final long mantLo; + + /** + * Build a new quadruple from its raw representation - sign, biased exponent, 128-bit mantissa. + */ + public Quadruple(boolean negative, int biasedExponent, long mantHi, long mantLo) { + this.negative = negative; + this.biasedExponent = biasedExponent; + this.mantHi = mantHi; + this.mantLo = mantLo; + } + + /** Return the sign of this {@link Quadruple}. */ + public boolean negative() { + return negative; + } + + /** Return the unsigned-32-bit biased exponent of this {@link Quadruple}. */ + public int biasedExponent() { + return biasedExponent; + } + + /** Return the high-order unsigned-64-bits of the mantissa of this {@link Quadruple}. */ + public long mantHi() { + return mantHi; + } + + /** Return the low-order unsigned-64-bits of the mantissa of this {@link Quadruple}. */ + public long mantLo() { + return mantLo; + } + + /** Return the (unbiased) exponent of this {@link Quadruple}. */ + public int exponent() { + return biasedExponent - QuadrupleBuilder.EXPONENT_BIAS; + } + + /** Return true if this {@link Quadruple} is -0 or +0 */ + public boolean isZero() { + return biasedExponent == 0 && mantHi == 0 && mantLo == 0; + } + + /** Return true if this {@link Quadruple} is -infinity or +infinity */ + public boolean isInfinite() { + return biasedExponent == (int) EXPONENT_OF_INFINITY && mantHi == 0 && mantLo == 0; + } + + /** Return true if this {@link Quadruple} is a NaN. 
*/ + public boolean isNaN() { + return biasedExponent == (int) EXPONENT_OF_INFINITY && !(mantHi == 0 && mantLo == 0); + } + + // equals (and hashCode) follow Double.equals: all NaNs are equal and -0 != 0 + @Override + public boolean equals(Object other) { + if (!(other instanceof Quadruple)) { + return false; + } + Quadruple otherQuadruple = (Quadruple) other; + if (isNaN()) { + return otherQuadruple.isNaN(); + } else { + return negative == otherQuadruple.negative + && biasedExponent == otherQuadruple.biasedExponent + && mantHi == otherQuadruple.mantHi + && mantLo == otherQuadruple.mantLo; + } + } + + @Override + public int hashCode() { + if (isNaN()) { + return HASH_NAN; + } else { + int hashCode = Boolean.hashCode(negative); + hashCode = hashCode * 31 + Integer.hashCode(biasedExponent); + hashCode = hashCode * 31 + Long.hashCode(mantHi); + hashCode = hashCode * 31 + Long.hashCode(mantLo); + return hashCode; + } + } + + private static final int HASH_NAN = 31 * 31 * Integer.hashCode((int) EXPONENT_OF_INFINITY); + + // Compare two quadruples, with -0 < 0, and all NaNs equal and larger than all numbers. + @Override + public int compareTo(Quadruple other) { + if (isNaN()) { + return other.isNaN() ? 0 : 1; + } + if (other.isNaN()) { + return -1; + } + int lessThan; + int greaterThan; + if (negative) { + if (!other.negative) { + return -1; + } + lessThan = 1; + greaterThan = -1; + } else { + if (other.negative) { + return 1; + } + lessThan = -1; + greaterThan = 1; + } + int expCompare = Integer.compareUnsigned(biasedExponent, other.biasedExponent); + if (expCompare < 0) { + return lessThan; + } + if (expCompare > 0) { + return greaterThan; + } + int mantHiCompare = Long.compareUnsigned(mantHi, other.mantHi); + if (mantHiCompare < 0) { + return lessThan; + } + if (mantHiCompare > 0) { + return greaterThan; + } + int mantLoCompare = Long.compareUnsigned(mantLo, other.mantLo); + if (mantLoCompare < 0) { + return lessThan; + } + if (mantLoCompare > 0) { + return greaterThan; + } + return 0; + } + + public static Quadruple fromLong(long value) { + if (value == Long.MIN_VALUE) { + return MIN_LONG; + } + if (value == 0) { + return POSITIVE_ZERO; + } + if (value == 1) { + return POSITIVE_ONE; + } + if (value == -1) { + return NEGATIVE_ONE; + } + boolean negative = value < 0; + if (negative) { + value = -value; + } + // Left-justify with the leading 1 dropped - value=0 or 1 is handled separately above, so + // leadingZeros+1 <= 63. + int leadingZeros = Long.numberOfLeadingZeros(value); + return new Quadruple(negative, bias(63 - leadingZeros), value << (leadingZeros + 1), 0); + } + + public static Quadruple fromDouble(double value) { + if (Double.isNaN(value)) { + return NaN; + } + if (Double.isInfinite(value)) { + return value < 0 ? NEGATIVE_INFINITY : POSITIVE_INFINITY; + } + if (Double.compare(value, 0.0) == 0) { + return POSITIVE_ZERO; + } + if (Double.compare(value, -0.0) == 0) { + return NEGATIVE_ZERO; + } + long bits = Double.doubleToLongBits(value); + long mantHi = bits << 12; + long exponent = bits >>> 52 & 0x7ff; + if (exponent == 0) { + // subnormal - mantHi cannot be zero as that means value==+/-0 + int leadingZeros = Long.numberOfLeadingZeros(mantHi); + mantHi = leadingZeros < 63 ? mantHi << (leadingZeros + 1) : 0; + exponent = -leadingZeros; + } + return new Quadruple(value < 0, bias((int) (exponent - 1023)), mantHi, 0); + } + + /** + * Converts a decimal number to a {@link Quadruple}. The supported format (no whitespace allowed) + * is: + * + *
+ *   • NaN for Quadruple.NaN
+ *   • Infinity or +Infinity for Quadruple.POSITIVE_INFINITY
+ *   • -Infinity for Quadruple.NEGATIVE_INFINITY
+ *   • regular expression: [+-]?[0-9]*(.[0-9]*)?([eE][+-]?[0-9]+)? - the exponent cannot be more
+ *       than 9 digits, and the whole string cannot be empty
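+ *
+ * <p>For example, {@code fromString("NaN")} returns {@link #NaN}, {@code fromString("+Infinity")}
+ * returns {@link #POSITIVE_INFINITY}, and {@code fromString("")} or {@code fromString("abc")}
+ * throws a {@link NumberFormatException} with the message "Invalid number".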
+ */ + public static Quadruple fromString(String s) { + if (s.equals("NaN")) { + return NaN; + } + if (s.equals("-Infinity")) { + return NEGATIVE_INFINITY; + } + if (s.equals("Infinity") || s.equals("+Infinity")) { + return POSITIVE_INFINITY; + } + char[] chars = s.toCharArray(); + byte[] digits = new byte[chars.length]; + int len = chars.length; + int i = 0; + int j = 0; + int exponent = 0; + boolean negative = false; + if (i < len) { + if (chars[i] == '-') { + negative = true; + i++; + } else if (chars[i] == '+') { + i++; + } + } + int firstDigit = i; + while (i < len && Character.isDigit(chars[i])) { + digits[j++] = (byte) (chars[i++] - '0'); + } + if (i < len && chars[i] == '.') { + int decimal = ++i; + while (i < len && Character.isDigit(chars[i])) { + digits[j++] = (byte) (chars[i++] - '0'); + } + exponent = decimal - i; + } + if (i < len && (chars[i] == 'e' || chars[i] == 'E')) { + int exponentValue = 0; + i++; + int exponentSign = 1; + if (i < len) { + if (chars[i] == '-') { + exponentSign = -1; + i++; + } else if (chars[i] == '+') { + i++; + } + } + int firstExponent = i; + while (i < len && Character.isDigit(chars[i])) { + exponentValue = exponentValue * 10 + chars[i++] - '0'; + if (i - firstExponent > 9) { + throw new NumberFormatException("Exponent too large " + s); + } + } + if (i == firstExponent) { + throw new NumberFormatException("Invalid number " + s); + } + exponent += exponentValue * exponentSign; + } + if (j == 0 || i != len) { + throw new NumberFormatException("Invalid number " + s); + } + byte[] digitsCopy = new byte[j]; + System.arraycopy(digits, 0, digitsCopy, 0, j); + QuadrupleBuilder parsed = QuadrupleBuilder.parseDecimal(digitsCopy, exponent); + return new Quadruple(negative, parsed.exponent, parsed.mantHi, parsed.mantLo); + } + + private static final int bias(int exponent) { + return exponent + QuadrupleBuilder.EXPONENT_BIAS; + } +} diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/QuadrupleBuilder.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/QuadrupleBuilder.java new file mode 100644 index 00000000000..1b8e4de0e40 --- /dev/null +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/QuadrupleBuilder.java @@ -0,0 +1,822 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/* + * Copyright 2021 M.Vokhmentsev + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.firebase.firestore; + +/** + * This class is for internal usage only and should not be exposed externally. + * @hide + */ +public class QuadrupleBuilder { + public static QuadrupleBuilder parseDecimal(byte[] digits, int exp10) { + QuadrupleBuilder q = new QuadrupleBuilder(); + q.parse(digits, exp10); + return q; + } + + // The fields containing the value of the instance + public int exponent; + public long mantHi; + public long mantLo; + // 2^192 = 6.277e57, so the 58-th digit after point may affect the result + static final int MAX_MANTISSA_LENGTH = 59; + // Max value of the decimal exponent, corresponds to EXPONENT_OF_MAX_VALUE + static final int MAX_EXP10 = 646456993; + // Min value of the decimal exponent, corresponds to EXPONENT_OF_MIN_NORMAL + static final int MIN_EXP10 = -646457032; + // (2^63) / 10 =~ 9.223372e17 + static final double TWO_POW_63_DIV_10 = 922337203685477580.0; + // Just for convenience: 0x8000_0000_0000_0000L + static final long HIGH_BIT = 0x8000000000000000L; + // Just for convenience: 0x8000_0000L, 2^31 + static final double POW_2_31 = 2147483648.0; + // Just for convenience: 0x0000_0000_FFFF_FFFFL + static final long LOWER_32_BITS = 0x00000000FFFFFFFFL; + // Just for convenience: 0xFFFF_FFFF_0000_0000L; + static final long HIGHER_32_BITS = 0xFFFFFFFF00000000L; + // Approximate value of log2(10) + static final double LOG2_10 = Math.log(10) / Math.log(2); + // Approximate value of log2(e) + static final double LOG2_E = 1 / Math.log(2.0); + // The value of the exponent (biased) corresponding to {@code 1.0 == 2^0}; equals to 2_147_483_647 + // ({@code 0x7FFF_FFFF}). + static final int EXPONENT_BIAS = 0x7FFF_FFFF; + // The value of the exponent (biased), corresponding to {@code Infinity}, {@code _Infinty}, and + // {@code NaN} + static final long EXPONENT_OF_INFINITY = 0xFFFFFFFFL; + // An array of positive powers of two, each value consists of 4 longs: decimal exponent and 3 x 64 + // bits of mantissa, divided by ten Used to find an arbitrary power of 2 (by powerOfTwo(long exp)) + private static final long[][] POS_POWERS_OF_2 = { // 0: 2^0 = 1 = 0.1e1 + { + 1, 0x1999_9999_9999_9999L, 0x9999_9999_9999_9999L, 0x9999_9999_9999_999aL + }, // 1: 2^(2^0) = 2^1 = 2 = 0.2e1 + {1, 0x3333_3333_3333_3333L, 0x3333_3333_3333_3333L, 0x3333_3333_3333_3334L}, // *** + // 2: 2^(2^1) = 2^2 = 4 = 0.4e1 + {1, 0x6666_6666_6666_6666L, 0x6666_6666_6666_6666L, 0x6666_6666_6666_6667L}, // *** + // 3: 2^(2^2) = 2^4 = 16 = 0.16e2 + {2, 0x28f5_c28f_5c28_f5c2L, 0x8f5c_28f5_c28f_5c28L, 0xf5c2_8f5c_28f5_c290L}, // *** + // 4: 2^(2^3) = 2^8 = 256 = 0.256e3 + {3, 0x4189_374b_c6a7_ef9dL, 0xb22d_0e56_0418_9374L, 0xbc6a_7ef9_db22_d0e6L}, // *** + // 5: 2^(2^4) = 2^16 = 65536 = 0.65536e5 + { + 5, 0xa7c5_ac47_1b47_8423L, 0x0fcf_80dc_3372_1d53L, 0xcddd_6e04_c059_2104L + }, // 6: 2^(2^5) = 2^32 = 4294967296 = 0.4294967296e10 + { + 10, 0x6df3_7f67_5ef6_eadfL, 0x5ab9_a207_2d44_268dL, 0x97df_837e_6748_956eL + }, // 7: 2^(2^6) = 2^64 = 18446744073709551616 = 0.18446744073709551616e20 + { + 20, 0x2f39_4219_2484_46baL, 0xa23d_2ec7_29af_3d61L, 0x0607_aa01_67dd_94cbL + }, // 8: 2^(2^7) = 2^128 = 340282366920938463463374607431768211456 = + // 0.340282366920938463463374607431768211456e39 + { + 39, 0x571c_bec5_54b6_0dbbL, 0xd5f6_4baf_0506_840dL, 0x451d_b70d_5904_029bL + }, // 9: 2^(2^8) = 2^256 = + // 1.1579208923731619542357098500868790785326998466564056403945758401E+77 = + // 0.11579208923731619542357098500868790785326998466564056403945758401e78 + {78, 0x1da4_8ce4_68e7_c702L, 
0x6520_247d_3556_476dL, 0x1469_caf6_db22_4cfaL}, // *** + // 10: 2^(2^9) = 2^512 = + // 1.3407807929942597099574024998205846127479365820592393377723561444E+154 = + // 0.13407807929942597099574024998205846127479365820592393377723561444e155 + { + 155, 0x2252_f0e5_b397_69dcL, 0x9ae2_eea3_0ca3_ade0L, 0xeeaa_3c08_dfe8_4e30L + }, // 11: 2^(2^10) = 2^1024 = + // 1.7976931348623159077293051907890247336179769789423065727343008116E+308 = + // 0.17976931348623159077293051907890247336179769789423065727343008116e309 + { + 309, 0x2e05_5c9a_3f6b_a793L, 0x1658_3a81_6eb6_0a59L, 0x22c4_b082_6cf1_ebf7L + }, // 12: 2^(2^11) = 2^2048 = + // 3.2317006071311007300714876688669951960444102669715484032130345428E+616 = + // 0.32317006071311007300714876688669951960444102669715484032130345428e617 + { + 617, 0x52bb_45e9_cf23_f17fL, 0x7688_c076_06e5_0364L, 0xb344_79aa_9d44_9a57L + }, // 13: 2^(2^12) = 2^4096 = + // 1.0443888814131525066917527107166243825799642490473837803842334833E+1233 = + // 0.10443888814131525066917527107166243825799642490473837803842334833e1234 + { + 1234, 0x1abc_81c8_ff5f_846cL, 0x8f5e_3c98_53e3_8c97L, 0x4506_0097_f3bf_9296L + }, // 14: 2^(2^13) = 2^8192 = + // 1.0907481356194159294629842447337828624482641619962326924318327862E+2466 = + // 0.10907481356194159294629842447337828624482641619962326924318327862e2467 + { + 2467, 0x1bec_53b5_10da_a7b4L, 0x4836_9ed7_7dbb_0eb1L, 0x3b05_587b_2187_b41eL + }, // 15: 2^(2^14) = 2^16384 = + // 1.1897314953572317650857593266280071307634446870965102374726748212E+4932 = + // 0.11897314953572317650857593266280071307634446870965102374726748212e4933 + { + 4933, 0x1e75_063a_5ba9_1326L, 0x8abf_b8e4_6001_6ae3L, 0x2800_8702_d29e_8a3cL + }, // 16: 2^(2^15) = 2^32768 = + // 1.4154610310449547890015530277449516013481307114723881672343857483E+9864 = + // 0.14154610310449547890015530277449516013481307114723881672343857483e9865 + { + 9865, 0x243c_5d8b_b5c5_fa55L, 0x40c6_d248_c588_1915L, 0x4c0f_d99f_d5be_fc22L + }, // 17: 2^(2^16) = 2^65536 = + // 2.0035299304068464649790723515602557504478254755697514192650169737E+19728 = + // 0.20035299304068464649790723515602557504478254755697514192650169737e19729 + { + 19729, 0x334a_5570_c3f4_ef3cL, 0xa13c_36c4_3f97_9c90L, 0xda7a_c473_555f_b7a8L + }, // 18: 2^(2^17) = 2^131072 = + // 4.0141321820360630391660606060388767343771510270414189955825538065E+39456 = + // 0.40141321820360630391660606060388767343771510270414189955825538065e39457 + { + 39457, 0x66c3_0444_5dd9_8f3bL, 0xa8c2_93a2_0e47_a41bL, 0x4c5b_03dc_1260_4964L + }, // 19: 2^(2^18) = 2^262144 = + // 1.6113257174857604736195721184520050106440238745496695174763712505E+78913 = + // 0.16113257174857604736195721184520050106440238745496695174763712505e78914 + { + 78914, 0x293f_fbf5_fb02_8cc4L, 0x89d3_e5ff_4423_8406L, 0x369a_339e_1bfe_8c9bL + }, // 20: 2^(2^19) = 2^524288 = + // 2.5963705678310007761265964957268828277447343763484560463573654868E+157826 = + // 0.25963705678310007761265964957268828277447343763484560463573654868e157827 + { + 157827, 0x4277_92fb_b68e_5d20L, 0x7b29_7cd9_fc15_4b62L, 0xf091_4211_4aa9_a20cL + }, // 21: 2^(2^20) = 2^1048576 = + // 6.7411401254990734022690651047042454376201859485326882846944915676E+315652 = + // 0.67411401254990734022690651047042454376201859485326882846944915676e315653 + { + 315653, 0xac92_bc65_ad5c_08fcL, 0x00be_eb11_5a56_6c19L, 0x4ba8_82d8_a462_2437L + }, // 22: 2^(2^21) = 2^2097152 = + // 4.5442970191613663099961595907970650433180103994591456270882095573E+631305 = + // 0.45442970191613663099961595907970650433180103994591456270882095573e631306 + { + 
631306, 0x7455_8144_0f92_e80eL, 0x4da8_22cf_7f89_6f41L, 0x509d_5986_7816_4ecdL + }, // 23: 2^(2^22) = 2^4194304 = + // 2.0650635398358879243991194945816501695274360493029670347841664177E+1262611 = + // 0.20650635398358879243991194945816501695274360493029670347841664177e1262612 + { + 1262612, 0x34dd_99b4_c695_23a5L, 0x64bc_2e8f_0d8b_1044L, 0xb03b_1c96_da5d_d349L + }, // 24: 2^(2^23) = 2^8388608 = + // 4.2644874235595278724327289260856157547554200794957122157246170406E+2525222 = + // 0.42644874235595278724327289260856157547554200794957122157246170406e2525223 + { + 2525223, 0x6d2b_bea9_d6d2_5a08L, 0xa0a4_606a_88e9_6b70L, 0x1820_63bb_c2fe_8520L + }, // 25: 2^(2^24) = 2^16777216 = + // 1.8185852985697380078927713277749906189248596809789408311078112486E+5050445 = + // 0.18185852985697380078927713277749906189248596809789408311078112486e5050446 + { + 5050446, 0x2e8e_47d6_3bfd_d6e3L, 0x2b55_fa89_76ea_a3e9L, 0x1a6b_9d30_8641_2a73L + }, // 26: 2^(2^25) = 2^33554432 = + // 3.3072524881739831340558051919726975471129152081195558970611353362E+10100890 = + // 0.33072524881739831340558051919726975471129152081195558970611353362e10100891 + { + 10100891, 0x54aa_68ef_a1d7_19dfL, 0xd850_5806_612c_5c8fL, 0xad06_8837_fee8_b43aL + }, // 27: 2^(2^26) = 2^67108864 = + // 1.0937919020533002449982468634925923461910249420785622990340704603E+20201781 = + // 0.10937919020533002449982468634925923461910249420785622990340704603e20201782 + { + 20201782, 0x1c00_464c_cb7b_ae77L, 0x9e38_7778_4c77_982cL, 0xd94a_f3b6_1717_404fL + }, // 28: 2^(2^27) = 2^134217728 = + // 1.1963807249973763567102377630870670302911237824129274789063323723E+40403562 = + // 0.11963807249973763567102377630870670302911237824129274789063323723e40403563 + { + 40403563, 0x1ea0_99c8_be2b_6cd0L, 0x8bfb_6d53_9fa5_0466L, 0x6d3b_c37e_69a8_4218L + }, // 29: 2^(2^28) = 2^268435456 = + // 1.4313268391452478724777126233530788980596273340675193575004129517E+80807124 = + // 0.14313268391452478724777126233530788980596273340675193575004129517e80807125 + { + 80807125, 0x24a4_57f4_66ce_8d18L, 0xf2c8_f3b8_1bc6_bb59L, 0xa78c_7576_92e0_2d49L + }, // 30: 2^(2^29) = 2^536870912 = + // 2.0486965204575262773910959587280218683219330308711312100181276813E+161614248 = + // 0.20486965204575262773910959587280218683219330308711312100181276813e161614249 + { + 161614249, 0x3472_5667_7aba_6b53L, 0x3fbf_90d3_0611_a67cL, 0x1e03_9d87_e0bd_b32bL + }, // 31: 2^(2^30) = 2^1073741824 = + // 4.1971574329347753848087162337676781412761959309467052555732924370E+323228496 = + // 0.41971574329347753848087162337676781412761959309467052555732924370e323228497 + { + 323228497, 0x6b72_7daf_0fd3_432aL, 0x71f7_1121_f9e4_200fL, 0x8fcd_9942_d486_c10cL + }, // 32: 2^(2^31) = 2^2147483648 = + // 1.7616130516839633532074931497918402856671115581881347960233679023E+646456993 = + // 0.17616130516839633532074931497918402856671115581881347960233679023e646456994 + {646456994, 0x2d18_e844_84d9_1f78L, 0x4079_bfe7_829d_ec6fL, 0x2155_1643_e365_abc6L} + }; + // An array of negative powers of two, each value consists of 4 longs: decimal exponent and 3 x 64 + // bits of mantissa, divided by ten. 
Used to find an arbitrary power of 2 (by powerOfTwo(long + // exp)) + private static final long[][] NEG_POWERS_OF_2 = { // v18 + // 0: 2^0 = 1 = 0.1e1 + { + 1, 0x1999_9999_9999_9999L, 0x9999_9999_9999_9999L, 0x9999_9999_9999_999aL + }, // 1: 2^-(2^0) = 2^-1 = 0.5 = 0.5e0 + { + 0, 0x8000_0000_0000_0000L, 0x0000_0000_0000_0000L, 0x0000_0000_0000_0000L + }, // 2: 2^-(2^1) = 2^-2 = 0.25 = 0.25e0 + // {0, 0x4000_0000_0000_0000L, 0x0000_0000_0000_0000L, 0x0000_0000_0000_0000L}, + {0, 0x4000_0000_0000_0000L, 0x0000_0000_0000_0000L, 0x0000_0000_0000_0001L}, // *** + // 3: 2^-(2^2) = 2^-4 = 0.0625 = 0.625e-1 + { + -1, 0xa000_0000_0000_0000L, 0x0000_0000_0000_0000L, 0x0000_0000_0000_0000L + }, // 4: 2^-(2^3) = 2^-8 = 0.00390625 = 0.390625e-2 + { + -2, 0x6400_0000_0000_0000L, 0x0000_0000_0000_0000L, 0x0000_0000_0000_0000L + }, // 5: 2^-(2^4) = 2^-16 = 0.0000152587890625 = 0.152587890625e-4 + {-4, 0x2710_0000_0000_0000L, 0x0000_0000_0000_0000L, 0x0000_0000_0000_0001L}, // *** + // 6: 2^-(2^5) = 2^-32 = 2.3283064365386962890625E-10 = 0.23283064365386962890625e-9 + {-9, 0x3b9a_ca00_0000_0000L, 0x0000_0000_0000_0000L, 0x0000_0000_0000_0001L}, // *** + // 7: 2^-(2^6) = 2^-64 = 5.42101086242752217003726400434970855712890625E-20 = + // 0.542101086242752217003726400434970855712890625e-19 + { + -19, 0x8ac7_2304_89e8_0000L, 0x0000_0000_0000_0000L, 0x0000_0000_0000_0000L + }, // 8: 2^-(2^7) = 2^-128 = + // 2.9387358770557187699218413430556141945466638919302188037718792657E-39 = + // 0.29387358770557187699218413430556141945466638919302188037718792657e-38 + {-38, 0x4b3b_4ca8_5a86_c47aL, 0x098a_2240_0000_0000L, 0x0000_0000_0000_0001L}, // *** + // 9: 2^-(2^8) = 2^-256 = + // 8.6361685550944446253863518628003995711160003644362813850237034700E-78 = + // 0.86361685550944446253863518628003995711160003644362813850237034700e-77 + { + -77, 0xdd15_fe86_affa_d912L, 0x49ef_0eb7_13f3_9ebeL, 0xaa98_7b6e_6fd2_a002L + }, // 10: 2^-(2^9) = 2^-512 = + // 7.4583407312002067432909653154629338373764715346004068942715183331E-155 = + // 0.74583407312002067432909653154629338373764715346004068942715183331e-154 + { + -154, 0xbeee_fb58_4aff_8603L, 0xaafb_550f_facf_d8faL, 0x5ca4_7e4f_88d4_5371L + }, // 11: 2^-(2^10) = 2^-1024 = + // 5.5626846462680034577255817933310101605480399511558295763833185421E-309 = + // 0.55626846462680034577255817933310101605480399511558295763833185421e-308 + {-308, 0x8e67_9c2f_5e44_ff8fL, 0x570f_09ea_a7ea_7648L, 0x5961_db50_c6d2_b888L}, // *** + // 12: 2^-(2^11) = 2^-2048 = + // 3.0943460473825782754801833699711978538925563038849690459540984582E-617 = + // 0.30943460473825782754801833699711978538925563038849690459540984582e-616 + { + -616, 0x4f37_1b33_99fc_2ab0L, 0x8170_041c_9feb_05aaL, 0xc7c3_4344_7c75_bcf6L + }, // 13: 2^-(2^12) = 2^-4096 = + // 9.5749774609521853579467310122804202420597417413514981491308464986E-1234 = + // 0.95749774609521853579467310122804202420597417413514981491308464986e-1233 + { + -1233, 0xf51e_9281_7901_3fd3L, 0xde4b_d12c_de4d_985cL, 0x4a57_3ca6_f94b_ff14L + }, // 14: 2^-(2^13) = 2^-8192 = + // 9.1680193377742358281070619602424158297818248567928361864131947526E-2467 = + // 0.91680193377742358281070619602424158297818248567928361864131947526e-2466 + { + -2466, 0xeab3_8812_7bcc_aff7L, 0x1667_6391_42b9_fbaeL, 0x775e_c999_5e10_39fbL + }, // 15: 2^-(2^14) = 2^-16384 = + // 8.4052578577802337656566945433043815064951983621161781002720680748E-4933 = + // 0.84052578577802337656566945433043815064951983621161781002720680748e-4932 + { + -4932, 0xd72c_b2a9_5c7e_f6ccL, 0xe81b_f1e8_25ba_7515L, 
0xc2fe_b521_d6cb_5dcdL + }, // 16: 2^-(2^15) = 2^-32768 = + // 7.0648359655776364427774021878587184537374439102725065590941425796E-9865 = + // 0.70648359655776364427774021878587184537374439102725065590941425796e-9864 + {-9864, 0xb4dc_1be6_6045_02dcL, 0xd491_079b_8eef_6535L, 0x578d_3965_d24d_e84dL}, // *** + // 17: 2^-(2^16) = 2^-65536 = + // 4.9911907220519294656590574792132451973746770423207674161425040336E-19729 = + // 0.49911907220519294656590574792132451973746770423207674161425040336e-19728 + {-19728, 0x7fc6_447b_ee60_ea43L, 0x2548_da5c_8b12_5b27L, 0x5f42_d114_2f41_d349L}, // *** + // 18: 2^-(2^17) = 2^-131072 = + // 2.4911984823897261018394507280431349807329035271689521242878455599E-39457 = + // 0.24911984823897261018394507280431349807329035271689521242878455599e-39456 + {-39456, 0x3fc6_5180_f88a_f8fbL, 0x6a69_15f3_8334_9413L, 0x063c_3708_b6ce_b291L}, // *** + // 19: 2^-(2^18) = 2^-262144 = + // 6.2060698786608744707483205572846793091942192651991171731773832448E-78914 = + // 0.62060698786608744707483205572846793091942192651991171731773832448e-78913 + { + -78913, 0x9ee0_197c_8dcd_55bfL, 0x2b2b_9b94_2c38_f4a2L, 0x0f8b_a634_e9c7_06aeL + }, // 20: 2^-(2^19) = 2^-524288 = + // 3.8515303338821801176537443725392116267291403078581314096728076497E-157827 = + // 0.38515303338821801176537443725392116267291403078581314096728076497e-157826 + {-157826, 0x6299_63a2_5b8b_2d79L, 0xd00b_9d22_86f7_0876L, 0xe970_0470_0c36_44fcL}, // *** + // 21: 2^-(2^20) = 2^-1048576 = + // 1.4834285912814577854404052243709225888043963245995136935174170977E-315653 = + // 0.14834285912814577854404052243709225888043963245995136935174170977e-315652 + { + -315652, 0x25f9_cc30_8cee_f4f3L, 0x40f1_9543_911a_4546L, 0xa2cd_3894_52cf_c366L + }, // 22: 2^-(2^21) = 2^-2097152 = + // 2.2005603854312903332428997579002102976620485709683755186430397089E-631306 = + // 0.22005603854312903332428997579002102976620485709683755186430397089e-631305 + { + -631305, 0x3855_97b0_d47e_76b8L, 0x1b9f_67e1_03bf_2329L, 0xc311_9848_5959_85f7L + }, // 23: 2^-(2^22) = 2^-4194304 = + // 4.8424660099295090687215589310713586524081268589231053824420510106E-1262612 = + // 0.48424660099295090687215589310713586524081268589231053824420510106e-1262611 + {-1262611, 0x7bf7_95d2_76c1_2f66L, 0x66a6_1d62_a446_659aL, 0xa1a4_d73b_ebf0_93d5L}, // *** + // 24: 2^-(2^23) = 2^-8388608 = + // 2.3449477057322620222546775527242476219043877555386221929831430440E-2525223 = + // 0.23449477057322620222546775527242476219043877555386221929831430440e-2525222 + {-2525222, 0x3c07_d96a_b1ed_7799L, 0xcb73_55c2_2cc0_5ac0L, 0x4ffc_0ab7_3b1f_6a49L}, // *** + // 25: 2^-(2^24) = 2^-16777216 = + // 5.4987797426189993226257377747879918011694025935111951649826798628E-5050446 = + // 0.54987797426189993226257377747879918011694025935111951649826798628e-5050445 + {-5050445, 0x8cc4_cd8c_3ede_fb9aL, 0x6c8f_f86a_90a9_7e0cL, 0x166c_fddb_f98b_71bfL}, // *** + // 26: 2^-(2^25) = 2^-33554432 = + // 3.0236578657837068435515418409027857523343464783010706819696074665E-10100891 = + // 0.30236578657837068435515418409027857523343464783010706819696074665e-10100890 + {-10100890, 0x4d67_d81c_c88e_1228L, 0x1d7c_fb06_666b_79b3L, 0x7b91_6728_aaa4_e70dL}, // *** + // 27: 2^-(2^26) = 2^-67108864 = + // 9.1425068893156809483320844568740945600482370635012633596231964471E-20201782 = + // 0.91425068893156809483320844568740945600482370635012633596231964471e-20201781 + {-20201781, 0xea0c_5549_4e7a_552dL, 0xb88c_b948_4bb8_6c61L, 0x8d44_893c_610b_b7dFL}, // *** + // 28: 2^-(2^27) = 2^-134217728 = + // 
8.3585432221184688810803924874542310018191301711943564624682743545E-40403563 = + // 0.83585432221184688810803924874542310018191301711943564624682743545e-40403562 + { + -40403562, 0xd5fa_8c82_1ec0_c24aL, 0xa80e_46e7_64e0_f8b0L, 0xa727_6bfa_432f_ac7eL + }, // 29: 2^-(2^28) = 2^-268435456 = + // 6.9865244796022595809958912202005005328020601847785697028605460277E-80807125 = + // 0.69865244796022595809958912202005005328020601847785697028605460277e-80807124 + { + -80807124, 0xb2da_e307_426f_6791L, 0xc970_b82f_58b1_2918L, 0x0472_592f_7f39_190eL + }, // 30: 2^-(2^29) = 2^-536870912 = + // 4.8811524304081624052042871019605298977947353140996212667810837790E-161614249 = + // 0.48811524304081624052042871019605298977947353140996212667810837790e-161614248 + // {-161614248, 0x7cf5_1edd_8a15_f1c9L, 0x656d_ab34_98f8_e697L, 0x12da_a2a8_0e53_c809L}, + { + -161614248, 0x7cf5_1edd_8a15_f1c9L, 0x656d_ab34_98f8_e697L, 0x12da_a2a8_0e53_c807L + }, // 31: 2^-(2^30) = 2^-1073741824 = + // 2.3825649048879510732161697817326745204151961255592397879550237608E-323228497 = + // 0.23825649048879510732161697817326745204151961255592397879550237608e-323228496 + { + -323228496, 0x3cfe_609a_b588_3c50L, 0xbec8_b5d2_2b19_8871L, 0xe184_7770_3b46_22b4L + }, // 32: 2^-(2^31) = 2^-2147483648 = + // 5.6766155260037313438164181629489689531186932477276639365773003794E-646456994 = + // 0.56766155260037313438164181629489689531186932477276639365773003794e-646456993 + {-646456993, 0x9152_447b_9d7c_da9aL, 0x3b4d_3f61_10d7_7aadL, 0xfa81_bad1_c394_adb4L} + }; + // Buffers used internally + // The order of words in the arrays is big-endian: the highest part is in buff[0] (in buff[1] for + // buffers of 10 words) + + private final long[] buffer4x64B = new long[4]; + private final long[] buffer6x32A = new long[6]; + private final long[] buffer6x32B = new long[6]; + private final long[] buffer6x32C = new long[6]; + private final long[] buffer12x32 = new long[12]; + + private void parse(byte[] digits, int exp10) { + exp10 += (digits).length - 1; // digits is viewed as x.yyy below. + this.exponent = 0; + this.mantHi = 0L; + this.mantLo = 0L; + // Finds numeric value of the decimal mantissa + long[] mantissa = this.buffer6x32C; + int exp10Corr = parseMantissa(digits, mantissa); + if (exp10Corr == 0 && isEmpty(mantissa)) { + // Mantissa == 0 + return; + } + // takes account of the point position in the mant string and possible carry as a result of + // round-up (like 9.99e1 -> 1.0e2) + exp10 += exp10Corr; + if (exp10 < MIN_EXP10) { + return; + } + if (exp10 > MAX_EXP10) { + this.exponent = ((int) (long) (EXPONENT_OF_INFINITY)); + return; + } + double exp2 = findBinaryExponent(exp10, mantissa); + // Finds binary mantissa and possible exponent correction. Fills the fields. 
+ findBinaryMantissa(exp10, exp2, mantissa); + } + + private int parseMantissa(byte[] digits, long[] mantissa) { + for (int i = (0); i < (6); i++) { + mantissa[i] = 0L; + } + // Skip leading zeroes + int firstDigit = 0; + while (firstDigit < (digits).length && digits[firstDigit] == 0) { + firstDigit += 1; + } + if (firstDigit == (digits).length) { + return 0; // All zeroes + } + int expCorr = -firstDigit; + // Limit the string length to avoid unnecessary fuss + if ((digits).length - firstDigit > MAX_MANTISSA_LENGTH) { + boolean carry = digits[MAX_MANTISSA_LENGTH] >= 5; // The highest digit to be truncated + byte[] truncated = new byte[MAX_MANTISSA_LENGTH]; + ; + for (int i = (0); i < (MAX_MANTISSA_LENGTH); i++) { + truncated[i] = digits[i + firstDigit]; + } + if (carry) { // Round-up: add carry + expCorr += addCarry(truncated); // May add an extra digit in front of it (99..99 -> 100) + } + digits = truncated; + firstDigit = 0; + } + for (int i = ((digits).length) - 1; i >= (firstDigit); i--) { // digits, starting from the last + mantissa[0] |= ((long) (digits[i])) << 32L; + divBuffBy10(mantissa); + } + return expCorr; + } + + // Divides the unpacked value stored in the given buffer by 10 + // @param buffer contains the unpacked value to divide (32 least significant bits are used) + private void divBuffBy10(long[] buffer) { + int maxIdx = (buffer).length; + // big/endian + for (int i = (0); i < (maxIdx); i++) { + long r = buffer[i] % 10L; + buffer[i] = ((buffer[i]) / (10L)); + if (i + 1 < maxIdx) { + buffer[i + 1] += r << 32L; + } + } + } + + // Checks if the buffer is empty (contains nothing but zeros) + // @param buffer the buffer to check + // @return {@code true} if the buffer is empty, {@code false} otherwise + private boolean isEmpty(long[] buffer) { + for (int i = (0); i < ((buffer).length); i++) { + if (buffer[i] != 0L) { + return false; + } + } + return true; + } + + // Adds one to a decimal number represented as a sequence of decimal digits. propagates carry as + // needed, so that {@code addCarryTo("6789") = "6790", addCarryTo("9999") = "10000"} etc. + // @return 1 if an additional higher "1" was added in front of the number as a result of + // rounding-up, 0 otherwise + private int addCarry(byte[] digits) { + for (int i = ((digits).length) - 1; i >= (0); i--) { // starting with the lowest digit + byte c = digits[i]; + if (c == 9) { + digits[i] = 0; + } else { + digits[i] = ((byte) (digits[i] + 1)); + return 0; + } + } + digits[0] = 1; + return 1; + } + + // Finds binary exponent, using decimal exponent and mantissa.
+ // exp2 = exp10 * log2(10) + log2(mant)
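+ // e.g. for 1500 = 1.5e3: exp10 = 3 and mant = 1.5, so
+ // exp2 = floor(3 * log2(10) + log2(1.5)) = floor(9.9658 + 0.5850) = 10, and indeed 2^10 <= 1500 < 2^11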
+ // @param exp10 decimal exponent + // @param mantissa array of longs containing decimal mantissa (divided by 10) + // @return found value of binary exponent + private double findBinaryExponent(int exp10, long[] mantissa) { + long mant10 = + mantissa[0] << 31L | ((mantissa[1]) >>> (1L)); // Higher 63 bits of the mantissa, in range + // 0x0CC..CCC -- 0x7FF..FFF (2^63/10 -- 2^63-1) + // decimal value of the mantissa in range 1.0..9.9999... + double mant10d = ((double) (mant10)) / TWO_POW_63_DIV_10; + return ((long) Math.floor(((double) (exp10)) * LOG2_10 + log2(mant10d))); // Binary exponent + } + + // Calculates log2 of the given x + // @param x argument that can't be 0 + // @return the value of log2(x) + private double log2(double x) { + // x can't be 0 + return LOG2_E * Math.log(x); + } + + private void findBinaryMantissa(int exp10, double exp2, long[] mantissa) { + // pow(2, -exp2): division by 2^exp2 is multiplication by 2^(-exp2) actually + long[] powerOf2 = this.buffer4x64B; + powerOfTwo(-exp2, powerOf2); + long[] product = this.buffer12x32; // use it for the product (M * 10^E / 2^e) + multUnpacked6x32byPacked(mantissa, powerOf2, product); // product in buff_12x32 + multBuffBy10(product); // "Quasidecimals" are numbers divided by 10 + // The powerOf2[0] is stored as an unsigned value + if (((long) (powerOf2[0])) != ((long) (-exp10))) { + // For some combinations of exp2 and exp10, additional multiplication needed + // (see mant2_from_M_E_e.xls) + multBuffBy10(product); + } + // compensate possible inaccuracy of logarithms used to compute exp2 + exp2 += normalizeMant(product); + exp2 += EXPONENT_BIAS; // add bias + // For subnormal values, exp2 <= 0. We just return 0 for them, as they are + // far from any range we are interested in. + if (exp2 <= 0) { + return; + } + exp2 += roundUp(product); // round up, may require exponent correction + if (((long) (exp2)) >= EXPONENT_OF_INFINITY) { + this.exponent = ((int) (long) (EXPONENT_OF_INFINITY)); + } else { + this.exponent = ((int) (long) (exp2)); + this.mantHi = ((product[0] << 32L) + product[1]); + this.mantLo = ((product[2] << 32L) + product[3]); + } + } + + // Calculates the required power and returns the result in the quasidecimal format (an array of + // longs, where result[0] is the decimal exponent of the resulting value, and result[1] -- + // result[3] contain 192 bits of the mantissa divided by ten (so that 8 looks like + //
{@code {1, 0xCCCC_.._CCCCL, 0xCCCC_.._CCCCL, 0xCCCC_.._CCCDL}})
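+ // (8 = 0.8e1: the exponent word is 1, and the mantissa words hold 0.8 as a 192-bit binary
+ // fraction, 0xCCCC...CCCC rounded up to ...CCCD in the last word)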
+ // uses arrays buffer4x64B, buffer6x32A, buffer6x32B, buffer12x32, + // @param exp the power to raise 2 to + // @param power (result) the value of {@code2^exp} + private void powerOfTwo(double exp, long[] power) { + if (exp == 0) { + array_copy(POS_POWERS_OF_2[0], power); + return; + } + // positive powers of 2 (2^0, 2^1, 2^2, 2^4, 2^8 ... 2^(2^31) ) + long[][] powers = (POS_POWERS_OF_2); + if (exp < 0) { + exp = -exp; + powers = (NEG_POWERS_OF_2); // positive powers of 2 (2^0, 2^-1, 2^-2, 2^-4, 2^-8 ... 2^30) + } + // 2^31 = 0x8000_0000L; a single bit that will be shifted right at every iteration + double currPowOf2 = POW_2_31; + int idx = 32; // Index in the table of powers + boolean first_power = true; + // if exp = b31 * 2^31 + b30 * 2^30 + .. + b0 * 2^0, where b0..b31 are the values of the bits in + // exp, then 2^exp = 2^b31 * 2^b30 ... * 2^b0. Find the product, using a table of powers of 2. + while (exp > 0) { + if (exp >= currPowOf2) { // the current bit in the exponent is 1 + if (first_power) { + // 4 longs, power[0] -- decimal (?) exponent, power[1..3] -- 192 bits of mantissa + array_copy((powers)[idx], power); + first_power = false; + } else { + // Multiply by the corresponding power of 2 + multPacked3x64_AndAdjustExponent(power, (powers)[idx], power); + } + exp -= currPowOf2; + } + idx -= 1; + currPowOf2 = currPowOf2 * 0.5; // Note: this is exact + } + } + + // Copies from into to. + private void array_copy(long[] source, long[] dest) { + for (int i = (0); i < ((dest).length); i++) { + dest[i] = source[i]; + } + } + + // Multiplies two quasidecimal numbers contained in buffers of 3 x 64 bits with exponents, puts + // the product to buffer4x64B
+ // and returns it. Both each of the buffers and the product contain 4 longs - exponent and 3 x 64 + // bits of mantissa. If the higher word of mantissa of the product is less than + // 0x1999_9999_9999_9999L (i.e. mantissa is less than 0.1) multiplies mantissa by 10 and adjusts + // the exponent respectively. + private void multPacked3x64_AndAdjustExponent(long[] factor1, long[] factor2, long[] result) { + multPacked3x64_simply(factor1, factor2, this.buffer12x32); + int expCorr = correctPossibleUnderflow(this.buffer12x32); + pack_6x32_to_3x64(this.buffer12x32, result); + // result[0] is a signed int64 value stored in an uint64 + result[0] = factor1[0] + factor2[0] + ((long) (expCorr)); // product.exp = f1.exp + f2.exp + } + + // Multiplies mantissas of two packed quasidecimal values (each is an array of 4 longs, exponent + + // 3 x 64 bits of mantissa) Returns the product as unpacked buffer of 12 x 32 (12 x 32 bits of + // product) + // uses arrays buffer6x32A, buffer6x32B + // @param factor1 an array of longs containing factor 1 as packed quasidecimal + // @param factor2 an array of longs containing factor 2 as packed quasidecimal + // @param result an array of 12 longs filled with the product of mantissas + private void multPacked3x64_simply(long[] factor1, long[] factor2, long[] result) { + for (int i = (0); i < ((result).length); i++) { + result[i] = 0L; + } + // TODO2 19.01.16 21:23:06 for the next version -- rebuild the table of powers to make the + // numbers unpacked, to avoid packing/unpacking + unpack_3x64_to_6x32(factor1, this.buffer6x32A); + unpack_3x64_to_6x32(factor2, this.buffer6x32B); + for (int i = (6) - 1; i >= (0); i--) { // compute partial 32-bit products + for (int j = (6) - 1; j >= (0); j--) { + long part = this.buffer6x32A[i] * this.buffer6x32B[j]; + result[j + i + 1] = (result[j + i + 1] + (part & LOWER_32_BITS)); + result[j + i] = (result[j + i] + ((part) >>> (32L))); + } + } + // Carry higher bits of the product to the lower bits of the next word + for (int i = (12) - 1; i >= (1); i--) { + result[i - 1] = (result[i - 1] + ((result[i]) >>> (32L))); + result[i] &= LOWER_32_BITS; + } + } + + // Corrects possible underflow of the decimal mantissa, passed in in the {@code mantissa}, by + // multiplying it by a power of ten. 
The corresponding value to adjust the decimal exponent is + // returned as the result + // @param mantissa a buffer containing the mantissa to be corrected + // @return a corrective (addition) that is needed to adjust the decimal exponent of the number + private int correctPossibleUnderflow(long[] mantissa) { + int expCorr = 0; + while (isLessThanOne(mantissa)) { // Underflow + multBuffBy10(mantissa); + expCorr -= 1; + } + return expCorr; + } + + // Checks if the unpacked quasidecimal value held in the given buffer is less than one (in this + // format, one is represented as { 0x1999_9999L, 0x9999_9999L, 0x9999_9999L,...} + // @param buffer a buffer containing the value to check + // @return {@code true}, if the value is less than one + private boolean isLessThanOne(long[] buffer) { + if (buffer[0] < 0x1999_9999L) { + return true; + } + if (buffer[0] > 0x1999_9999L) { + return false; + } + // A note regarding the coverage: + // Multiplying a 128-bit number by another 192-bit number, + // as well as multiplying of two 192-bit numbers, + // can never produce 320 (or 384 bits, respectively) of 0x1999_9999L, 0x9999_9999L, + for (int i = (1); i < ((buffer).length); i++) { + // so this loop can't be covered entirely + if (buffer[i] < 0x9999_9999L) { + return true; + } + if (buffer[i] > 0x9999_9999L) { + return false; + } + } + // and it can never reach this point in real life. + return false; // Still Java requires the return statement here. + } + + // Multiplies unpacked 192-bit value by a packed 192-bit factor
+ // uses static arrays buffer6x32B + // @param factor1 a buffer containing unpacked quasidecimal mantissa (6 x 32 bits) + // @param factor2 an array of 4 longs containing packed quasidecimal power of two + // @param product a buffer of at least 12 longs to hold the product + private void multUnpacked6x32byPacked(long[] factor1, long[] factor2, long[] product) { + for (int i = (0); i < ((product).length); i++) { + product[i] = 0L; + } + long[] unpacked2 = this.buffer6x32B; + unpack_3x64_to_6x32(factor2, unpacked2); // It's the powerOf2, with exponent in 0'th word + int maxFactIdx = (factor1).length; + for (int i = (maxFactIdx) - 1; i >= (0); i--) { // compute partial 32-bit products + for (int j = (maxFactIdx) - 1; j >= (0); j--) { + long part = factor1[i] * unpacked2[j]; + product[j + i + 1] = (product[j + i + 1] + (part & LOWER_32_BITS)); + product[j + i] = (product[j + i] + ((part) >>> (32L))); + } + } + // Carry higher bits of the product to the lower bits of the next word + for (int i = (12) - 1; i >= (1); i--) { + product[i - 1] = (product[i - 1] + ((product[i]) >>> (32L))); + product[i] &= LOWER_32_BITS; + } + } + + // Multiplies the unpacked value stored in the given buffer by 10 + // @param buffer contains the unpacked value to multiply (32 least significant bits are used) + private void multBuffBy10(long[] buffer) { + int maxIdx = (buffer).length - 1; + buffer[0] &= LOWER_32_BITS; + buffer[maxIdx] *= 10L; + for (int i = (maxIdx) - 1; i >= (0); i--) { + buffer[i] = (buffer[i] * 10L + ((buffer[i + 1]) >>> (32L))); + buffer[i + 1] &= LOWER_32_BITS; + } + } + + // Makes sure that the (unpacked) mantissa is normalized, + // i.e. buff[0] contains 1 in bit 32 (the implied integer part) and higher 32 of mantissa in bits + // 31..0, + // and buff[1]..buff[4] contain other 96 bits of mantissa in their lower halves: + //
0x0000_0001_XXXX_XXXXL, 0x0000_0000_XXXX_XXXXL...
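+ // e.g. a mantissa of exactly 1.5 (binary 1.1) is stored as buff[0] = 0x0000_0001_8000_0000L
+ // with the remaining words zero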
+ // If necessary, divides the mantissa by appropriate power of 2 to make it normal. + // @param mantissa a buffer containing unpacked mantissa + // @return if the mantissa was not normal initially, a correction that should be added to the + // result's exponent, or 0 otherwise + private int normalizeMant(long[] mantissa) { + int expCorr = 31 - Long.numberOfLeadingZeros(mantissa[0]); + if (expCorr != 0) { + divBuffByPower2(mantissa, expCorr); + } + return expCorr; + } + + // Rounds up the contents of the unpacked buffer to 128 bits by adding unity one bit lower than + // the lowest of these 128 bits. If carry propagates up to bit 33 of buff[0], shifts the buffer + // rightwards to keep it normalized. + // @param mantissa the buffer to get rounded + // @return 1 if the buffer was shifted, 0 otherwise + private int roundUp(long[] mantissa) { + // due to the limited precision of the power of 2, a number with exactly half LSB in its + // mantissa + // (i.e that would have 0x8000_0000_0000_0000L in bits 128..191 if it were computed precisely), + // after multiplication by this power of 2, may get erroneous bits 185..191 (counting from the + // MSB), + // taking a value from + // 0xXXXX_XXXX_XXXX_XXXXL 0xXXXX_XXXX_XXXX_XXXXL 0x7FFF_FFFF_FFFF_FFD8L. + // to + // 0xXXXX_XXXX_XXXX_XXXXL 0xXXXX_XXXX_XXXX_XXXXL 0x8000_0000_0000_0014L, or something alike. + // To round it up, we first add + // 0x0000_0000_0000_0000L 0x0000_0000_0000_0000L 0x0000_0000_0000_0028L, to turn it into + // 0xXXXX_XXXX_XXXX_XXXXL 0xXXXX_XXXX_XXXX_XXXXL 0x8000_0000_0000_00XXL, + // and then add + // 0x0000_0000_0000_0000L 0x0000_0000_0000_0000L 0x8000_0000_0000_0000L, to provide carry to + // higher bits. + addToBuff(mantissa, 5, 100L); // to compensate possible inaccuracy + addToBuff(mantissa, 4, 0x8000_0000L); // round-up, if bits 128..159 >= 0x8000_0000L + if ((mantissa[0] & (HIGHER_32_BITS << 1L)) != 0L) { + // carry's got propagated beyond the highest bit + divBuffByPower2(mantissa, 1); + return 1; + } + return 0; + } + + // converts 192 most significant bits of the mantissa of a number from an unpacked quasidecimal + // form (where 32 least significant bits only used) to a packed quasidecimal form (where buff[0] + // contains the exponent and buff[1]..buff[3] contain 3 x 64 = 192 bits of mantissa) + // @param unpackedMant a buffer of at least 6 longs containing an unpacked value + // @param result a buffer of at least 4 long to hold the packed value + // @return packedQD192 with words 1..3 filled with the packed mantissa. packedQD192[0] is not + // affected. 
+ private void pack_6x32_to_3x64(long[] unpackedMant, long[] result) { + result[1] = (unpackedMant[0] << 32L) + unpackedMant[1]; + result[2] = (unpackedMant[2] << 32L) + unpackedMant[3]; + result[3] = (unpackedMant[4] << 32L) + unpackedMant[5]; + } + + // Unpacks the mantissa of a 192-bit quasidecimal (4 longs: exp10, mantHi, mantMid, mantLo) to a + // buffer of 6 longs, where the least significant 32 bits of each long contains respective 32 bits + // of the mantissa + // @param qd192 array of 4 longs containing the number to unpack + // @param buff_6x32 buffer of 6 long to hold the unpacked mantissa + private void unpack_3x64_to_6x32(long[] qd192, long[] buff_6x32) { + buff_6x32[0] = ((qd192[1]) >>> (32L)); + buff_6x32[1] = qd192[1] & LOWER_32_BITS; + buff_6x32[2] = ((qd192[2]) >>> (32L)); + buff_6x32[3] = qd192[2] & LOWER_32_BITS; + buff_6x32[4] = ((qd192[3]) >>> (32L)); + buff_6x32[5] = qd192[3] & LOWER_32_BITS; + } + + // Divides the contents of the buffer by 2^exp2
+ // (shifts the buffer rightwards by exp2 if the exp2 is positive, and leftwards if it's negative), + // keeping it unpacked (only lower 32 bits of each element are used, except the buff[0] whose + // higher half is intended to contain integer part) + // @param buffer the buffer to divide + // @param exp2 the exponent of the power of two to divide by, expected to be + private void divBuffByPower2(long[] buffer, int exp2) { + int maxIdx = (buffer).length - 1; + long backShift = ((long) (32 - Math.abs(exp2))); + if (exp2 > 0) { // Shift to the right + long exp2Shift = ((long) (exp2)); + for (int i = (maxIdx + 1) - 1; i >= (1); i--) { + buffer[i] = ((buffer[i]) >>> (exp2Shift)) | ((buffer[i - 1] << backShift) & LOWER_32_BITS); + } + buffer[0] = ((buffer[0]) >>> (exp2Shift)); // Preserve the high half of buff[0] + } else if (exp2 < 0) { // Shift to the left + long exp2Shift = ((long) (-exp2)); + buffer[0] = + ((buffer[0] << exp2Shift) + | ((buffer[1]) >>> (backShift))); // Preserve the high half of buff[0] + for (int i = (1); i < (maxIdx); i++) { + buffer[i] = + (((buffer[i] << exp2Shift) & LOWER_32_BITS) | ((buffer[i + 1]) >>> (backShift))); + } + buffer[maxIdx] = (buffer[maxIdx] << exp2Shift) & LOWER_32_BITS; + } + } + + // Adds the summand to the idx'th word of the unpacked value stored in the buffer + // and propagates carry as necessary + // @param buff the buffer to add the summand to + // @param idx the index of the element to which the summand is to be added + // @param summand the summand to add to the idx'th element of the buffer + private void addToBuff(long[] buff, int idx, long summand) { + int maxIdx = idx; + buff[maxIdx] = (buff[maxIdx] + summand); // Big-endian, the lowest word + for (int i = (maxIdx + 1) - 1; + i >= (1); + i--) { // from the lowest word upwards, except the highest + if ((buff[i] & HIGHER_32_BITS) != 0L) { + buff[i] &= LOWER_32_BITS; + buff[i - 1] += 1L; + } else { + break; + } + } + } +} diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/RegexValue.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/RegexValue.java new file mode 100644 index 00000000000..1af0ce1f04d --- /dev/null +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/RegexValue.java @@ -0,0 +1,56 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.firebase.firestore; + +import androidx.annotation.NonNull; + +/** Represents a regular expression type in Firestore documents. */ +public final class RegexValue { + public final String pattern; + public final String options; + + public RegexValue(@NonNull String pattern, @NonNull String options) { + this.pattern = pattern; + this.options = options; + } + + /** + * Returns true if this RegexValue is equal to the provided object. + * + * @param obj The object to compare against. + * @return Whether this RegexValue is equal to the provided object. 
+ */ + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof RegexValue)) { + return false; + } + RegexValue other = (RegexValue) obj; + return pattern.equals(other.pattern) && options.equals(other.options); + } + + @Override + public int hashCode() { + return 31 * pattern.hashCode() + options.hashCode(); + } + + @Override + public String toString() { + return "RegexValue{pattern='" + pattern + "', options='" + options + "'}"; + } +} diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataReader.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataReader.java index 297479d0262..e6170cb33d4 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataReader.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataReader.java @@ -43,6 +43,7 @@ import com.google.firestore.v1.ArrayValue; import com.google.firestore.v1.MapValue; import com.google.firestore.v1.Value; +import com.google.protobuf.ByteString; import com.google.protobuf.NullValue; import com.google.type.LatLng; import java.util.ArrayList; @@ -443,6 +444,23 @@ private Value parseScalarValue(Object input, ParseContext context) { .build(); } else if (input instanceof VectorValue) { return parseVectorValue(((VectorValue) input), context); + + } else if (input instanceof MinKey) { + return parseMinKey(); + } else if (input instanceof MaxKey) { + return parseMaxKey(); + } else if (input instanceof BsonObjectId) { + return parseBsonObjectId((BsonObjectId) input); + } else if (input instanceof BsonTimestamp) { + return parseBsonTimestamp((BsonTimestamp) input); + } else if (input instanceof BsonBinaryData) { + return parseBsonBinary((BsonBinaryData) input); + } else if (input instanceof RegexValue) { + return parseRegexValue((RegexValue) input); + } else if (input instanceof Int32Value) { + return parseInteger32Value((Int32Value) input); + } else if (input instanceof Decimal128Value) { + return parseDecimal128Value((Decimal128Value) input); } else if (input.getClass().isArray()) { throw context.createError("Arrays are not supported; use a List instead"); } else { @@ -459,6 +477,88 @@ private Value parseVectorValue(VectorValue vector, ParseContext context) { return Value.newBuilder().setMapValue(mapBuilder).build(); } + private Value parseMinKey() { + MapValue.Builder mapBuilder = MapValue.newBuilder(); + mapBuilder.putFields( + Values.RESERVED_MIN_KEY, Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build()); + return Value.newBuilder().setMapValue(mapBuilder).build(); + } + + private Value parseMaxKey() { + MapValue.Builder mapBuilder = MapValue.newBuilder(); + mapBuilder.putFields( + Values.RESERVED_MAX_KEY, Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build()); + return Value.newBuilder().setMapValue(mapBuilder).build(); + } + + private Value parseBsonObjectId(BsonObjectId objectId) { + MapValue.Builder mapBuilder = MapValue.newBuilder(); + mapBuilder.putFields( + Values.RESERVED_OBJECT_ID_KEY, + Value.newBuilder().setStringValue((String) objectId.value).build()); + return Value.newBuilder().setMapValue(mapBuilder).build(); + } + + private Value parseBsonTimestamp(BsonTimestamp timestamp) { + MapValue.Builder innerMapBuilder = MapValue.newBuilder(); + innerMapBuilder.putFields( + Values.RESERVED_BSON_TIMESTAMP_SECONDS_KEY, + Value.newBuilder().setIntegerValue(timestamp.seconds).build()); + innerMapBuilder.putFields( + Values.RESERVED_BSON_TIMESTAMP_INCREMENT_KEY, + 
Value.newBuilder().setIntegerValue(timestamp.increment).build()); + + MapValue.Builder mapBuilder = MapValue.newBuilder(); + mapBuilder.putFields( + Values.RESERVED_BSON_TIMESTAMP_KEY, + Value.newBuilder().setMapValue(innerMapBuilder).build()); + + return Value.newBuilder().setMapValue(mapBuilder).build(); + } + + private Value parseBsonBinary(BsonBinaryData binary) { + MapValue.Builder mapBuilder = MapValue.newBuilder(); + mapBuilder.putFields( + Values.RESERVED_BSON_BINARY_KEY, + Value.newBuilder() + .setBytesValue( + ByteString.copyFrom(new byte[] {(byte) binary.subtype()}) + .concat(binary.dataAsByteString())) + .build()); + return Value.newBuilder().setMapValue(mapBuilder).build(); + } + + private Value parseRegexValue(RegexValue regex) { + MapValue.Builder innerMapBuilder = MapValue.newBuilder(); + innerMapBuilder.putFields( + Values.RESERVED_REGEX_PATTERN_KEY, + Value.newBuilder().setStringValue(regex.pattern).build()); + innerMapBuilder.putFields( + Values.RESERVED_REGEX_OPTIONS_KEY, + Value.newBuilder().setStringValue(regex.options).build()); + + MapValue.Builder mapBuilder = MapValue.newBuilder(); + mapBuilder.putFields( + Values.RESERVED_REGEX_KEY, Value.newBuilder().setMapValue(innerMapBuilder).build()); + + return Value.newBuilder().setMapValue(mapBuilder).build(); + } + + private Value parseInteger32Value(Int32Value int32) { + MapValue.Builder mapBuilder = MapValue.newBuilder(); + mapBuilder.putFields( + Values.RESERVED_INT32_KEY, Value.newBuilder().setIntegerValue(int32.value).build()); + return Value.newBuilder().setMapValue(mapBuilder).build(); + } + + private Value parseDecimal128Value(Decimal128Value decimal128) { + MapValue.Builder mapBuilder = MapValue.newBuilder(); + mapBuilder.putFields( + Values.RESERVED_DECIMAL128_KEY, + Value.newBuilder().setStringValue(decimal128.stringValue).build()); + return Value.newBuilder().setMapValue(mapBuilder).build(); + } + private Value parseTimestamp(Timestamp timestamp) { // Firestore backend truncates precision down to microseconds. 
To ensure offline mode works // the same with regards to truncation, perform the truncation immediately without waiting for diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataWriter.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataWriter.java index d6ac7b90bba..08fb8292283 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataWriter.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataWriter.java @@ -19,17 +19,24 @@ import static com.google.firebase.firestore.model.Values.TYPE_ORDER_ARRAY; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_BLOB; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_BOOLEAN; +import static com.google.firebase.firestore.model.Values.TYPE_ORDER_BSON_BINARY; +import static com.google.firebase.firestore.model.Values.TYPE_ORDER_BSON_OBJECT_ID; +import static com.google.firebase.firestore.model.Values.TYPE_ORDER_BSON_TIMESTAMP; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_GEOPOINT; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_MAP; +import static com.google.firebase.firestore.model.Values.TYPE_ORDER_MAX_KEY; +import static com.google.firebase.firestore.model.Values.TYPE_ORDER_MIN_KEY; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_NULL; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_NUMBER; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_REFERENCE; +import static com.google.firebase.firestore.model.Values.TYPE_ORDER_REGEX; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_SERVER_TIMESTAMP; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_STRING; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_TIMESTAMP; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_VECTOR; import static com.google.firebase.firestore.model.Values.typeOrder; import static com.google.firebase.firestore.util.Assert.fail; +import static com.google.firestore.v1.Value.ValueTypeCase.MAP_VALUE; import androidx.annotation.RestrictTo; import com.google.firebase.Timestamp; @@ -39,6 +46,7 @@ import com.google.firebase.firestore.util.Logger; import com.google.firestore.v1.ArrayValue; import com.google.firestore.v1.Value; +import com.google.protobuf.ByteString; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -78,6 +86,13 @@ public Object convertValue(Value value) { case TYPE_ORDER_BOOLEAN: return value.getBooleanValue(); case TYPE_ORDER_NUMBER: + if (value.getValueTypeCase() == MAP_VALUE) { + if (Values.isInt32Value(value)) { + return convertInt32(value.getMapValue().getFieldsMap()); + } else if (Values.isDecimal128Value(value)) { + return convertDecimal128(value.getMapValue().getFieldsMap()); + } + } return value.getValueTypeCase().equals(Value.ValueTypeCase.INTEGER_VALUE) ? 
(Object) value.getIntegerValue() // Cast to Object to prevent type coercion to double : (Object) value.getDoubleValue(); @@ -90,6 +105,19 @@ public Object convertValue(Value value) { value.getGeoPointValue().getLatitude(), value.getGeoPointValue().getLongitude()); case TYPE_ORDER_VECTOR: return convertVectorValue(value.getMapValue().getFieldsMap()); + case TYPE_ORDER_BSON_OBJECT_ID: + return convertBsonObjectId(value.getMapValue().getFieldsMap()); + case TYPE_ORDER_BSON_TIMESTAMP: + return convertBsonTimestamp(value.getMapValue().getFieldsMap()); + case TYPE_ORDER_BSON_BINARY: + return convertBsonBinary(value.getMapValue().getFieldsMap()); + case TYPE_ORDER_REGEX: + return convertRegex(value.getMapValue().getFieldsMap()); + case TYPE_ORDER_MAX_KEY: + return MaxKey.instance(); + case TYPE_ORDER_MIN_KEY: + return MinKey.instance(); + default: throw fail("Unknown value type: " + value.getValueTypeCase()); } @@ -115,6 +143,47 @@ VectorValue convertVectorValue(Map mapValue) { return new VectorValue(doubles); } + BsonObjectId convertBsonObjectId(Map mapValue) { + return new BsonObjectId(mapValue.get(Values.RESERVED_OBJECT_ID_KEY).getStringValue()); + } + + BsonTimestamp convertBsonTimestamp(Map mapValue) { + Map fields = + mapValue.get(Values.RESERVED_BSON_TIMESTAMP_KEY).getMapValue().getFieldsMap(); + return new BsonTimestamp( + fields.get(Values.RESERVED_BSON_TIMESTAMP_SECONDS_KEY).getIntegerValue(), + fields.get(Values.RESERVED_BSON_TIMESTAMP_INCREMENT_KEY).getIntegerValue()); + } + + BsonBinaryData convertBsonBinary(Map mapValue) { + ByteString bytes = mapValue.get(Values.RESERVED_BSON_BINARY_KEY).getBytesValue(); + // Note: A byte is interpreted as a signed 8-bit value. Since values larger than 127 have a + // leading '1' bit, simply casting them to integer results in sign-extension and lead to a + // negative integer value. For example, the byte `0x80` casted to `int` results in `-128`, + // rather than `128`, and the byte `0xFF` casted to `int` will be `-1` rather than `255`. + // Since we want the `subtype` to be an unsigned byte, we need to perform 0-extension (rather + // than sign-extension) to convert it to an int. 
+ int subtype = bytes.byteAt(0) & 0xFF; + return BsonBinaryData.fromByteString(subtype, bytes.substring(1)); + } + + RegexValue convertRegex(Map mapValue) { + Map fields = + mapValue.get(Values.RESERVED_REGEX_KEY).getMapValue().getFieldsMap(); + + return new RegexValue( + fields.get(Values.RESERVED_REGEX_PATTERN_KEY).getStringValue(), + fields.get(Values.RESERVED_REGEX_OPTIONS_KEY).getStringValue()); + } + + Int32Value convertInt32(Map mapValue) { + return new Int32Value((int) mapValue.get(Values.RESERVED_INT32_KEY).getIntegerValue()); + } + + Decimal128Value convertDecimal128(Map mapValue) { + return new Decimal128Value(mapValue.get(Values.RESERVED_DECIMAL128_KEY).getStringValue()); + } + private Object convertServerTimestamp(Value serverTimestampValue) { switch (serverTimestampBehavior) { case PREVIOUS: diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/core/Target.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/core/Target.java index d058e15659e..75879d77b3e 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/core/Target.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/core/Target.java @@ -16,8 +16,8 @@ import static com.google.firebase.firestore.core.FieldFilter.Operator.ARRAY_CONTAINS; import static com.google.firebase.firestore.core.FieldFilter.Operator.ARRAY_CONTAINS_ANY; -import static com.google.firebase.firestore.model.Values.MAX_VALUE; -import static com.google.firebase.firestore.model.Values.MIN_VALUE; +import static com.google.firebase.firestore.model.Values.INTERNAL_MAX_VALUE; +import static com.google.firebase.firestore.model.Values.INTERNAL_MIN_VALUE; import static com.google.firebase.firestore.model.Values.lowerBoundCompare; import static com.google.firebase.firestore.model.Values.upperBoundCompare; @@ -184,7 +184,7 @@ private List getFieldFiltersForPath(FieldPath path) { /** * Returns a lower bound of field values that can be used as a starting point to scan the index - * defined by {@code fieldIndex}. Returns {@link Values#MIN_VALUE} if no lower bound exists. + * defined by {@code fieldIndex}. Returns {@link Values#INTERNAL_MIN_VALUE} if no lower bound exists. */ public Bound getLowerBound(FieldIndex fieldIndex) { List values = new ArrayList<>(); @@ -206,7 +206,7 @@ public Bound getLowerBound(FieldIndex fieldIndex) { /** * Returns an upper bound of field values that can be used as an ending point when scanning the - * index defined by {@code fieldIndex}. Returns {@link Values#MAX_VALUE} if no upper bound exists. + * index defined by {@code fieldIndex}. Returns {@link Values#INTERNAL_MAX_VALUE} if no upper bound exists. 
*/ public Bound getUpperBound(FieldIndex fieldIndex) { List values = new ArrayList<>(); @@ -235,12 +235,12 @@ public Bound getUpperBound(FieldIndex fieldIndex) { */ private Pair getAscendingBound( FieldIndex.Segment segment, @Nullable Bound bound) { - Value segmentValue = MIN_VALUE; + Value segmentValue = INTERNAL_MIN_VALUE; boolean segmentInclusive = true; // Process all filters to find a value for the current field segment for (FieldFilter fieldFilter : getFieldFiltersForPath(segment.getFieldPath())) { - Value filterValue = MIN_VALUE; + Value filterValue = INTERNAL_MIN_VALUE; boolean filterInclusive = true; switch (fieldFilter.getOperator()) { @@ -259,7 +259,7 @@ private Pair getAscendingBound( break; case NOT_EQUAL: case NOT_IN: - filterValue = Values.MIN_VALUE; + filterValue = Values.MIN_KEY_VALUE; break; default: // Remaining filters cannot be used as bound. @@ -300,12 +300,12 @@ private Pair getAscendingBound( */ private Pair getDescendingBound( FieldIndex.Segment segment, @Nullable Bound bound) { - Value segmentValue = MAX_VALUE; + Value segmentValue = INTERNAL_MAX_VALUE; boolean segmentInclusive = true; // Process all filters to find a value for the current field segment for (FieldFilter fieldFilter : getFieldFiltersForPath(segment.getFieldPath())) { - Value filterValue = MAX_VALUE; + Value filterValue = INTERNAL_MAX_VALUE; boolean filterInclusive = true; switch (fieldFilter.getOperator()) { @@ -325,7 +325,7 @@ private Pair getDescendingBound( break; case NOT_EQUAL: case NOT_IN: - filterValue = Values.MAX_VALUE; + filterValue = Values.MAX_KEY_VALUE; break; default: // Remaining filters cannot be used as bound. diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/index/FirestoreIndexValueWriter.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/index/FirestoreIndexValueWriter.java index f275634957a..80ca51b9224 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/index/FirestoreIndexValueWriter.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/index/FirestoreIndexValueWriter.java @@ -14,36 +14,48 @@ package com.google.firebase.firestore.index; +import static com.google.firebase.firestore.model.Values.NULL_VALUE; + import com.google.firebase.firestore.model.ResourcePath; import com.google.firebase.firestore.model.Values; import com.google.firestore.v1.ArrayValue; import com.google.firestore.v1.MapValue; import com.google.firestore.v1.Value; +import com.google.protobuf.ByteString; import com.google.protobuf.Timestamp; import com.google.type.LatLng; +import java.util.ArrayList; +import java.util.List; import java.util.Map; /** Firestore index value writer. */ public class FirestoreIndexValueWriter { - // Note: This code is copied from the backend. Code that is not used by Firestore was removed. + // Note: This file is copied from the backend. Code that is not used by + // Firestore was removed. Code that has different behavior was modified. // The client SDK only supports references to documents from the same database. We can skip the // first five segments. 
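+ // e.g. for "projects/p/databases/d/documents/rooms/eros" the segments "projects", "p",
+ // "databases", "d" and "documents" are skipped and only "rooms" and "eros" are written to the index.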
public static final int DOCUMENT_NAME_OFFSET = 5; public static final int INDEX_TYPE_NULL = 5; + public static final int INDEX_TYPE_MIN_KEY = 7; public static final int INDEX_TYPE_BOOLEAN = 10; public static final int INDEX_TYPE_NAN = 13; public static final int INDEX_TYPE_NUMBER = 15; public static final int INDEX_TYPE_TIMESTAMP = 20; + public static final int INDEX_TYPE_BSON_TIMESTAMP = 22; public static final int INDEX_TYPE_STRING = 25; public static final int INDEX_TYPE_BLOB = 30; + public static final int INDEX_TYPE_BSON_BINARY = 31; public static final int INDEX_TYPE_REFERENCE = 37; + public static final int INDEX_TYPE_BSON_OBJECT_ID = 43; public static final int INDEX_TYPE_GEOPOINT = 45; + public static final int INDEX_TYPE_REGEX = 47; public static final int INDEX_TYPE_ARRAY = 50; public static final int INDEX_TYPE_VECTOR = 53; public static final int INDEX_TYPE_MAP = 55; public static final int INDEX_TYPE_REFERENCE_SEGMENT = 60; + public static final int INDEX_TYPE_MAX_KEY = 999; // A terminator that indicates that a truncatable value was not truncated. // This must be smaller than all other type labels. @@ -70,17 +82,7 @@ private void writeIndexValueAux(Value indexValue, DirectionalIndexByteEncoder en encoder.writeLong(indexValue.getBooleanValue() ? 1 : 0); break; case DOUBLE_VALUE: - double number = indexValue.getDoubleValue(); - if (Double.isNaN(number)) { - writeValueTypeLabel(encoder, INDEX_TYPE_NAN); - break; - } - writeValueTypeLabel(encoder, INDEX_TYPE_NUMBER); - if (number == -0.0) { - encoder.writeDouble(0.0); // -0.0, 0 and 0.0 are all considered the same - } else { - encoder.writeDouble(number); - } + writeIndexDouble(indexValue.getDoubleValue(), encoder); break; case INTEGER_VALUE: writeValueTypeLabel(encoder, INDEX_TYPE_NUMBER); @@ -88,10 +90,7 @@ private void writeIndexValueAux(Value indexValue, DirectionalIndexByteEncoder en encoder.writeDouble(indexValue.getIntegerValue()); break; case TIMESTAMP_VALUE: - Timestamp timestamp = indexValue.getTimestampValue(); - writeValueTypeLabel(encoder, INDEX_TYPE_TIMESTAMP); - encoder.writeLong(timestamp.getSeconds()); - encoder.writeLong(timestamp.getNanos()); + writeIndexTimestamp(indexValue.getTimestampValue(), encoder); break; case STRING_VALUE: writeIndexString(indexValue.getStringValue(), encoder); @@ -106,21 +105,54 @@ private void writeIndexValueAux(Value indexValue, DirectionalIndexByteEncoder en writeIndexEntityRef(indexValue.getReferenceValue(), encoder); break; case GEO_POINT_VALUE: - LatLng geoPoint = indexValue.getGeoPointValue(); - writeValueTypeLabel(encoder, INDEX_TYPE_GEOPOINT); - encoder.writeDouble(geoPoint.getLatitude()); - encoder.writeDouble(geoPoint.getLongitude()); + writeIndexGeoPoint(indexValue.getGeoPointValue(), encoder); break; case MAP_VALUE: - if (Values.isMaxValue(indexValue)) { - writeValueTypeLabel(encoder, Integer.MAX_VALUE); - break; - } else if (Values.isVectorValue(indexValue)) { - writeIndexVector(indexValue.getMapValue(), encoder); - break; + Values.MapRepresentation mapType = Values.detectMapRepresentation(indexValue); + switch (mapType) { + case INTERNAL_MAX: + writeValueTypeLabel(encoder, Integer.MAX_VALUE); + break; + case VECTOR: + writeIndexVector(indexValue.getMapValue(), encoder); + break; + case REGEX: + writeIndexRegex(indexValue.getMapValue(), encoder); + break; + case BSON_TIMESTAMP: + writeIndexBsonTimestamp(indexValue.getMapValue(), encoder); + break; + case BSON_OBJECT_ID: + writeIndexBsonObjectId(indexValue.getMapValue(), encoder); + break; + case BSON_BINARY: + 
writeIndexBsonBinaryData(indexValue.getMapValue(), encoder); + break; + case INT32: + writeIndexInt32(indexValue.getMapValue(), encoder); + break; + case DECIMAL128: + // Double and Decimal128 sort the same + // Decimal128 is written as double with precision lost + double number = + Double.parseDouble( + indexValue + .getMapValue() + .getFieldsMap() + .get(Values.RESERVED_DECIMAL128_KEY) + .getStringValue()); + writeIndexDouble(number, encoder); + break; + case MIN_KEY: + writeValueTypeLabel(encoder, INDEX_TYPE_MIN_KEY); + break; + case MAX_KEY: + writeValueTypeLabel(encoder, INDEX_TYPE_MAX_KEY); + break; + default: + writeIndexMap(indexValue.getMapValue(), encoder); + writeTruncationMarker(encoder); } - writeIndexMap(indexValue.getMapValue(), encoder); - writeTruncationMarker(encoder); break; case ARRAY_VALUE: writeIndexArray(indexValue.getArrayValue(), encoder); @@ -142,6 +174,37 @@ private void writeUnlabeledIndexString( encoder.writeString(stringIndexValue); } + private void writeIndexDouble(double number, DirectionalIndexByteEncoder encoder) { + if (Double.isNaN(number)) { + writeValueTypeLabel(encoder, INDEX_TYPE_NAN); + return; + } + writeValueTypeLabel(encoder, INDEX_TYPE_NUMBER); + if (number == -0.0) { + encoder.writeDouble(0.0); // -0.0, 0 and 0.0 are all considered the same + } else { + encoder.writeDouble(number); + } + } + + private void writeIndexInt32(MapValue mapValue, DirectionalIndexByteEncoder encoder) { + writeValueTypeLabel(encoder, INDEX_TYPE_NUMBER); + // Double and Int32 sort the same + encoder.writeDouble(mapValue.getFieldsMap().get(Values.RESERVED_INT32_KEY).getIntegerValue()); + } + + private void writeIndexTimestamp(Timestamp timestamp, DirectionalIndexByteEncoder encoder) { + writeValueTypeLabel(encoder, INDEX_TYPE_TIMESTAMP); + encoder.writeLong(timestamp.getSeconds()); + encoder.writeLong(timestamp.getNanos()); + } + + private void writeIndexGeoPoint(LatLng geoPoint, DirectionalIndexByteEncoder encoder) { + writeValueTypeLabel(encoder, INDEX_TYPE_GEOPOINT); + encoder.writeDouble(geoPoint.getLatitude()); + encoder.writeDouble(geoPoint.getLongitude()); + } + private void writeIndexVector(MapValue mapIndexValue, DirectionalIndexByteEncoder encoder) { Map map = mapIndexValue.getFieldsMap(); String key = Values.VECTOR_MAP_VECTORS_KEY; @@ -157,6 +220,53 @@ private void writeIndexVector(MapValue mapIndexValue, DirectionalIndexByteEncode this.writeIndexValueAux(map.get(key), encoder); } + private void writeIndexRegex(MapValue mapIndexValue, DirectionalIndexByteEncoder encoder) { + writeValueTypeLabel(encoder, INDEX_TYPE_REGEX); + + Map fields = + mapIndexValue.getFieldsMap().get(Values.RESERVED_REGEX_KEY).getMapValue().getFieldsMap(); + encoder.writeString(fields.get(Values.RESERVED_REGEX_PATTERN_KEY).getStringValue()); + encoder.writeString(fields.get(Values.RESERVED_REGEX_OPTIONS_KEY).getStringValue()); + writeTruncationMarker(encoder); + } + + private void writeIndexBsonTimestamp(MapValue mapValue, DirectionalIndexByteEncoder encoder) { + writeValueTypeLabel(encoder, INDEX_TYPE_BSON_TIMESTAMP); + + Map timestampFields = + mapValue + .getFieldsMap() + .get(Values.RESERVED_BSON_TIMESTAMP_KEY) + .getMapValue() + .getFieldsMap(); + + long unsignedSeconds = + timestampFields.get(Values.RESERVED_BSON_TIMESTAMP_SECONDS_KEY).getIntegerValue(); + long unsignedIncrement = + timestampFields.get(Values.RESERVED_BSON_TIMESTAMP_INCREMENT_KEY).getIntegerValue(); + + // BSON Timestamps are encoded as a 64-bit long with the lower 32 bits being the increment + // and the upper 32 
bits being the seconds + long value = (unsignedSeconds << 32) | (unsignedIncrement & 0xFFFFFFFFL); + + encoder.writeLong(value); + } + + private void writeIndexBsonObjectId(MapValue mapValue, DirectionalIndexByteEncoder encoder) { + writeValueTypeLabel(encoder, INDEX_TYPE_BSON_OBJECT_ID); + + String oid = mapValue.getFieldsMap().get(Values.RESERVED_OBJECT_ID_KEY).getStringValue(); + encoder.writeBytes(ByteString.copyFrom(oid.getBytes())); + } + + private void writeIndexBsonBinaryData(MapValue mapValue, DirectionalIndexByteEncoder encoder) { + writeValueTypeLabel(encoder, INDEX_TYPE_BSON_BINARY); + + encoder.writeBytes( + mapValue.getFieldsMap().get(Values.RESERVED_BSON_BINARY_KEY).getBytesValue()); + writeTruncationMarker(encoder); + } + private void writeIndexMap(MapValue mapIndexValue, DirectionalIndexByteEncoder encoder) { writeValueTypeLabel(encoder, INDEX_TYPE_MAP); for (Map.Entry entry : mapIndexValue.getFieldsMap().entrySet()) { @@ -177,7 +287,15 @@ private void writeIndexArray(ArrayValue arrayIndexValue, DirectionalIndexByteEnc private void writeIndexEntityRef(String referenceValue, DirectionalIndexByteEncoder encoder) { writeValueTypeLabel(encoder, INDEX_TYPE_REFERENCE); - ResourcePath path = ResourcePath.fromString(referenceValue); + List segments = new ArrayList<>(); + String[] parts = referenceValue.split("/"); + for (String part : parts) { + if (!part.isEmpty()) { + segments.add(part); + } + } + ResourcePath path = ResourcePath.fromSegments(segments); + int numSegments = path.length(); for (int index = DOCUMENT_NAME_OFFSET; index < numSegments; ++index) { String segment = path.getSegment(index); diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/model/Values.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/model/Values.java index 834fb2454a3..2f39c96a826 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/model/Values.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/model/Values.java @@ -15,11 +15,11 @@ package com.google.firebase.firestore.model; import static com.google.firebase.firestore.model.ServerTimestamps.getLocalWriteTime; -import static com.google.firebase.firestore.model.ServerTimestamps.isServerTimestamp; import static com.google.firebase.firestore.util.Assert.fail; import static com.google.firebase.firestore.util.Assert.hardAssert; import androidx.annotation.Nullable; +import com.google.firebase.firestore.Quadruple; import com.google.firebase.firestore.util.Util; import com.google.firestore.v1.ArrayValue; import com.google.firestore.v1.ArrayValueOrBuilder; @@ -38,19 +38,54 @@ public class Values { public static final String TYPE_KEY = "__type__"; + + public static final String RESERVED_VECTOR_KEY = "__vector__"; + // For MinKey type + public static final String RESERVED_MIN_KEY = "__min__"; + + // For MaxKey type + public static final String RESERVED_MAX_KEY = "__max__"; + + // For Regex type + public static final String RESERVED_REGEX_KEY = "__regex__"; + public static final String RESERVED_REGEX_PATTERN_KEY = "pattern"; + public static final String RESERVED_REGEX_OPTIONS_KEY = "options"; + + // For ObjectId type + public static final String RESERVED_OBJECT_ID_KEY = "__oid__"; + + // For Int32 type + public static final String RESERVED_INT32_KEY = "__int__"; + + // For Decimal128 type. 
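+ // (the value is stored as its decimal string, e.g. {"__decimal128__": "1.2e3"})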
+ public static final String RESERVED_DECIMAL128_KEY = "__decimal128__"; + + // For RequestTimestamp + public static final String RESERVED_BSON_TIMESTAMP_KEY = "__request_timestamp__"; + + public static final String RESERVED_BSON_TIMESTAMP_SECONDS_KEY = "seconds"; + public static final String RESERVED_BSON_TIMESTAMP_INCREMENT_KEY = "increment"; + + // For BSON Binary Data + public static final String RESERVED_BSON_BINARY_KEY = "__binary__"; + + public static final String RESERVED_SERVER_TIMESTAMP_KEY = "server_timestamp"; + public static final Value NAN_VALUE = Value.newBuilder().setDoubleValue(Double.NaN).build(); public static final Value NULL_VALUE = Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build(); - public static final Value MIN_VALUE = NULL_VALUE; - public static final Value MAX_VALUE_TYPE = Value.newBuilder().setStringValue("__max__").build(); - public static final Value MAX_VALUE = + public static final Value INTERNAL_MIN_VALUE = NULL_VALUE; + public static final Value MAX_VALUE_TYPE = + Value.newBuilder().setStringValue(RESERVED_MAX_KEY).build(); + public static final Value INTERNAL_MAX_VALUE = Value.newBuilder() .setMapValue(MapValue.newBuilder().putFields(TYPE_KEY, MAX_VALUE_TYPE)) .build(); public static final Value VECTOR_VALUE_TYPE = - Value.newBuilder().setStringValue("__vector__").build(); + Value.newBuilder().setStringValue(RESERVED_VECTOR_KEY).build(); public static final String VECTOR_MAP_VECTORS_KEY = "value"; + private static final Value MIN_VECTOR_VALUE = Value.newBuilder() .setMapValue( @@ -63,21 +98,27 @@ public class Values { /** * The order of types in Firestore. This order is based on the backend's ordering, but modified to - * support server timestamps and {@link #MAX_VALUE}. + * support server timestamps and {@link #INTERNAL_MAX_VALUE}. 
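+ * MinKey sorts immediately after null, and MaxKey sorts after all other types except
+ * {@link #INTERNAL_MAX_VALUE}.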
*/ public static final int TYPE_ORDER_NULL = 0; - public static final int TYPE_ORDER_BOOLEAN = 1; - public static final int TYPE_ORDER_NUMBER = 2; - public static final int TYPE_ORDER_TIMESTAMP = 3; - public static final int TYPE_ORDER_SERVER_TIMESTAMP = 4; - public static final int TYPE_ORDER_STRING = 5; - public static final int TYPE_ORDER_BLOB = 6; - public static final int TYPE_ORDER_REFERENCE = 7; - public static final int TYPE_ORDER_GEOPOINT = 8; - public static final int TYPE_ORDER_ARRAY = 9; - public static final int TYPE_ORDER_VECTOR = 10; - public static final int TYPE_ORDER_MAP = 11; + public static final int TYPE_ORDER_MIN_KEY = 1; + public static final int TYPE_ORDER_BOOLEAN = 2; + public static final int TYPE_ORDER_NUMBER = 3; + public static final int TYPE_ORDER_TIMESTAMP = 4; + public static final int TYPE_ORDER_BSON_TIMESTAMP = 5; + public static final int TYPE_ORDER_SERVER_TIMESTAMP = 6; + public static final int TYPE_ORDER_STRING = 7; + public static final int TYPE_ORDER_BLOB = 8; + public static final int TYPE_ORDER_BSON_BINARY = 9; + public static final int TYPE_ORDER_REFERENCE = 10; + public static final int TYPE_ORDER_BSON_OBJECT_ID = 11; + public static final int TYPE_ORDER_GEOPOINT = 12; + public static final int TYPE_ORDER_REGEX = 13; + public static final int TYPE_ORDER_ARRAY = 14; + public static final int TYPE_ORDER_VECTOR = 15; + public static final int TYPE_ORDER_MAP = 16; + public static final int TYPE_ORDER_MAX_KEY = 17; public static final int TYPE_ORDER_MAX_VALUE = Integer.MAX_VALUE; @@ -89,7 +130,6 @@ public static int typeOrder(Value value) { case BOOLEAN_VALUE: return TYPE_ORDER_BOOLEAN; case INTEGER_VALUE: - return TYPE_ORDER_NUMBER; case DOUBLE_VALUE: return TYPE_ORDER_NUMBER; case TIMESTAMP_VALUE: @@ -105,14 +145,31 @@ public static int typeOrder(Value value) { case ARRAY_VALUE: return TYPE_ORDER_ARRAY; case MAP_VALUE: - if (isServerTimestamp(value)) { - return TYPE_ORDER_SERVER_TIMESTAMP; - } else if (isMaxValue(value)) { - return TYPE_ORDER_MAX_VALUE; - } else if (isVectorValue(value)) { - return TYPE_ORDER_VECTOR; - } else { - return TYPE_ORDER_MAP; + MapRepresentation mapType = detectMapRepresentation(value); + switch (mapType) { + case SERVER_TIMESTAMP: + return TYPE_ORDER_SERVER_TIMESTAMP; + case INTERNAL_MAX: + return TYPE_ORDER_MAX_VALUE; + case VECTOR: + return TYPE_ORDER_VECTOR; + case MIN_KEY: + return TYPE_ORDER_MIN_KEY; + case MAX_KEY: + return TYPE_ORDER_MAX_KEY; + case REGEX: + return TYPE_ORDER_REGEX; + case BSON_TIMESTAMP: + return TYPE_ORDER_BSON_TIMESTAMP; + case BSON_OBJECT_ID: + return TYPE_ORDER_BSON_OBJECT_ID; + case BSON_BINARY: + return TYPE_ORDER_BSON_BINARY; + case INT32: + case DECIMAL128: + return TYPE_ORDER_NUMBER; + default: + return TYPE_ORDER_MAP; } default: throw fail("Invalid value type: " + value.getValueTypeCase()); @@ -145,6 +202,9 @@ public static boolean equals(Value left, Value right) { case TYPE_ORDER_SERVER_TIMESTAMP: return getLocalWriteTime(left).equals(getLocalWriteTime(right)); case TYPE_ORDER_MAX_VALUE: + case TYPE_ORDER_NULL: + case TYPE_ORDER_MAX_KEY: + case TYPE_ORDER_MIN_KEY: return true; default: return left.equals(right); @@ -152,18 +212,43 @@ public static boolean equals(Value left, Value right) { } private static boolean numberEquals(Value left, Value right) { - if (left.getValueTypeCase() == Value.ValueTypeCase.INTEGER_VALUE - && right.getValueTypeCase() == Value.ValueTypeCase.INTEGER_VALUE) { - return left.getIntegerValue() == right.getIntegerValue(); - } else if (left.getValueTypeCase() == 
Value.ValueTypeCase.DOUBLE_VALUE - && right.getValueTypeCase() == Value.ValueTypeCase.DOUBLE_VALUE) { + if ((isInt64Value(left) && isInt64Value(right)) + || (isInt32Value(left) && isInt32Value(right))) { + return getIntegerValue(left) == getIntegerValue(right); + } else if (isDouble(left) && isDouble(right)) { return Double.doubleToLongBits(left.getDoubleValue()) == Double.doubleToLongBits(right.getDoubleValue()); + } else if (isDecimal128Value(left) && isDecimal128Value(right)) { + Quadruple leftQuadruple = Quadruple.fromString(getDecimal128StringValue(left)); + Quadruple rightQuadruple = Quadruple.fromString(getDecimal128StringValue(right)); + return Util.compareQuadruples(leftQuadruple, rightQuadruple) == 0; } return false; } + /** + * Returns a long from a 32-bit or 64-bit proto integer value. Throws an exception if the value is + * not an integer. + */ + private static long getIntegerValue(Value value) { + if (value.hasIntegerValue()) { + return value.getIntegerValue(); + } + if (isInt32Value(value)) { + return value.getMapValue().getFieldsMap().get(RESERVED_INT32_KEY).getIntegerValue(); + } + throw new IllegalArgumentException("getIntegerValue was called with a non-integer argument"); + } + + private static String getDecimal128StringValue(Value value) { + if (isDecimal128Value(value)) { + return value.getMapValue().getFieldsMap().get(RESERVED_DECIMAL128_KEY).getStringValue(); + } + throw new IllegalArgumentException( + "getDecimal128StringValue was called with a non-decimal128 argument"); + } + private static boolean arrayEquals(Value left, Value right) { ArrayValue leftArray = left.getArrayValue(); ArrayValue rightArray = right.getArrayValue(); @@ -220,6 +305,8 @@ public static int compare(Value left, Value right) { switch (leftType) { case TYPE_ORDER_NULL: case TYPE_ORDER_MAX_VALUE: + case TYPE_ORDER_MAX_KEY: + case TYPE_ORDER_MIN_KEY: return 0; case TYPE_ORDER_BOOLEAN: return Util.compareBooleans(left.getBooleanValue(), right.getBooleanValue()); @@ -243,6 +330,14 @@ public static int compare(Value left, Value right) { return compareMaps(left.getMapValue(), right.getMapValue()); case TYPE_ORDER_VECTOR: return compareVectors(left.getMapValue(), right.getMapValue()); + case TYPE_ORDER_REGEX: + return compareRegex(left.getMapValue(), right.getMapValue()); + case TYPE_ORDER_BSON_OBJECT_ID: + return compareBsonObjectId(left.getMapValue(), right.getMapValue()); + case TYPE_ORDER_BSON_TIMESTAMP: + return compareBsonTimestamp(left.getMapValue(), right.getMapValue()); + case TYPE_ORDER_BSON_BINARY: + return compareBsonBinary(left.getMapValue(), right.getMapValue()); default: throw fail("Invalid value type: " + leftType); } @@ -281,18 +376,32 @@ public static int upperBoundCompare( } private static int compareNumbers(Value left, Value right) { - if (left.getValueTypeCase() == Value.ValueTypeCase.DOUBLE_VALUE) { + // If either argument is Decimal128, we cast both to wider (128-bit) representation, and compare + // Quadruple values. + if (isDecimal128Value(left) || isDecimal128Value(right)) { + Quadruple leftQuadruple = convertNumberToQuadruple(left); + Quadruple rightQuadruple = convertNumberToQuadruple(right); + return Util.compareQuadruples(leftQuadruple, rightQuadruple); + } + + if (isDouble(left)) { double leftDouble = left.getDoubleValue(); - if (right.getValueTypeCase() == Value.ValueTypeCase.DOUBLE_VALUE) { + if (isDouble(right)) { + // left and right are both doubles.
return Util.compareDoubles(leftDouble, right.getDoubleValue()); - } else if (right.getValueTypeCase() == Value.ValueTypeCase.INTEGER_VALUE) { - return Util.compareMixed(leftDouble, right.getIntegerValue()); + } else if (isIntegerValue(right)) { + // left is a double and right is a 32/64-bit integer value. + return Util.compareMixed(leftDouble, getIntegerValue(right)); } - } else if (left.getValueTypeCase() == Value.ValueTypeCase.INTEGER_VALUE) { - long leftLong = left.getIntegerValue(); - if (right.getValueTypeCase() == Value.ValueTypeCase.INTEGER_VALUE) { - return Util.compareLongs(leftLong, right.getIntegerValue()); - } else if (right.getValueTypeCase() == Value.ValueTypeCase.DOUBLE_VALUE) { + } + + if (isIntegerValue(left)) { + long leftLong = getIntegerValue(left); + if (isIntegerValue(right)) { + // left and right are both 32/64-bit integer values. + return Util.compareLongs(leftLong, getIntegerValue(right)); + } else if (isDouble(right)) { + // left is a 32/64-bit integer and right is a double. return -1 * Util.compareMixed(right.getDoubleValue(), leftLong); } } @@ -300,6 +409,30 @@ private static int compareNumbers(Value left, Value right) { throw fail("Unexpected values: %s vs %s", left, right); } + /** + * Converts the given number value to a Quadruple. Throws an exception if the value is not a + * number. + */ + private static Quadruple convertNumberToQuadruple(Value value) { + // Doubles + if (isDouble(value)) { + return Quadruple.fromDouble(value.getDoubleValue()); + } + + // 64-bit or 32-bit integers. + if (isInt64Value(value) || isInt32Value(value)) { + return Quadruple.fromLong(getIntegerValue(value)); + } + + // Decimal128 numbers + if (isDecimal128Value(value)) { + return Quadruple.fromString(getDecimal128StringValue(value)); + } + + throw new IllegalArgumentException( + "convertNumberToQuadruple was called on a non-numeric value."); + } + private static int compareTimestamps(Timestamp left, Timestamp right) { int cmp = Util.compareLongs(left.getSeconds(), right.getSeconds()); if (cmp != 0) { @@ -363,6 +496,54 @@ private static int compareMaps(MapValue left, MapValue right) { return Util.compareBooleans(iterator1.hasNext(), iterator2.hasNext()); } + private static int compareRegex(MapValue left, MapValue right) { + Map leftMap = + left.getFieldsMap().get(RESERVED_REGEX_KEY).getMapValue().getFieldsMap(); + Map rightMap = + right.getFieldsMap().get(RESERVED_REGEX_KEY).getMapValue().getFieldsMap(); + + String leftPattern = leftMap.get(RESERVED_REGEX_PATTERN_KEY).getStringValue(); + String rightPattern = rightMap.get(RESERVED_REGEX_PATTERN_KEY).getStringValue(); + + int comp = Util.compareUtf8Strings(leftPattern, rightPattern); + if (comp != 0) return comp; + + String leftOption = leftMap.get(RESERVED_REGEX_OPTIONS_KEY).getStringValue(); + String rightOption = rightMap.get(RESERVED_REGEX_OPTIONS_KEY).getStringValue(); + + return leftOption.compareTo(rightOption); + } + + private static int compareBsonObjectId(MapValue left, MapValue right) { + String lhs = left.getFieldsMap().get(RESERVED_OBJECT_ID_KEY).getStringValue(); + String rhs = right.getFieldsMap().get(RESERVED_OBJECT_ID_KEY).getStringValue(); + return Util.compareUtf8Strings(lhs, rhs); + } + + private static int compareBsonTimestamp(MapValue left, MapValue right) { + Map leftMap = + left.getFieldsMap().get(RESERVED_BSON_TIMESTAMP_KEY).getMapValue().getFieldsMap(); + Map rightMap = + right.getFieldsMap().get(RESERVED_BSON_TIMESTAMP_KEY).getMapValue().getFieldsMap(); + + long leftSeconds =
leftMap.get(RESERVED_BSON_TIMESTAMP_SECONDS_KEY).getIntegerValue(); + long rightSeconds = rightMap.get(RESERVED_BSON_TIMESTAMP_SECONDS_KEY).getIntegerValue(); + + int comp = Util.compareLongs(leftSeconds, rightSeconds); + if (comp != 0) return comp; + + long leftIncrement = leftMap.get(RESERVED_BSON_TIMESTAMP_INCREMENT_KEY).getIntegerValue(); + long rightIncrement = rightMap.get(RESERVED_BSON_TIMESTAMP_INCREMENT_KEY).getIntegerValue(); + + return Util.compareLongs(leftIncrement, rightIncrement); + } + + private static int compareBsonBinary(MapValue left, MapValue right) { + ByteString lhs = left.getFieldsMap().get(RESERVED_BSON_BINARY_KEY).getBytesValue(); + ByteString rhs = right.getFieldsMap().get(RESERVED_BSON_BINARY_KEY).getBytesValue(); + return Util.compareByteStrings(lhs, rhs); + } + private static int compareVectors(MapValue left, MapValue right) { Map leftMap = left.getFieldsMap(); Map rightMap = right.getFieldsMap(); @@ -396,7 +577,7 @@ private static void canonifyValue(StringBuilder builder, Value value) { builder.append(value.getBooleanValue()); break; case INTEGER_VALUE: - builder.append(value.getIntegerValue()); + builder.append(getIntegerValue(value)); break; case DOUBLE_VALUE: builder.append(value.getDoubleValue()); @@ -473,7 +654,7 @@ private static void canonifyArray(StringBuilder builder, ArrayValue arrayValue) } /** Returns true if `value` is a INTEGER_VALUE. */ - public static boolean isInteger(@Nullable Value value) { + public static boolean isInt64Value(@Nullable Value value) { return value != null && value.getValueTypeCase() == Value.ValueTypeCase.INTEGER_VALUE; } @@ -484,7 +665,12 @@ public static boolean isDouble(@Nullable Value value) { /** Returns true if `value` is either a INTEGER_VALUE or a DOUBLE_VALUE. */ public static boolean isNumber(@Nullable Value value) { - return isInteger(value) || isDouble(value); + return isInt64Value(value) || isDouble(value); + } + + /** Returns true if `value` is an INTEGER_VALUE or an Int32Value. */ + public static boolean isIntegerValue(@Nullable Value value) { + return isInt64Value(value) || isInt32Value(value); } /** Returns true if `value` is an ARRAY_VALUE.
*/ @@ -501,7 +687,20 @@ public static boolean isNullValue(@Nullable Value value) { } public static boolean isNanValue(@Nullable Value value) { - return value != null && Double.isNaN(value.getDoubleValue()); + if (value != null && Double.isNaN(value.getDoubleValue())) { + return true; + } + + if (isDecimal128Value(value)) { + return value + .getMapValue() + .getFieldsMap() + .get(RESERVED_DECIMAL128_KEY) + .getStringValue() + .equals("NaN"); + } + + return false; } public static boolean isMapValue(@Nullable Value value) { @@ -537,6 +736,80 @@ public static Value refValue(DatabaseId databaseId, DocumentKey key) { public static Value MIN_MAP = Value.newBuilder().setMapValue(MapValue.getDefaultInstance()).build(); + public static Value MIN_KEY_VALUE = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + RESERVED_MIN_KEY, + Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build())) + .build(); + + public static Value MAX_KEY_VALUE = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + RESERVED_MAX_KEY, + Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build())) + .build(); + + public static Value MIN_BSON_OBJECT_ID_VALUE = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields(RESERVED_OBJECT_ID_KEY, Value.newBuilder().setStringValue("").build())) + .build(); + + public static Value MIN_BSON_TIMESTAMP_VALUE = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + RESERVED_BSON_TIMESTAMP_KEY, + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + // Both seconds and increment are 32 bit unsigned integers + .putFields( + RESERVED_BSON_TIMESTAMP_SECONDS_KEY, + Value.newBuilder().setIntegerValue(0).build()) + .putFields( + RESERVED_BSON_TIMESTAMP_INCREMENT_KEY, + Value.newBuilder().setIntegerValue(0).build())) + .build())) + .build(); + + public static Value MIN_BSON_BINARY_VALUE = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + RESERVED_BSON_BINARY_KEY, + // bsonBinaryValue should have at least one byte as subtype + Value.newBuilder() + .setBytesValue(ByteString.copyFrom(new byte[] {0})) + .build())) + .build(); + + public static Value MIN_REGEX_VALUE = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + RESERVED_REGEX_KEY, + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + RESERVED_REGEX_PATTERN_KEY, + Value.newBuilder().setStringValue("").build()) + .putFields( + RESERVED_REGEX_OPTIONS_KEY, + Value.newBuilder().setStringValue("").build())) + .build())) + .build(); + /** Returns the lowest value for the given value type (inclusive). 
*/ public static Value getLowerBound(Value value) { switch (value.getValueTypeCase()) { @@ -560,11 +833,30 @@ public static Value getLowerBound(Value value) { case ARRAY_VALUE: return MIN_ARRAY; case MAP_VALUE: + MapRepresentation mapType = detectMapRepresentation(value); // VectorValue sorts after ArrayValue and before an empty MapValue - if (isVectorValue(value)) { - return MIN_VECTOR_VALUE; + switch (mapType) { + case VECTOR: + return MIN_VECTOR_VALUE; + case BSON_OBJECT_ID: + return MIN_BSON_OBJECT_ID_VALUE; + case BSON_TIMESTAMP: + return MIN_BSON_TIMESTAMP_VALUE; + case BSON_BINARY: + return MIN_BSON_BINARY_VALUE; + case REGEX: + return MIN_REGEX_VALUE; + case INT32: + case DECIMAL128: + // Int32Value and Decimal128Value are treated the same as integerValue and doubleValue + return MIN_NUMBER; + case MIN_KEY: + return MIN_KEY_VALUE; + case MAX_KEY: + return MAX_KEY_VALUE; + default: + return MIN_MAP; } - return MIN_MAP; default: throw new IllegalArgumentException("Unknown value type: " + value.getValueTypeCase()); } @@ -574,42 +866,189 @@ public static Value getLowerBound(Value value) { public static Value getUpperBound(Value value) { switch (value.getValueTypeCase()) { case NULL_VALUE: - return MIN_BOOLEAN; + return MIN_KEY_VALUE; case BOOLEAN_VALUE: return MIN_NUMBER; case INTEGER_VALUE: case DOUBLE_VALUE: return MIN_TIMESTAMP; case TIMESTAMP_VALUE: - return MIN_STRING; + return MIN_BSON_TIMESTAMP_VALUE; case STRING_VALUE: return MIN_BYTES; case BYTES_VALUE: - return MIN_REFERENCE; + return MIN_BSON_BINARY_VALUE; case REFERENCE_VALUE: - return MIN_GEO_POINT; + return MIN_BSON_OBJECT_ID_VALUE; case GEO_POINT_VALUE: - return MIN_ARRAY; + return MIN_REGEX_VALUE; case ARRAY_VALUE: return MIN_VECTOR_VALUE; case MAP_VALUE: - // VectorValue sorts after ArrayValue and before an empty MapValue - if (isVectorValue(value)) { - return MIN_MAP; + MapRepresentation mapType = detectMapRepresentation(value); + switch (mapType) { + case VECTOR: + return MIN_MAP; + case BSON_OBJECT_ID: + return MIN_GEO_POINT; + case BSON_TIMESTAMP: + return MIN_STRING; + case BSON_BINARY: + return MIN_REFERENCE; + case REGEX: + return MIN_ARRAY; + case INT32: + case DECIMAL128: + // Int32Value and decimal128Value are treated the same as integerValue and doubleValue + return MIN_TIMESTAMP; + case MIN_KEY: + return MIN_BOOLEAN; + case MAX_KEY: + return INTERNAL_MAX_VALUE; + default: + return MAX_KEY_VALUE; } - return MAX_VALUE; default: throw new IllegalArgumentException("Unknown value type: " + value.getValueTypeCase()); } } - /** Returns true if the Value represents the canonical {@link #MAX_VALUE} . */ - public static boolean isMaxValue(Value value) { - return MAX_VALUE_TYPE.equals(value.getMapValue().getFieldsMap().get(TYPE_KEY)); + private static boolean isMapWithSingleFieldOfType( + Value value, String key, Value.ValueTypeCase typeCase) { + if (value == null + || value.getMapValue() == null + || value.getMapValue().getFieldsMap() == null) { + return false; + } + + Map fields = value.getMapValue().getFieldsMap(); + return fields.size() == 1 + && fields.containsKey(key) + && fields.get(key).getValueTypeCase() == typeCase; } - /** Returns true if the Value represents a VectorValue . 
*/ - public static boolean isVectorValue(Value value) { - return VECTOR_VALUE_TYPE.equals(value.getMapValue().getFieldsMap().get(TYPE_KEY)); + static boolean isMinKey(Value value) { + return isMapWithSingleFieldOfType(value, RESERVED_MIN_KEY, Value.ValueTypeCase.NULL_VALUE); + } + + static boolean isMaxKey(Value value) { + return isMapWithSingleFieldOfType(value, RESERVED_MAX_KEY, Value.ValueTypeCase.NULL_VALUE); + } + + public static boolean isInt32Value(Value value) { + return isMapWithSingleFieldOfType(value, RESERVED_INT32_KEY, Value.ValueTypeCase.INTEGER_VALUE); + } + + public static boolean isDecimal128Value(Value value) { + return isMapWithSingleFieldOfType( + value, RESERVED_DECIMAL128_KEY, Value.ValueTypeCase.STRING_VALUE); + } + + static boolean isBsonObjectId(Value value) { + return isMapWithSingleFieldOfType( + value, RESERVED_OBJECT_ID_KEY, Value.ValueTypeCase.STRING_VALUE); + } + + static boolean isBsonBinaryData(Value value) { + return isMapWithSingleFieldOfType( + value, RESERVED_BSON_BINARY_KEY, Value.ValueTypeCase.BYTES_VALUE); + } + + static boolean isRegexValue(Value value) { + if (!isMapWithSingleFieldOfType(value, RESERVED_REGEX_KEY, Value.ValueTypeCase.MAP_VALUE)) { + return false; + } + + MapValue innerMapValue = + value.getMapValue().getFieldsMap().get(RESERVED_REGEX_KEY).getMapValue(); + Map values = innerMapValue.getFieldsMap(); + return innerMapValue.getFieldsCount() == 2 + && values.containsKey(RESERVED_REGEX_PATTERN_KEY) + && values.containsKey(RESERVED_REGEX_OPTIONS_KEY) + && values.get(RESERVED_REGEX_PATTERN_KEY).hasStringValue() + && values.get(RESERVED_REGEX_OPTIONS_KEY).hasStringValue(); + } + + static boolean isBsonTimestamp(Value value) { + if (!isMapWithSingleFieldOfType( + value, RESERVED_BSON_TIMESTAMP_KEY, Value.ValueTypeCase.MAP_VALUE)) { + return false; + } + + MapValue innerMapValue = + value.getMapValue().getFieldsMap().get(RESERVED_BSON_TIMESTAMP_KEY).getMapValue(); + Map values = innerMapValue.getFieldsMap(); + return innerMapValue.getFieldsCount() == 2 + && values.containsKey(RESERVED_BSON_TIMESTAMP_SECONDS_KEY) + && values.containsKey(RESERVED_BSON_TIMESTAMP_INCREMENT_KEY) + && values.get(RESERVED_BSON_TIMESTAMP_SECONDS_KEY).hasIntegerValue() + && values.get(RESERVED_BSON_TIMESTAMP_INCREMENT_KEY).hasIntegerValue(); + } + + public enum MapRepresentation { + REGEX, + BSON_OBJECT_ID, + INT32, + DECIMAL128, + BSON_TIMESTAMP, + BSON_BINARY, + MIN_KEY, + MAX_KEY, + INTERNAL_MAX, + VECTOR, + SERVER_TIMESTAMP, + REGULAR_MAP + } + + public static MapRepresentation detectMapRepresentation(Value value) { + if (value == null + || value.getMapValue() == null + || value.getMapValue().getFieldsMap() == null) { + return MapRepresentation.REGULAR_MAP; + } + + // Check for BSON-related mappings + if (isRegexValue(value)) { + return MapRepresentation.REGEX; + } + if (isBsonObjectId(value)) { + return MapRepresentation.BSON_OBJECT_ID; + } + if (isInt32Value(value)) { + return MapRepresentation.INT32; + } + if (isDecimal128Value(value)) { + return MapRepresentation.DECIMAL128; + } + if (isBsonTimestamp(value)) { + return MapRepresentation.BSON_TIMESTAMP; + } + if (isBsonBinaryData(value)) { + return MapRepresentation.BSON_BINARY; + } + if (isMinKey(value)) { + return MapRepresentation.MIN_KEY; + } + if (isMaxKey(value)) { + return MapRepresentation.MAX_KEY; + } + + Map fields = value.getMapValue().getFieldsMap(); + + // Check for type-based mappings + if (fields.containsKey(TYPE_KEY)) { + String typeString = fields.get(TYPE_KEY).getStringValue(); + if 
(typeString.equals(RESERVED_VECTOR_KEY)) { + return MapRepresentation.VECTOR; + } + if (typeString.equals(RESERVED_MAX_KEY)) { + return MapRepresentation.INTERNAL_MAX; + } + if (typeString.equals(RESERVED_SERVER_TIMESTAMP_KEY)) { + return MapRepresentation.SERVER_TIMESTAMP; + } + } + + return MapRepresentation.REGULAR_MAP; } } diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/model/mutation/NumericIncrementTransformOperation.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/model/mutation/NumericIncrementTransformOperation.java index 0dae39ae03d..d690c71c32a 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/model/mutation/NumericIncrementTransformOperation.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/model/mutation/NumericIncrementTransformOperation.java @@ -15,7 +15,7 @@ package com.google.firebase.firestore.model.mutation; import static com.google.firebase.firestore.model.Values.isDouble; -import static com.google.firebase.firestore.model.Values.isInteger; +import static com.google.firebase.firestore.model.Values.isInt64Value; import static com.google.firebase.firestore.util.Assert.fail; import static com.google.firebase.firestore.util.Assert.hardAssert; @@ -44,10 +44,10 @@ public Value applyToLocalView(@Nullable Value previousValue, Timestamp localWrit Value baseValue = computeBaseValue(previousValue); // Return an integer value only if the previous value and the operand is an integer. - if (isInteger(baseValue) && isInteger(operand)) { + if (isInt64Value(baseValue) && isInt64Value(operand)) { long sum = safeIncrement(baseValue.getIntegerValue(), operandAsLong()); return Value.newBuilder().setIntegerValue(sum).build(); - } else if (isInteger(baseValue)) { + } else if (isInt64Value(baseValue)) { double sum = baseValue.getIntegerValue() + operandAsDouble(); return Value.newBuilder().setDoubleValue(sum).build(); } else { @@ -102,7 +102,7 @@ private long safeIncrement(long x, long y) { private double operandAsDouble() { if (isDouble(operand)) { return operand.getDoubleValue(); - } else if (isInteger(operand)) { + } else if (isInt64Value(operand)) { return operand.getIntegerValue(); } else { throw fail( @@ -114,7 +114,7 @@ private double operandAsDouble() { private long operandAsLong() { if (isDouble(operand)) { return (long) operand.getDoubleValue(); - } else if (isInteger(operand)) { + } else if (isInt64Value(operand)) { return operand.getIntegerValue(); } else { throw fail( diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/util/CustomClassMapper.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/util/CustomClassMapper.java index 6e0df1e6d4a..385e7e3bd2c 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/util/CustomClassMapper.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/util/CustomClassMapper.java @@ -20,13 +20,21 @@ import android.net.Uri; import com.google.firebase.Timestamp; import com.google.firebase.firestore.Blob; +import com.google.firebase.firestore.BsonBinaryData; +import com.google.firebase.firestore.BsonObjectId; +import com.google.firebase.firestore.BsonTimestamp; +import com.google.firebase.firestore.Decimal128Value; import com.google.firebase.firestore.DocumentId; import com.google.firebase.firestore.DocumentReference; import com.google.firebase.firestore.Exclude; import com.google.firebase.firestore.FieldValue; import com.google.firebase.firestore.GeoPoint; import 
com.google.firebase.firestore.IgnoreExtraProperties; +import com.google.firebase.firestore.Int32Value; +import com.google.firebase.firestore.MaxKey; +import com.google.firebase.firestore.MinKey; import com.google.firebase.firestore.PropertyName; +import com.google.firebase.firestore.RegexValue; import com.google.firebase.firestore.ServerTimestamp; import com.google.firebase.firestore.ThrowOnExtraProperties; import com.google.firebase.firestore.VectorValue; @@ -175,7 +183,15 @@ private static Object serialize(T o, ErrorPath path) { || o instanceof Blob || o instanceof DocumentReference || o instanceof FieldValue - || o instanceof VectorValue) { + || o instanceof VectorValue + || o instanceof MinKey + || o instanceof MaxKey + || o instanceof RegexValue + || o instanceof Int32Value + || o instanceof Decimal128Value + || o instanceof BsonTimestamp + || o instanceof BsonObjectId + || o instanceof BsonBinaryData) { return o; } else if (o instanceof Uri || o instanceof URI || o instanceof URL) { return o.toString(); @@ -245,6 +261,22 @@ private static T deserializeToClass(Object o, Class clazz, DeserializeCon return (T) convertDocumentReference(o, context); } else if (VectorValue.class.isAssignableFrom(clazz)) { return (T) convertVectorValue(o, context); + } else if (Int32Value.class.isAssignableFrom(clazz)) { + return (T) convertInt32Value(o, context); + } else if (Decimal128Value.class.isAssignableFrom(clazz)) { + return (T) convertDecimal128Value(o, context); + } else if (BsonTimestamp.class.isAssignableFrom(clazz)) { + return (T) convertBsonTimestamp(o, context); + } else if (BsonObjectId.class.isAssignableFrom(clazz)) { + return (T) convertBsonObjectId(o, context); + } else if (BsonBinaryData.class.isAssignableFrom(clazz)) { + return (T) convertBsonBinaryData(o, context); + } else if (MinKey.class.isAssignableFrom(clazz)) { + return (T) convertMinKey(o, context); + } else if (MaxKey.class.isAssignableFrom(clazz)) { + return (T) convertMaxKey(o, context); + } else if (RegexValue.class.isAssignableFrom(clazz)) { + return (T) convertRegexValue(o, context); } else if (clazz.isArray()) { throw deserializeError( context.errorPath, "Converting to Arrays is not supported, please use Lists instead"); @@ -542,6 +574,87 @@ private static VectorValue convertVectorValue(Object o, DeserializeContext conte } } + private static Int32Value convertInt32Value(Object o, DeserializeContext context) { + if (o instanceof Int32Value) { + return (Int32Value) o; + } else { + throw deserializeError( + context.errorPath, + "Failed to convert value of type " + o.getClass().getName() + " to Int32Value"); + } + } + + private static Decimal128Value convertDecimal128Value(Object o, DeserializeContext context) { + if (o instanceof Decimal128Value) { + return (Decimal128Value) o; + } else { + throw deserializeError( + context.errorPath, + "Failed to convert value of type " + o.getClass().getName() + " to Decimal128Value"); + } + } + + private static BsonTimestamp convertBsonTimestamp(Object o, DeserializeContext context) { + if (o instanceof BsonTimestamp) { + return (BsonTimestamp) o; + } else { + throw deserializeError( + context.errorPath, + "Failed to convert value of type " + o.getClass().getName() + " to BsonTimestamp"); + } + } + + private static BsonObjectId convertBsonObjectId(Object o, DeserializeContext context) { + if (o instanceof BsonObjectId) { + return (BsonObjectId) o; + } else { + throw deserializeError( + context.errorPath, + "Failed to convert value of type " + o.getClass().getName() + " to BsonObjectId"); 
+ } + } + + private static BsonBinaryData convertBsonBinaryData(Object o, DeserializeContext context) { + + if (o instanceof BsonBinaryData) { + return (BsonBinaryData) o; + } else { + throw deserializeError( + context.errorPath, + "Failed to convert value of type " + o.getClass().getName() + " to BsonBinaryData"); + } + } + + private static RegexValue convertRegexValue(Object o, DeserializeContext context) { + if (o instanceof RegexValue) { + return (RegexValue) o; + } else { + throw deserializeError( + context.errorPath, + "Failed to convert value of type " + o.getClass().getName() + " to RegexValue"); + } + } + + private static MinKey convertMinKey(Object o, DeserializeContext context) { + if (o instanceof MinKey) { + return (MinKey) o; + } else { + throw deserializeError( + context.errorPath, + "Failed to convert value of type " + o.getClass().getName() + " to MinKey"); + } + } + + private static MaxKey convertMaxKey(Object o, DeserializeContext context) { + if (o instanceof MaxKey) { + return (MaxKey) o; + } else { + throw deserializeError( + context.errorPath, + "Failed to convert value of type " + o.getClass().getName() + " to MaxKey"); + } + } + private static DocumentReference convertDocumentReference(Object o, DeserializeContext context) { if (o instanceof DocumentReference) { return (DocumentReference) o; diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/util/Util.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/util/Util.java index 2cc39337002..bc9c631e6e6 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/util/Util.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/util/Util.java @@ -23,6 +23,7 @@ import com.google.firebase.firestore.FieldPath; import com.google.firebase.firestore.FirebaseFirestoreException; import com.google.firebase.firestore.FirebaseFirestoreException.Code; +import com.google.firebase.firestore.Quadruple; import com.google.protobuf.ByteString; import io.grpc.Status; import io.grpc.StatusException; @@ -145,6 +146,11 @@ public static int compareMixed(double doubleValue, long longValue) { return NumberComparisonHelper.firestoreCompareDoubleWithLong(doubleValue, longValue); } + /** Utility function to compare Quadruples (using Firestore semantics for NaN).*/ + public static int compareQuadruples(Quadruple left, Quadruple right) { + return NumberComparisonHelper.firestoreCompareQuadruples(left, right); + } + public static > Comparator comparator() { return Comparable::compareTo; } diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/BsonTypesTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/BsonTypesTest.java new file mode 100644 index 00000000000..f7cb4191d9f --- /dev/null +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/BsonTypesTest.java @@ -0,0 +1,240 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package com.google.firebase.firestore; + +import static com.google.firebase.firestore.testutil.Assert.assertThrows; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.robolectric.RobolectricTestRunner; +import org.robolectric.annotation.Config; + +@RunWith(RobolectricTestRunner.class) +@Config(manifest = Config.NONE) +public class BsonTypesTest { + + @Test + public void testBsonObjectIdEquality() { + BsonObjectId bsonObjectId = new BsonObjectId("507f191e810c19729de860ea"); + BsonObjectId bsonObjectIdDup = new BsonObjectId("507f191e810c19729de860ea"); + BsonObjectId differentObjectId = new BsonObjectId("507f191e810c19729de860eb"); + + assertEquals(bsonObjectId, bsonObjectIdDup); + assertNotEquals(bsonObjectId, differentObjectId); + assertNotEquals(bsonObjectIdDup, differentObjectId); + + assertEquals(bsonObjectId.hashCode(), bsonObjectIdDup.hashCode()); + } + + @Test + public void testBsonTimeStampEquality() { + BsonTimestamp bsonTimestamp = new BsonTimestamp(1, 2); + BsonTimestamp bsonTimestampDup = new BsonTimestamp(1, 2); + BsonTimestamp differentSecondsTimestamp = new BsonTimestamp(2, 2); + BsonTimestamp differentIncrementTimestamp = new BsonTimestamp(1, 3); + + assertEquals(bsonTimestamp, bsonTimestampDup); + assertNotEquals(bsonTimestamp, differentSecondsTimestamp); + assertNotEquals(bsonTimestamp, differentIncrementTimestamp); + assertNotEquals(bsonTimestampDup, differentSecondsTimestamp); + assertNotEquals(bsonTimestampDup, differentIncrementTimestamp); + + assertEquals(bsonTimestamp.hashCode(), bsonTimestampDup.hashCode()); + } + + @Test + public void testBsonBinaryDataEquality() { + BsonBinaryData bsonBinaryData = BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}); + BsonBinaryData bsonBinaryDataDup = BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}); + BsonBinaryData differentSubtypeBinaryData = BsonBinaryData.fromBytes(2, new byte[] {1, 2, 3}); + BsonBinaryData differentDataBinaryData = BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4}); + + assertEquals(bsonBinaryData, bsonBinaryDataDup); + assertNotEquals(bsonBinaryData, differentSubtypeBinaryData); + assertNotEquals(bsonBinaryData, differentDataBinaryData); + assertNotEquals(bsonBinaryDataDup, differentSubtypeBinaryData); + assertNotEquals(bsonBinaryDataDup, differentDataBinaryData); + + assertEquals(bsonBinaryData.hashCode(), bsonBinaryDataDup.hashCode()); + } + + @Test + public void testRegexEquality() { + RegexValue regex = new RegexValue("^foo", "i"); + RegexValue regexDup = new RegexValue("^foo", "i"); + RegexValue differentPatternRegex = new RegexValue("^bar", "i"); + RegexValue differentOptionsRegex = new RegexValue("^foo", "m"); + + assertEquals(regex, regexDup); + assertNotEquals(regex, differentPatternRegex); + assertNotEquals(regex, differentOptionsRegex); + assertNotEquals(regexDup, differentPatternRegex); + assertNotEquals(regexDup, differentOptionsRegex); + + assertEquals(regex.hashCode(), regexDup.hashCode()); + } + + @Test + public void testInt32Equality() { + Int32Value int32 = new Int32Value(1); + Int32Value int32Dup = new Int32Value(1); + Int32Value differentInt32 = new Int32Value(2); + + assertEquals(int32, int32Dup); + assertNotEquals(int32, differentInt32); + assertNotEquals(int32Dup, differentInt32); + + assertEquals(int32.hashCode(), int32Dup.hashCode()); + } + + @Test + public void testDecimal128Equality() { + Decimal128Value decimal128 = new Decimal128Value("1.2e3"); + Decimal128Value 
decimal128Dup = new Decimal128Value("1.2e3"); + Decimal128Value differentDecimal128 = new Decimal128Value("1.3e3"); + assertEquals(decimal128, decimal128Dup); + assertNotEquals(decimal128, differentDecimal128); + assertEquals(decimal128.hashCode(), decimal128Dup.hashCode()); + + Decimal128Value dZeroPointFive = new Decimal128Value("0.5"); + Decimal128Value dHalf = new Decimal128Value(".5"); + Decimal128Value dFiveEminusOne = new Decimal128Value("5e-1"); + assertEquals(dZeroPointFive, dHalf); + assertEquals(dZeroPointFive.hashCode(), dHalf.hashCode()); + assertEquals(dZeroPointFive, dFiveEminusOne); + assertEquals(dZeroPointFive.hashCode(), dFiveEminusOne.hashCode()); + + Decimal128Value dOne = new Decimal128Value("1"); + Decimal128Value dOnePointZero = new Decimal128Value("1.0"); + Decimal128Value dOnePointZeroZero = new Decimal128Value("1.00"); + assertEquals(dOne, dOnePointZero); + assertEquals(dOne.hashCode(), dOnePointZero.hashCode()); + assertEquals(dOnePointZero, dOnePointZeroZero); + assertEquals(dOnePointZero.hashCode(), dOnePointZeroZero.hashCode()); + + // numerical equality with different string representation + Decimal128Value dTwelveHundred_1_2e3 = new Decimal128Value("1.2e3"); + Decimal128Value dTwelveHundred_12e2 = new Decimal128Value("12e2"); + Decimal128Value dTwelveHundred_0_12e4 = new Decimal128Value("0.12e4"); + Decimal128Value dTwelveHundred_12000eMinus1 = new Decimal128Value("12000e-1"); + Decimal128Value dOnePointTwo = new Decimal128Value("1.2"); + assertEquals(dTwelveHundred_1_2e3, dTwelveHundred_12e2); + assertEquals(dTwelveHundred_1_2e3.hashCode(), dTwelveHundred_12e2.hashCode()); + assertEquals(dTwelveHundred_1_2e3, dTwelveHundred_0_12e4); + assertEquals(dTwelveHundred_1_2e3.hashCode(), dTwelveHundred_0_12e4.hashCode()); + assertEquals(dTwelveHundred_1_2e3, dTwelveHundred_12000eMinus1); + assertEquals(dTwelveHundred_1_2e3.hashCode(), dTwelveHundred_12000eMinus1.hashCode()); + assertNotEquals(dTwelveHundred_1_2e3, dOnePointTwo); + + // Edge Cases: Zero + Decimal128Value positiveZero = new Decimal128Value("0"); + Decimal128Value negativeZero = new Decimal128Value("-0"); + Decimal128Value anotherPositiveZero = new Decimal128Value("+0"); + Decimal128Value zeroWithDecimal = new Decimal128Value("0.0"); + Decimal128Value negativeZeroWithDecimal = new Decimal128Value("-0.0"); + Decimal128Value positiveZeroWithDecimal = new Decimal128Value("+0.0"); + Decimal128Value zeroWithLeadingAndTrailingZeros = new Decimal128Value("00.00"); + Decimal128Value negativeZeroWithLeadingAndTrailingZeros = new Decimal128Value("-00.000"); + Decimal128Value negativeZeroWithExponent = new Decimal128Value("-00.000e-10"); + Decimal128Value negativeZeroWithZeroExponent = new Decimal128Value("-00.000e-0"); + Decimal128Value zeroWithExponent = new Decimal128Value("00.000e10"); + assertEquals(positiveZero, negativeZero); + assertEquals(positiveZero.hashCode(), negativeZero.hashCode()); + assertEquals(positiveZero, anotherPositiveZero); + assertEquals(positiveZero.hashCode(), anotherPositiveZero.hashCode()); + assertEquals(positiveZero, zeroWithDecimal); + assertEquals(positiveZero.hashCode(), zeroWithDecimal.hashCode()); + assertEquals(positiveZero, negativeZeroWithDecimal); + assertEquals(positiveZero.hashCode(), negativeZeroWithDecimal.hashCode()); + assertEquals(positiveZero, positiveZeroWithDecimal); + assertEquals(positiveZero.hashCode(), positiveZeroWithDecimal.hashCode()); + assertEquals(positiveZero, zeroWithLeadingAndTrailingZeros); + assertEquals(positiveZero.hashCode(), 
zeroWithLeadingAndTrailingZeros.hashCode()); + assertEquals(positiveZero, negativeZeroWithLeadingAndTrailingZeros); + assertEquals(positiveZero.hashCode(), negativeZeroWithLeadingAndTrailingZeros.hashCode()); + assertEquals(positiveZero, negativeZeroWithExponent); + assertEquals(positiveZero.hashCode(), negativeZeroWithExponent.hashCode()); + assertEquals(positiveZero, negativeZeroWithZeroExponent); + assertEquals(positiveZero.hashCode(), negativeZeroWithZeroExponent.hashCode()); + assertEquals(positiveZero, zeroWithExponent); + assertEquals(positiveZero.hashCode(), zeroWithExponent.hashCode()); + + // Infinity + Decimal128Value positiveInfinity = new Decimal128Value("Infinity"); + Decimal128Value negInfinity = new Decimal128Value("-Infinity"); + Decimal128Value anotherPositiveInfinity = new Decimal128Value("Infinity"); + assertEquals(positiveInfinity, anotherPositiveInfinity); + assertEquals(positiveInfinity.hashCode(), anotherPositiveInfinity.hashCode()); + assertNotEquals(positiveInfinity, negInfinity); + + // NaN + Decimal128Value nan1 = new Decimal128Value("NaN"); + Decimal128Value nan2 = new Decimal128Value("NaN"); + assertEquals(nan1, nan2); + assertEquals(nan1.hashCode(), nan2.hashCode()); + + assertNotEquals(nan1, dOne); + assertNotEquals(nan1, positiveInfinity); + + // Large Numbers + Decimal128Value largeNum1 = + new Decimal128Value("123456789012345678901234567890.123456789012345678901234567890"); + Decimal128Value largeNum2 = + new Decimal128Value("1.23456789012345678901234567890123456789012345678901234567890e29"); + assertEquals(largeNum1, largeNum2); + assertEquals(largeNum1.hashCode(), largeNum2.hashCode()); + + // Small Numbers + Decimal128Value smallNum1 = + new Decimal128Value( + "0.0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001"); + Decimal128Value smallNum2 = new Decimal128Value("1.0e-100"); + assertEquals(smallNum1, smallNum2); + assertEquals(smallNum1.hashCode(), smallNum2.hashCode()); + } + + @Test + public void testMaxKeyIsSingleton() { + MaxKey maxKey = MaxKey.instance(); + MaxKey maxKeyDup = MaxKey.instance(); + assertEquals(maxKey, maxKeyDup); + assertEquals(maxKey.hashCode(), maxKeyDup.hashCode()); + } + + @Test + public void testMinKeyIsSingleton() { + MinKey minKey = MinKey.instance(); + MinKey minKeyDup = MinKey.instance(); + assertEquals(minKey, minKeyDup); + assertEquals(minKey.hashCode(), minKeyDup.hashCode()); + } + + @Test + public void testMinKeyMaxKeyNullNotEqual() { + MinKey minKey = MinKey.instance(); + MaxKey maxKey = MaxKey.instance(); + assertNotEquals(minKey, maxKey); + assertNotEquals(minKey, null); + assertNotEquals(maxKey, null); + } + + @Test + public void testThrows() { + assertThrows( + IllegalArgumentException.class, () -> BsonBinaryData.fromBytes(256, new byte[] {1})); + } +} diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/FieldValueTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/FieldValueTest.java index 7540c06d2e5..32fefe7de0f 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/FieldValueTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/FieldValueTest.java @@ -32,6 +32,23 @@ public void testEquals() { FieldValue deleteDup = FieldValue.delete(); FieldValue serverTimestamp = FieldValue.serverTimestamp(); FieldValue serverTimestampDup = FieldValue.serverTimestamp(); + RegexValue regex = new RegexValue("pattern", "options"); + RegexValue regexDup = new RegexValue("pattern", 
"options"); + Int32Value int32 = new Int32Value(1); + Int32Value int32Dup = new Int32Value(1); + Decimal128Value decimal128 = new Decimal128Value("1.2e3"); + Decimal128Value decimal128Dup = new Decimal128Value("1.2e3"); + BsonTimestamp bsonTimestamp = new BsonTimestamp(1, 2); + BsonTimestamp bsonTimestampDup = new BsonTimestamp(1, 2); + BsonObjectId bsonObjectId = new BsonObjectId("507f191e810c19729de860ea"); + BsonObjectId bsonObjectIdDup = new BsonObjectId("507f191e810c19729de860ea"); + BsonBinaryData bsonBinary = BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}); + BsonBinaryData bsonBinaryDup = BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}); + MinKey minKey = MinKey.instance(); + MinKey minKeyDup = MinKey.instance(); + MaxKey maxKey = MaxKey.instance(); + MaxKey maxKeyDup = MaxKey.instance(); + assertEquals(delete, deleteDup); assertEquals(serverTimestamp, serverTimestampDup); assertNotEquals(delete, serverTimestamp); @@ -39,5 +56,95 @@ public void testEquals() { assertEquals(delete.hashCode(), deleteDup.hashCode()); assertEquals(serverTimestamp.hashCode(), serverTimestampDup.hashCode()); assertNotEquals(delete.hashCode(), serverTimestamp.hashCode()); + + // BSON types + assertEquals(regex, regexDup); + assertEquals(int32, int32Dup); + assertEquals(decimal128, decimal128Dup); + assertEquals(bsonTimestamp, bsonTimestampDup); + assertEquals(bsonObjectId, bsonObjectIdDup); + assertEquals(bsonBinary, bsonBinaryDup); + assertEquals(minKey, minKeyDup); + assertEquals(maxKey, maxKeyDup); + assertNotEquals(delete, serverTimestamp); + + // BSON types are not equal to each other + assertNotEquals(regex, int32); + assertNotEquals(regex, decimal128); + assertNotEquals(regex, bsonTimestamp); + assertNotEquals(regex, bsonObjectId); + assertNotEquals(regex, bsonBinary); + assertNotEquals(regex, minKey); + assertNotEquals(regex, maxKey); + + assertNotEquals(int32, decimal128); + assertNotEquals(int32, bsonTimestamp); + assertNotEquals(int32, bsonObjectId); + assertNotEquals(int32, bsonBinary); + assertNotEquals(int32, minKey); + assertNotEquals(int32, maxKey); + + assertNotEquals(decimal128, bsonTimestamp); + assertNotEquals(decimal128, bsonObjectId); + assertNotEquals(decimal128, bsonBinary); + assertNotEquals(decimal128, minKey); + assertNotEquals(decimal128, maxKey); + + assertNotEquals(bsonTimestamp, bsonObjectId); + assertNotEquals(bsonTimestamp, bsonBinary); + assertNotEquals(bsonTimestamp, minKey); + assertNotEquals(bsonTimestamp, maxKey); + + assertNotEquals(bsonObjectId, bsonBinary); + assertNotEquals(bsonObjectId, minKey); + assertNotEquals(bsonObjectId, maxKey); + + assertNotEquals(minKey, maxKey); + + // BSON types hash codes + assertEquals(regex.hashCode(), regexDup.hashCode()); + assertEquals(int32.hashCode(), int32Dup.hashCode()); + assertEquals(decimal128.hashCode(), decimal128Dup.hashCode()); + assertEquals(bsonTimestamp.hashCode(), bsonTimestampDup.hashCode()); + assertEquals(bsonObjectId.hashCode(), bsonObjectIdDup.hashCode()); + assertEquals(bsonBinary.hashCode(), bsonBinaryDup.hashCode()); + assertEquals(minKey.hashCode(), minKeyDup.hashCode()); + assertEquals(maxKey.hashCode(), maxKeyDup.hashCode()); + + // BSON types hash codes are not equal to each other + assertNotEquals(regex.hashCode(), int32.hashCode()); + assertNotEquals(regex.hashCode(), decimal128.hashCode()); + assertNotEquals(regex.hashCode(), bsonTimestamp.hashCode()); + assertNotEquals(regex.hashCode(), bsonObjectId.hashCode()); + assertNotEquals(regex.hashCode(), bsonBinary.hashCode()); + 
assertNotEquals(regex.hashCode(), minKey.hashCode()); + assertNotEquals(regex.hashCode(), maxKey.hashCode()); + + assertNotEquals(int32.hashCode(), decimal128.hashCode()); + assertNotEquals(int32.hashCode(), bsonTimestamp.hashCode()); + assertNotEquals(int32.hashCode(), bsonObjectId.hashCode()); + assertNotEquals(int32.hashCode(), bsonBinary.hashCode()); + assertNotEquals(int32.hashCode(), minKey.hashCode()); + assertNotEquals(int32.hashCode(), maxKey.hashCode()); + + assertNotEquals(decimal128.hashCode(), bsonTimestamp.hashCode()); + assertNotEquals(decimal128.hashCode(), bsonObjectId.hashCode()); + assertNotEquals(decimal128.hashCode(), bsonBinary.hashCode()); + assertNotEquals(decimal128.hashCode(), minKey.hashCode()); + assertNotEquals(decimal128.hashCode(), maxKey.hashCode()); + + assertNotEquals(bsonTimestamp.hashCode(), bsonObjectId.hashCode()); + assertNotEquals(bsonTimestamp.hashCode(), bsonBinary.hashCode()); + assertNotEquals(bsonTimestamp.hashCode(), minKey.hashCode()); + assertNotEquals(bsonTimestamp.hashCode(), maxKey.hashCode()); + + assertNotEquals(bsonObjectId.hashCode(), bsonBinary.hashCode()); + assertNotEquals(bsonObjectId.hashCode(), minKey.hashCode()); + assertNotEquals(bsonObjectId.hashCode(), maxKey.hashCode()); + + assertNotEquals(bsonBinary.hashCode(), minKey.hashCode()); + assertNotEquals(bsonBinary.hashCode(), maxKey.hashCode()); + + assertNotEquals(minKey.hashCode(), maxKey.hashCode()); } } diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/UserDataWriterTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/UserDataWriterTest.java index a856f316ff1..942ec99ee28 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/UserDataWriterTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/UserDataWriterTest.java @@ -34,6 +34,7 @@ import com.google.firebase.firestore.model.Values; import com.google.firestore.v1.ArrayValue; import com.google.firestore.v1.Value; +import com.google.protobuf.ByteString; import java.util.Date; import java.util.HashMap; import java.util.List; @@ -220,6 +221,101 @@ public void testConvertsGeoPointValue() { } } + @Test + public void testConvertsBsonObjectIdValue() { + List testCases = asList(new BsonObjectId("foo"), new BsonObjectId("bar")); + for (BsonObjectId p : testCases) { + Value value = wrap(p); + Object convertedValue = convertValue(value); + assertEquals(p, convertedValue); + } + } + + @Test + public void testConvertsBsonTimestampValue() { + List testCases = asList(new BsonTimestamp(1, 2), new BsonTimestamp(3, 4)); + for (BsonTimestamp p : testCases) { + Value value = wrap(p); + Object convertedValue = convertValue(value); + assertEquals(p, convertedValue); + } + } + + @Test + public void testConvertsBsonBinaryValue() { + List testCases = + asList( + BsonBinaryData.fromBytes(1, new byte[] {1, 2}), + BsonBinaryData.fromByteString(1, ByteString.EMPTY), + BsonBinaryData.fromBytes(1, new byte[] {1, 2})); + for (BsonBinaryData p : testCases) { + Value value = wrap(p); + Object convertedValue = convertValue(value); + assertEquals(p, convertedValue); + } + } + + @Test + public void testConvertsRegexValue() { + List testCases = asList(new RegexValue("^foo", "i"), new RegexValue("^bar", "g")); + for (RegexValue p : testCases) { + Value value = wrap(p); + Object convertedValue = convertValue(value); + assertEquals(p, convertedValue); + } + } + + @Test + public void testConvertsInt32Value() { + List testCases = + asList(new Int32Value(1), new 
Int32Value(-1), new Int32Value(0), new Int32Value(123)); + for (Int32Value p : testCases) { + Value value = wrap(p); + Object convertedValue = convertValue(value); + assertEquals(p, convertedValue); + } + } + + @Test + public void testConvertsDecimal128Value() { + List testCases = + asList( + new Decimal128Value("-1.2e3"), + new Decimal128Value("1.2e3"), + new Decimal128Value("1.3e3"), + new Decimal128Value("NaN"), + new Decimal128Value("-Infinity"), + new Decimal128Value("Infinity"), + new Decimal128Value("4.2e+3"), + new Decimal128Value("-4.2e-3"), + new Decimal128Value("-0")); + for (Decimal128Value p : testCases) { + Value value = wrap(p); + Object convertedValue = convertValue(value); + assertEquals(p, convertedValue); + } + } + + @Test + public void testConvertsMinKey() { + List testCases = asList(MinKey.instance(), MinKey.instance()); + for (MinKey p : testCases) { + Value value = wrap(p); + Object convertedValue = convertValue(value); + assertEquals(p, convertedValue); + } + } + + @Test + public void testConvertsMaxKey() { + List testCases = asList(MaxKey.instance(), MaxKey.instance()); + for (MaxKey p : testCases) { + Value value = wrap(p); + Object convertedValue = convertValue(value); + assertEquals(p, convertedValue); + } + } + @Test public void testConvertsEmptyObjects() { assertEquals(wrapObject(), new ObjectValue()); diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/bundle/BundleSerializerTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/bundle/BundleSerializerTest.java index 355833a6586..3983a08c0e2 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/bundle/BundleSerializerTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/bundle/BundleSerializerTest.java @@ -30,6 +30,7 @@ import com.google.firebase.firestore.model.ObjectValue; import com.google.firebase.firestore.model.ResourcePath; import com.google.firebase.firestore.model.SnapshotVersion; +import com.google.firebase.firestore.model.Values; import com.google.firebase.firestore.remote.RemoteSerializer; import com.google.firebase.firestore.testutil.TestUtil; import com.google.firestore.v1.ArrayValue; @@ -215,6 +216,121 @@ public void testDecodesReferenceValues() throws JSONException { assertDecodesValue(json, proto.build()); } + @Test + public void testDecodesBsonObjectIdValues() throws JSONException { + String json = "{ mapValue: { fields: { __oid__: { stringValue: 'foo' } } } }"; + Value.Builder proto = Value.newBuilder(); + proto.setMapValue( + MapValue.newBuilder() + .putFields( + Values.RESERVED_OBJECT_ID_KEY, Value.newBuilder().setStringValue("foo").build())); + + assertDecodesValue(json, proto.build()); + } + + @Test + public void testDecodesBsonTimestampValues() throws JSONException { + String json = + "{ mapValue: { fields: { __request_timestamp__: { mapValue: { fields: { seconds: { integerValue: 12345 }, increment: { integerValue: 67 } } } } } } }"; + Value.Builder proto = Value.newBuilder(); + proto.setMapValue( + MapValue.newBuilder() + .putFields( + Values.RESERVED_BSON_TIMESTAMP_KEY, + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + Values.RESERVED_BSON_TIMESTAMP_SECONDS_KEY, + Value.newBuilder().setIntegerValue(12345).build()) + .putFields( + Values.RESERVED_BSON_TIMESTAMP_INCREMENT_KEY, + Value.newBuilder().setIntegerValue(67).build())) + .build())); + assertDecodesValue(json, proto.build()); + } + + @Test + public void testDecodesBsonBinaryDataValues() throws JSONException { + 
String json = "{ mapValue: { fields: { __binary__: { bytesValue: 'AAECAw==' } } } }"; + Value.Builder proto = Value.newBuilder(); + proto.setMapValue( + MapValue.newBuilder() + .putFields( + Values.RESERVED_BSON_BINARY_KEY, + Value.newBuilder().setBytesValue(TestUtil.byteString(0, 1, 2, 3)).build())); + assertDecodesValue(json, proto.build()); + } + + @Test + public void testDecodesRegexValues() throws JSONException { + String json = + "{ mapValue: { fields: { __regex__: { mapValue: { fields: { pattern: { stringValue: '^foo' }, options: { stringValue: 'i' } } } } } } }"; + Value.Builder proto = Value.newBuilder(); + proto.setMapValue( + MapValue.newBuilder() + .putFields( + Values.RESERVED_REGEX_KEY, + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + Values.RESERVED_REGEX_PATTERN_KEY, + Value.newBuilder().setStringValue("^foo").build()) + .putFields( + Values.RESERVED_REGEX_OPTIONS_KEY, + Value.newBuilder().setStringValue("i").build())) + .build())); + assertDecodesValue(json, proto.build()); + } + + @Test + public void testDecodesInt32Values() throws JSONException { + String json = "{ mapValue: { fields: { __int__: { integerValue: 12345 } } } }"; + Value.Builder proto = Value.newBuilder(); + proto.setMapValue( + MapValue.newBuilder() + .putFields( + Values.RESERVED_INT32_KEY, Value.newBuilder().setIntegerValue(12345).build())); + assertDecodesValue(json, proto.build()); + } + + @Test + public void testDecodesDecimal128Values() throws JSONException { + String json = "{ mapValue: { fields: { __decimal128__: { stringValue: '1.2e3' } } } }"; + Value.Builder proto = Value.newBuilder(); + proto.setMapValue( + MapValue.newBuilder() + .putFields( + Values.RESERVED_DECIMAL128_KEY, + Value.newBuilder().setStringValue("1.2e3").build())); + assertDecodesValue(json, proto.build()); + } + + @Test + public void testDecodesMinKey() throws JSONException { + String json = "{ mapValue: { fields: { __min__: { nullValue: null } } } }"; + Value.Builder proto = Value.newBuilder(); + proto.setMapValue( + MapValue.newBuilder() + .putFields( + Values.RESERVED_MIN_KEY, + Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build())); + assertDecodesValue(json, proto.build()); + } + + @Test + public void testDecodesMaxKey() throws JSONException { + String json = "{ mapValue: { fields: { __max__: { nullValue: null } } } }"; + Value.Builder proto = Value.newBuilder(); + proto.setMapValue( + MapValue.newBuilder() + .putFields( + Values.RESERVED_MAX_KEY, + Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build())); + assertDecodesValue(json, proto.build()); + } + @Test public void testDecodesArrayValues() throws JSONException { String json = diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/core/QueryTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/core/QueryTest.java index cdc932bfa01..65390aa8054 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/core/QueryTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/core/QueryTest.java @@ -34,7 +34,15 @@ import com.google.firebase.Timestamp; import com.google.firebase.firestore.Blob; +import com.google.firebase.firestore.BsonBinaryData; +import com.google.firebase.firestore.BsonObjectId; +import com.google.firebase.firestore.BsonTimestamp; +import com.google.firebase.firestore.Decimal128Value; import com.google.firebase.firestore.GeoPoint; +import com.google.firebase.firestore.Int32Value; +import com.google.firebase.firestore.MaxKey; +import 
com.google.firebase.firestore.MinKey; +import com.google.firebase.firestore.RegexValue; import com.google.firebase.firestore.model.DocumentKey; import com.google.firebase.firestore.model.MutableDocument; import com.google.firebase.firestore.model.ResourcePath; @@ -839,6 +847,33 @@ public void testCanonicalIdsAreStable() { "collection|f:|ob:aasc__name__asc|ub:a:foo,[1,2,3]"); assertCanonicalId(baseQuery.limitToFirst(5), "collection|f:|ob:__name__asc|l:5"); assertCanonicalId(baseQuery.limitToLast(5), "collection|f:|ob:__name__desc|l:5"); + + // BSON types + assertCanonicalId( + baseQuery.filter(filter("a", "<=", new BsonObjectId("foo"))), + "collection|f:a<={__oid__:foo}|ob:aasc__name__asc"); + assertCanonicalId( + baseQuery.filter(filter("a", "<=", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))), + "collection|f:a<={__binary__:01010203}|ob:aasc__name__asc"); + assertCanonicalId( + baseQuery.filter(filter("a", "<=", new BsonTimestamp(1, 2))), + "collection|f:a<={__request_timestamp__:{increment:2,seconds:1}}|ob:aasc__name__asc"); + assertCanonicalId( + baseQuery.filter(filter("a", "<=", new RegexValue("^foo", "i"))), + "collection|f:a<={__regex__:{options:i,pattern:^foo}}|ob:aasc__name__asc"); + assertCanonicalId( + baseQuery.filter(filter("a", "<=", new Int32Value(1))), + "collection|f:a<={__int__:1}|ob:aasc__name__asc"); + assertCanonicalId( + baseQuery.filter(filter("a", "<=", new Decimal128Value("1.2e3"))), + "collection|f:a<={__decimal128__:1.2e3}|ob:aasc__name__asc"); + + assertCanonicalId( + baseQuery.filter(filter("a", "<=", MinKey.instance())), + "collection|f:a<={__min__:null}|ob:aasc__name__asc"); + assertCanonicalId( + baseQuery.filter(filter("a", "<=", MaxKey.instance())), + "collection|f:a<={__max__:null}|ob:aasc__name__asc"); } private void assertCanonicalId(Query query, String expectedCanonicalId) { diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/core/TargetTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/core/TargetTest.java index bad5ee427fa..f5d89ef55d8 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/core/TargetTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/core/TargetTest.java @@ -151,12 +151,12 @@ public void orderByQueryBound() { Bound lowerBound = target.getLowerBound(index); assertEquals(1, lowerBound.getPosition().size()); - assertTrue(Values.equals(lowerBound.getPosition().get(0), Values.MIN_VALUE)); + assertTrue(Values.equals(lowerBound.getPosition().get(0), Values.INTERNAL_MIN_VALUE)); assertTrue(lowerBound.isInclusive()); Bound upperBound = target.getUpperBound(index); assertEquals(1, upperBound.getPosition().size()); - assertTrue(Values.equals(upperBound.getPosition().get(0), Values.MAX_VALUE)); + assertTrue(Values.equals(upperBound.getPosition().get(0), Values.INTERNAL_MAX_VALUE)); assertTrue(upperBound.isInclusive()); } @@ -183,7 +183,7 @@ public void startAtQueryBound() { Bound upperBound = target.getUpperBound(index); assertEquals(1, upperBound.getPosition().size()); - assertTrue(Values.equals(upperBound.getPosition().get(0), Values.MAX_VALUE)); + assertTrue(Values.equals(upperBound.getPosition().get(0), Values.INTERNAL_MAX_VALUE)); assertTrue(upperBound.isInclusive()); } @@ -259,7 +259,7 @@ public void endAtQueryBound() { Bound lowerBound = target.getLowerBound(index); assertEquals(1, lowerBound.getPosition().size()); - assertTrue(Values.equals(lowerBound.getPosition().get(0), Values.MIN_VALUE)); + 
assertTrue(Values.equals(lowerBound.getPosition().get(0), Values.INTERNAL_MIN_VALUE)); assertTrue(lowerBound.isInclusive()); Bound upperBound = target.getUpperBound(index); diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/index/FirestoreIndexValueWriterTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/index/FirestoreIndexValueWriterTest.java index 6acb576666a..318e9137ee1 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/index/FirestoreIndexValueWriterTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/index/FirestoreIndexValueWriterTest.java @@ -20,6 +20,7 @@ import com.google.firebase.firestore.model.DatabaseId; import com.google.firebase.firestore.model.FieldIndex; import com.google.firestore.v1.Value; +import com.google.protobuf.ByteString; import java.util.concurrent.ExecutionException; import org.junit.Assert; import org.junit.Test; @@ -100,4 +101,389 @@ public void writeIndexValueSupportsEmptyVector() { // Assert actual and expected encodings are equal Assert.assertArrayEquals(actualBytes, expectedBytes); } + + @Test + public void writeIndexValueSupportsBsonObjectId() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(new BsonObjectId("507f191e810c19729de860ea")); + + // Encode an actual ObjectIdValue + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_BSON_OBJECT_ID); // ObjectId type + expectedDirectionalEncoder.writeBytes( + ByteString.copyFrom("507f191e810c19729de860ea".getBytes())); // ObjectId value + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsBsonBinaryData() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})); + + // Encode an actual BSONBinaryDataValue + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_BSON_BINARY); // BSONBinaryData type + expectedDirectionalEncoder.writeBytes( + ByteString.copyFrom(new byte[] {1, 1, 2, 3})); // BSONBinaryData value + expectedDirectionalEncoder.writeLong(FirestoreIndexValueWriter.NOT_TRUNCATED); + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsBsonBinaryWithEmptyData() + throws 
ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(BsonBinaryData.fromBytes(1, new byte[] {})); + + // Encode an actual BSONBinaryDataValue + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_BSON_BINARY); // BSONBinaryData type + expectedDirectionalEncoder.writeBytes( + ByteString.copyFrom(new byte[] {1})); // BSONBinaryData value + expectedDirectionalEncoder.writeLong(FirestoreIndexValueWriter.NOT_TRUNCATED); + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsBsonTimestamp() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(new BsonTimestamp(1, 2)); + + // Encode an actual BSONTimestampValue + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_BSON_TIMESTAMP); // BSONTimestamp type + expectedDirectionalEncoder.writeLong(1L << 32 | 2 & 0xFFFFFFFFL); // BSONTimestamp value + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsLargestBsonTimestamp() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(new BsonTimestamp(4294967295L, 4294967295L)); + + // Encode an actual BSONTimestampValue + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_BSON_TIMESTAMP); // BSONTimestamp type + expectedDirectionalEncoder.writeLong( + 4294967295L << 32 | 4294967295L & 0xFFFFFFFFL); // BSONTimestamp value + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsSmallestBsonTimestamp() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(new 
BsonTimestamp(0, 0)); + + // Encode an actual BSONTimestampValue + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_BSON_TIMESTAMP); // BSONTimestamp type + expectedDirectionalEncoder.writeLong(0L << 32 | 0 & 0xFFFFFFFFL); // BSONTimestamp value + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsRegex() throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(new RegexValue("^foo", "i")); + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong(FirestoreIndexValueWriter.INDEX_TYPE_REGEX); // Regex type + expectedDirectionalEncoder.writeString("^foo"); // Regex pattern + expectedDirectionalEncoder.writeString("i"); // Regex options + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.NOT_TRUNCATED); // writeTruncationMarker + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsInt32() throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(new Int32Value(1)); + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_NUMBER); // Number type + expectedDirectionalEncoder.writeDouble(1); // Number value + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsLargestInt32() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(new Int32Value(2147483647)); + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder 
expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_NUMBER); // Number type + expectedDirectionalEncoder.writeDouble(2147483647); // Number value + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsSmallestInt32() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(new Int32Value(-2147483648)); + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_NUMBER); // Number type + expectedDirectionalEncoder.writeDouble(-2147483648); // Number value + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsDecimal128() throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(new Decimal128Value("1.2e3")); + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_NUMBER); // Number type + expectedDirectionalEncoder.writeDouble(Double.parseDouble("1.2e3")); // Number value + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsNegativeDecimal128() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(new Decimal128Value("-1.2e3")); + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_NUMBER); // Number type + expectedDirectionalEncoder.writeDouble(Double.parseDouble("-1.2e3")); // Number value + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsSpecialDecimal128() + throws 
ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(new Decimal128Value("NaN")); + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_NAN); // Number type, special case NaN + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsLargestDecimal128() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(new Decimal128Value("Infinity")); + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_NUMBER); // Number type + expectedDirectionalEncoder.writeDouble(Double.parseDouble("Infinity")); // Number value + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsSmallestDecimal128() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(new Decimal128Value("-Infinity")); + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_NUMBER); // Number type + expectedDirectionalEncoder.writeDouble(Double.parseDouble("-Infinity")); // Number value + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsMinKey() throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(MinKey.instance()); + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + 
expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_MIN_KEY); // MinKey type + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsMaxKey() throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(MaxKey.instance()); + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_MAX_KEY); // MaxKey type + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + Assert.assertArrayEquals(actualBytes, expectedBytes); + } } diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/local/LocalStoreTestCase.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/local/LocalStoreTestCase.java index 21823b1af42..499a60a734a 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/local/LocalStoreTestCase.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/local/LocalStoreTestCase.java @@ -63,6 +63,7 @@ import com.google.firebase.firestore.core.Target; import com.google.firebase.firestore.model.Document; import com.google.firebase.firestore.model.DocumentKey; +import com.google.firebase.firestore.model.DocumentSet; import com.google.firebase.firestore.model.FieldIndex; import com.google.firebase.firestore.model.MutableDocument; import com.google.firebase.firestore.model.ResourcePath; @@ -111,7 +112,7 @@ public abstract class LocalStoreTestCase { private List batches; private @Nullable ImmutableSortedMap lastChanges; - private @Nullable QueryResult lastQueryResult; + private @Nullable DocumentSet lastQueryResult; private int lastTargetId; abstract Persistence getPersistence(); @@ -214,7 +215,11 @@ protected int allocateQuery(Query query) { protected void executeQuery(Query query) { resetPersistenceStats(); - lastQueryResult = localStore.executeQuery(query, /* usePreviousResults= */ true); + QueryResult queryResult = localStore.executeQuery(query, /* usePreviousResults= */ true); + lastQueryResult = DocumentSet.emptySet(query.comparator()); + for (Entry<DocumentKey, Document> entry : queryResult.getDocuments()) { + lastQueryResult = lastQueryResult.add(entry.getValue()); + } + } protected void setIndexAutoCreationEnabled(boolean isEnabled) { @@ -310,8 +315,12 @@ private void assertNotContains(String keyPathString) { protected void assertQueryReturned(String... 
keys) { assertNotNull(lastQueryResult); - ImmutableSortedMap documents = lastQueryResult.getDocuments(); - assertThat(keys(documents)).containsExactly(Arrays.stream(keys).map(TestUtil::key).toArray()); + assertEquals(lastQueryResult.size(), keys.length); + List expectedKeys = + Arrays.stream(keys).map(TestUtil::key).collect(Collectors.toList()); + List actualKeys = + lastQueryResult.toList().stream().map(Document::getKey).collect(Collectors.toList()); + assertEquals(expectedKeys, actualKeys); } private void assertQueryDocumentMapping(int targetId, DocumentKey... keys) { diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteIndexManagerTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteIndexManagerTest.java index 57b0fc36ae4..b0b4c7e9eb7 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteIndexManagerTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteIndexManagerTest.java @@ -18,6 +18,7 @@ import static com.google.firebase.firestore.model.FieldIndex.IndexState; import static com.google.firebase.firestore.model.FieldIndex.Segment.Kind; import static com.google.firebase.firestore.testutil.TestUtil.andFilters; +import static com.google.firebase.firestore.testutil.TestUtil.blob; import static com.google.firebase.firestore.testutil.TestUtil.bound; import static com.google.firebase.firestore.testutil.TestUtil.deletedDoc; import static com.google.firebase.firestore.testutil.TestUtil.doc; @@ -30,12 +31,24 @@ import static com.google.firebase.firestore.testutil.TestUtil.orderBy; import static com.google.firebase.firestore.testutil.TestUtil.path; import static com.google.firebase.firestore.testutil.TestUtil.query; +import static com.google.firebase.firestore.testutil.TestUtil.ref; import static com.google.firebase.firestore.testutil.TestUtil.version; import static com.google.firebase.firestore.testutil.TestUtil.wrap; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; +import com.google.firebase.Timestamp; +import com.google.firebase.firestore.BsonBinaryData; +import com.google.firebase.firestore.BsonObjectId; +import com.google.firebase.firestore.BsonTimestamp; +import com.google.firebase.firestore.Decimal128Value; +import com.google.firebase.firestore.FieldValue; +import com.google.firebase.firestore.GeoPoint; +import com.google.firebase.firestore.Int32Value; +import com.google.firebase.firestore.MaxKey; +import com.google.firebase.firestore.MinKey; +import com.google.firebase.firestore.RegexValue; import com.google.firebase.firestore.auth.User; import com.google.firebase.firestore.core.Filter; import com.google.firebase.firestore.core.Query; @@ -1233,6 +1246,445 @@ public void TestCreateTargetIndexesUpgradesPartialIndexToFullIndex() { validateIndexType(subQuery2, IndexManager.IndexType.NONE); } + @Test + public void testIndexesBsonObjectId() { + indexManager.addFieldIndex( + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); + + addDoc("coll/doc1", map("key", new BsonObjectId("507f191e810c19729de860ea"))); + addDoc("coll/doc2", map("key", new BsonObjectId("507f191e810c19729de860eb"))); + addDoc("coll/doc3", map("key", new BsonObjectId("507f191e810c19729de860ec"))); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + verifyResults(query, "coll/doc1", "coll/doc2", "coll/doc3"); + + query = 
query("coll").filter(filter("key", "==", new BsonObjectId("507f191e810c19729de860ea"))); + verifyResults(query, "coll/doc1"); + + query = query("coll").filter(filter("key", "!=", new BsonObjectId("507f191e810c19729de860ea"))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", ">=", new BsonObjectId("507f191e810c19729de860eb"))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "<=", new BsonObjectId("507f191e810c19729de860eb"))); + verifyResults(query, "coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", ">", new BsonObjectId("507f191e810c19729de860eb"))); + verifyResults(query, "coll/doc3"); + + query = query("coll").filter(filter("key", "<", new BsonObjectId("507f191e810c19729de860eb"))); + verifyResults(query, "coll/doc1"); + + query = query("coll").filter(filter("key", ">", new BsonObjectId("507f191e810c19729de860ec"))); + verifyResults(query); + + query = query("coll").filter(filter("key", "<", new BsonObjectId("507f191e810c19729de860ea"))); + verifyResults(query); + } + + @Test + public void testIndexesBsonBinary() { + indexManager.addFieldIndex( + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); + + addDoc("coll/doc1", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); + addDoc("coll/doc2", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4}))); + addDoc("coll/doc3", map("key", BsonBinaryData.fromBytes(1, new byte[] {2, 1, 2}))); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + verifyResults(query, "coll/doc1", "coll/doc2", "coll/doc3"); + + query = + query("coll") + .filter(filter("key", "==", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); + verifyResults(query, "coll/doc1"); + + query = + query("coll") + .filter(filter("key", "!=", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = + query("coll") + .filter(filter("key", ">=", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4}))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = + query("coll") + .filter(filter("key", "<=", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4}))); + verifyResults(query, "coll/doc1", "coll/doc2"); + + query = + query("coll").filter(filter("key", ">", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4}))); + verifyResults(query, "coll/doc3"); + + query = + query("coll").filter(filter("key", "<", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4}))); + verifyResults(query, "coll/doc1"); + + query = + query("coll").filter(filter("key", ">", BsonBinaryData.fromBytes(1, new byte[] {2, 1, 2}))); + verifyResults(query); + + query = + query("coll").filter(filter("key", "<", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); + verifyResults(query); + } + + @Test + public void testIndexesBsonTimestamp() { + indexManager.addFieldIndex( + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); + + addDoc("coll/doc1", map("key", new BsonTimestamp(1, 1))); + addDoc("coll/doc2", map("key", new BsonTimestamp(1, 2))); + addDoc("coll/doc3", map("key", new BsonTimestamp(2, 1))); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + verifyResults(query, "coll/doc1", "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "==", new BsonTimestamp(1, 1))); + verifyResults(query, "coll/doc1"); + + query = query("coll").filter(filter("key", "!=", new BsonTimestamp(1, 1))); + 
verifyResults(query, "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", ">=", new BsonTimestamp(1, 2))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "<=", new BsonTimestamp(1, 2))); + verifyResults(query, "coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", ">", new BsonTimestamp(1, 2))); + verifyResults(query, "coll/doc3"); + + query = query("coll").filter(filter("key", "<", new BsonTimestamp(1, 2))); + verifyResults(query, "coll/doc1"); + + query = query("coll").filter(filter("key", ">", new BsonTimestamp(2, 1))); + verifyResults(query); + + query = query("coll").filter(filter("key", "<", new BsonTimestamp(1, 1))); + verifyResults(query); + } + + @Test + public void testIndexesRegex() { + indexManager.addFieldIndex( + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); + + addDoc("coll/doc1", map("key", new RegexValue("a", "i"))); + addDoc("coll/doc2", map("key", new RegexValue("a", "m"))); + addDoc("coll/doc3", map("key", new RegexValue("b", "i"))); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + verifyResults(query, "coll/doc1", "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "==", new RegexValue("a", "i"))); + verifyResults(query, "coll/doc1"); + + query = query("coll").filter(filter("key", "!=", new RegexValue("a", "i"))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", ">=", new RegexValue("a", "m"))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "<=", new RegexValue("a", "m"))); + verifyResults(query, "coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", ">", new RegexValue("a", "m"))); + verifyResults(query, "coll/doc3"); + + query = query("coll").filter(filter("key", "<", new RegexValue("a", "m"))); + verifyResults(query, "coll/doc1"); + + query = query("coll").filter(filter("key", ">", new RegexValue("b", "i"))); + verifyResults(query); + + query = query("coll").filter(filter("key", "<", new RegexValue("a", "i"))); + verifyResults(query); + } + + @Test + public void testIndexesInt32() { + indexManager.addFieldIndex( + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); + + addDoc("coll/doc1", map("key", new Int32Value(1))); + addDoc("coll/doc2", map("key", new Int32Value(2))); + addDoc("coll/doc3", map("key", new Int32Value(3))); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + verifyResults(query, "coll/doc1", "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "==", new Int32Value(1))); + verifyResults(query, "coll/doc1"); + + query = query("coll").filter(filter("key", "!=", new Int32Value(1))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", ">=", new Int32Value(2))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "<=", new Int32Value(2))); + verifyResults(query, "coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", ">", new Int32Value(2))); + verifyResults(query, "coll/doc3"); + + query = query("coll").filter(filter("key", "<", new Int32Value(2))); + verifyResults(query, "coll/doc1"); + + query = query("coll").filter(filter("key", ">", new Int32Value(3))); + verifyResults(query); + + query = query("coll").filter(filter("key", "<", new Int32Value(1))); + verifyResults(query); + } + + 
@Test + public void testIndexesDecimal128Value() { + indexManager.addFieldIndex( + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); + + addDoc("coll/doc1", map("key", new Decimal128Value("-1.2e3"))); + addDoc("coll/doc2", map("key", new Decimal128Value("0.0"))); + addDoc("coll/doc3", map("key", new Decimal128Value("1.2e3"))); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + verifyResults(query, "coll/doc1", "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "==", new Decimal128Value("-1200"))); + verifyResults(query, "coll/doc1"); + + query = query("coll").filter(filter("key", "!=", new Decimal128Value("0"))); + verifyResults(query, "coll/doc1", "coll/doc3"); + + query = query("coll").filter(filter("key", ">=", new Decimal128Value("-0"))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "<=", new Decimal128Value("-0.0"))); + verifyResults(query, "coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", ">", new Decimal128Value("1.2e-3"))); + verifyResults(query, "coll/doc3"); + + query = query("coll").filter(filter("key", "<", new Decimal128Value("-1.2e-3"))); + verifyResults(query, "coll/doc1"); + + query = query("coll").filter(filter("key", ">", new Decimal128Value("1.2e3"))); + verifyResults(query); + + query = query("coll").filter(filter("key", "<", new Decimal128Value("-1.2e3"))); + verifyResults(query); + } + + @Test + public void testIndexesDecimal128ValueWithPrecisionLoss() { + indexManager.addFieldIndex( + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); + + addDoc( + "coll/doc1", + map( + "key", + new Decimal128Value( + "-0.1234567890123456789"))); // will be rounded to -0.12345678901234568 + addDoc("coll/doc2", map("key", new Decimal128Value("0"))); + addDoc( + "coll/doc3", + map( + "key", + new Decimal128Value( + "0.1234567890123456789"))); // will be rounded to 0.12345678901234568 + + Query query = query("coll").orderBy(orderBy("key", "asc")); + verifyResults(query, "coll/doc1", "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "==", new Decimal128Value("0.1234567890123456789"))); + verifyResults(query, "coll/doc3"); + + // Mismatched behaviour caused by the rounding error: Firestore fetches doc3 from the SQLite DB + // because doc3 rounds to the same number, even though the actual number stored in doc3 differs. + // Unlike SQLiteLocalStoreTest, this returns doc3 as a result. + query = query("coll").filter(filter("key", "==", new Decimal128Value("0.12345678901234568"))); + verifyResults(query, "coll/doc3"); + + // Operations that don't go up to 17 decimal digits of precision aren't affected by + // these rounding errors. 
+ query = query("coll").filter(filter("key", "!=", new Decimal128Value("0.0"))); + verifyResults(query, "coll/doc1", "coll/doc3"); + + query = query("coll").filter(filter("key", ">=", new Decimal128Value("1.23e-1"))); + verifyResults(query, "coll/doc3"); + + query = query("coll").filter(filter("key", "<=", new Decimal128Value("-1.23e-1"))); + verifyResults(query, "coll/doc1"); + + query = query("coll").filter(filter("key", ">", new Decimal128Value("1.2e3"))); + verifyResults(query); + + query = query("coll").filter(filter("key", "<", new Decimal128Value("-1.2e3"))); + verifyResults(query); + } + + @Test + public void testIndexesMinKey() { + indexManager.addFieldIndex( + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); + addDoc("coll/doc1", map("key", MinKey.instance())); + addDoc("coll/doc2", map("key", MinKey.instance())); + addDoc("coll/doc3", map("key", null)); + addDoc("coll/doc4", map("key", 1)); + addDoc("coll/doc5", map("key", MaxKey.instance())); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + verifyResults(query, "coll/doc3", "coll/doc1", "coll/doc2", "coll/doc4", "coll/doc5"); + + query = query("coll").filter(filter("key", "==", MinKey.instance())); + verifyResults(query, "coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", "!=", MinKey.instance())); + verifyResults(query, "coll/doc4", "coll/doc5"); + + query = query("coll").filter(filter("key", ">=", MinKey.instance())); + verifyResults(query, "coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", "<=", MinKey.instance())); + verifyResults(query, "coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", ">", MinKey.instance())); + verifyResults(query); + + query = query("coll").filter(filter("key", "<", MinKey.instance())); + verifyResults(query); + } + + @Test + public void testIndexesMaxKey() { + indexManager.addFieldIndex( + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); + addDoc("coll/doc1", map("key", MinKey.instance())); + addDoc("coll/doc2", map("key", 1)); + addDoc("coll/doc3", map("key", MaxKey.instance())); + addDoc("coll/doc4", map("key", MaxKey.instance())); + addDoc("coll/doc5", map("key", null)); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + verifyResults(query, "coll/doc5", "coll/doc1", "coll/doc2", "coll/doc3", "coll/doc4"); + + query = query("coll").filter(filter("key", "==", MaxKey.instance())); + verifyResults(query, "coll/doc3", "coll/doc4"); + + query = query("coll").filter(filter("key", "!=", MaxKey.instance())); + verifyResults(query, "coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", ">=", MaxKey.instance())); + verifyResults(query, "coll/doc3", "coll/doc4"); + + query = query("coll").filter(filter("key", "<=", MaxKey.instance())); + verifyResults(query, "coll/doc3", "coll/doc4"); + + query = query("coll").filter(filter("key", ">", MaxKey.instance())); + verifyResults(query); + + query = query("coll").filter(filter("key", "<", MaxKey.instance())); + verifyResults(query); + } + + @Test + public void testIndexFieldsOfBsonTypesTogether() { + indexManager.addFieldIndex(fieldIndex("coll", "key", Kind.DESCENDING)); + + addDoc("coll/doc1", map("key", MinKey.instance())); + addDoc("coll/doc2", map("key", new Int32Value(2))); + addDoc("coll/doc3", map("key", new Int32Value(-1))); + addDoc("coll/doc4", map("key", new Decimal128Value("1.2e3"))); + addDoc("coll/doc5", map("key", new Decimal128Value("-0.0"))); + 
addDoc("coll/doc6", map("key", new BsonTimestamp(1, 2))); + addDoc("coll/doc7", map("key", new BsonTimestamp(1, 1))); + addDoc("coll/doc8", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4}))); + addDoc("coll/doc9", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); + addDoc("coll/doc10", map("key", new BsonObjectId("507f191e810c19729de860eb"))); + addDoc("coll/doc11", map("key", new BsonObjectId("507f191e810c19729de860ea"))); + addDoc("coll/doc12", map("key", new RegexValue("a", "m"))); + addDoc("coll/doc13", map("key", new RegexValue("a", "i"))); + addDoc("coll/doc14", map("key", MaxKey.instance())); + + Query query = query("coll").orderBy(orderBy("key", "desc")); + verifyResults( + query, + "coll/doc14", // maxKey + "coll/doc12", // regex m + "coll/doc13", // regex i + "coll/doc10", // objectId eb + "coll/doc11", // objectId ea + "coll/doc8", // binary [1,2,4] + "coll/doc9", // binary [1,2,3] + "coll/doc6", // timestamp 1,2 + "coll/doc7", // timestamp 1,1 + "coll/doc4", // decimal128 1200 + "coll/doc2", // int32 2 + "coll/doc5", // decimal128 -0.0 + "coll/doc3", // int32 -1 + "coll/doc1" // minKey + ); + } + + @Test + public void testIndexFieldsOfAllTypesTogether() { + indexManager.addFieldIndex(fieldIndex("coll", "key", Kind.DESCENDING)); + + addDoc("coll/a", map("key", null)); + addDoc("coll/b", map("key", MinKey.instance())); + addDoc("coll/c", map("key", true)); + addDoc("coll/d", map("key", Double.NaN)); + addDoc("coll/e", map("key", new Int32Value(1))); + addDoc("coll/f", map("key", 2.0)); + addDoc("coll/g", map("key", 3L)); + addDoc("coll/h", map("key", new Decimal128Value("1.2e3"))); + addDoc("coll/i", map("key", new Timestamp(100, 123456000))); + addDoc("coll/j", map("key", new BsonTimestamp(1, 2))); + addDoc("coll/k", map("key", "string")); + addDoc("coll/l", map("key", blob(1, 2, 3))); + addDoc("coll/m", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); + addDoc("coll/n", map("key", ref("foo/bar"))); + addDoc("coll/o", map("key", new BsonObjectId("507f191e810c19729de860ea"))); + addDoc("coll/p", map("key", new GeoPoint(0, 1))); + addDoc("coll/q", map("key", new RegexValue("^foo", "i"))); + addDoc("coll/r", map("key", Arrays.asList(1, 2))); + addDoc("coll/s", map("key", FieldValue.vector(new double[] {1, 2, 3}))); + addDoc("coll/t", map("key", map("a", 1))); + addDoc("coll/u", map("key", MaxKey.instance())); + + Query query = query("coll").orderBy(orderBy("key", "desc")); + verifyResults( + query, + "coll/u", // maxKey + "coll/t", // map + "coll/s", // vector + "coll/r", // array + "coll/q", // regex + "coll/p", // geopoint + "coll/o", // objectId + "coll/n", // reference + "coll/m", // bsonBinary + "coll/l", // bytes + "coll/k", // string + "coll/j", // bsonTimestamp + "coll/i", // timestamp + "coll/h", // Number decimal128 + "coll/g", // Number long + "coll/f", // Number double + "coll/e", // Number int32 + "coll/d", // Number NaN + "coll/c", // boolean + "coll/b", // minKey + "coll/a" // null + ); + } + private void validateIndexType(Query query, IndexManager.IndexType expected) { IndexManager.IndexType indexType = indexManager.getIndexType(query.toTarget()); assertEquals(indexType, expected); diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteLocalStoreTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteLocalStoreTest.java index 63569e6dc85..bc8fcc31f4b 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteLocalStoreTest.java +++ 
b/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteLocalStoreTest.java @@ -16,6 +16,7 @@ import static com.google.common.truth.Truth.assertThat; import static com.google.firebase.firestore.testutil.TestUtil.addedRemoteEvent; +import static com.google.firebase.firestore.testutil.TestUtil.blob; import static com.google.firebase.firestore.testutil.TestUtil.deleteMutation; import static com.google.firebase.firestore.testutil.TestUtil.deletedDoc; import static com.google.firebase.firestore.testutil.TestUtil.doc; @@ -27,6 +28,7 @@ import static com.google.firebase.firestore.testutil.TestUtil.orFilters; import static com.google.firebase.firestore.testutil.TestUtil.orderBy; import static com.google.firebase.firestore.testutil.TestUtil.query; +import static com.google.firebase.firestore.testutil.TestUtil.ref; import static com.google.firebase.firestore.testutil.TestUtil.setMutation; import static com.google.firebase.firestore.testutil.TestUtil.updateRemoteEvent; import static com.google.firebase.firestore.testutil.TestUtil.version; @@ -34,7 +36,16 @@ import static java.util.Collections.singletonList; import com.google.firebase.Timestamp; +import com.google.firebase.firestore.BsonBinaryData; +import com.google.firebase.firestore.BsonObjectId; +import com.google.firebase.firestore.BsonTimestamp; +import com.google.firebase.firestore.Decimal128Value; import com.google.firebase.firestore.FieldValue; +import com.google.firebase.firestore.GeoPoint; +import com.google.firebase.firestore.Int32Value; +import com.google.firebase.firestore.MaxKey; +import com.google.firebase.firestore.MinKey; +import com.google.firebase.firestore.RegexValue; import com.google.firebase.firestore.core.Query; import com.google.firebase.firestore.model.DocumentKey; import com.google.firebase.firestore.model.FieldIndex; @@ -367,6 +378,1032 @@ public void testIndexesVectorValues() { assertQueryReturned("coll/doc4", "coll/doc3"); } + @Test + public void testIndexesBsonObjectId() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); + configureFieldIndexes(singletonList(index)); + + writeMutation( + setMutation("coll/doc1", map("key", new BsonObjectId("507f191e810c19729de860ea")))); + writeMutation( + setMutation("coll/doc2", map("key", new BsonObjectId("507f191e810c19729de860eb")))); + writeMutation( + setMutation("coll/doc3", map("key", new BsonObjectId("507f191e810c19729de860ec")))); + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "==", new BsonObjectId("507f191e810c19729de860ea"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + assertOverlayTypes(keyMap("coll/doc1", CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1"); + + query = query("coll").filter(filter("key", "!=", new BsonObjectId("507f191e810c19729de860ea"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + 
assertQueryReturned("coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", ">=", new BsonObjectId("507f191e810c19729de860eb"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "<=", new BsonObjectId("507f191e810c19729de860eb"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", ">", new BsonObjectId("507f191e810c19729de860ec"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = query("coll").filter(filter("key", "<", new BsonObjectId("507f191e810c19729de860ea"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = + query("coll") + .filter( + filter( + "key", + "in", + Arrays.asList( + new BsonObjectId("507f191e810c19729de860ea"), + new BsonObjectId("507f191e810c19729de860eb")))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2"); + } + + @Test + public void testIndexesBsonTimestamp() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); + configureFieldIndexes(singletonList(index)); + + writeMutation(setMutation("coll/doc1", map("key", new BsonTimestamp(1000, 1000)))); + writeMutation(setMutation("coll/doc2", map("key", new BsonTimestamp(1001, 1000)))); + writeMutation(setMutation("coll/doc3", map("key", new BsonTimestamp(1000, 1001)))); + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc3", "coll/doc2"); + + query = query("coll").filter(filter("key", "==", new BsonTimestamp(1000, 1000))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + assertOverlayTypes(keyMap("coll/doc1", CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1"); + + query = query("coll").filter(filter("key", "!=", new BsonTimestamp(1000, 1000))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc3", "coll/doc2"); + + query = query("coll").filter(filter("key", ">=", new BsonTimestamp(1000, 1001))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + 
"coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc3", "coll/doc2"); + + query = query("coll").filter(filter("key", "<=", new BsonTimestamp(1000, 1001))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc3"); + + query = query("coll").filter(filter("key", ">", new BsonTimestamp(1001, 1000))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = query("coll").filter(filter("key", "<", new BsonTimestamp(1000, 1000))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = + query("coll") + .filter( + filter( + "key", + "in", + Arrays.asList(new BsonTimestamp(1000, 1000), new BsonTimestamp(1000, 1001)))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc3"); + } + + @Test + public void testIndexesBsonBinary() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); + configureFieldIndexes(singletonList(index)); + + writeMutation( + setMutation("coll/doc1", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})))); + writeMutation( + setMutation("coll/doc2", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2})))); + writeMutation( + setMutation("coll/doc3", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4})))); + writeMutation( + setMutation("coll/doc4", map("key", BsonBinaryData.fromBytes(2, new byte[] {1, 2})))); + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 4, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set, + "coll/doc4", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc1", "coll/doc3", "coll/doc4"); + + query = + query("coll") + .filter(filter("key", "==", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + assertOverlayTypes(keyMap("coll/doc1", CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1"); + + query = + query("coll") + .filter(filter("key", "!=", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set, + "coll/doc4", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3", "coll/doc4"); + + query = + query("coll") + .filter(filter("key", ">=", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + 
CountingQueryEngine.OverlayType.Set, + "coll/doc4", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc3", "coll/doc4"); + + query = + query("coll") + .filter(filter("key", "<=", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc1"); + + query = + query("coll").filter(filter("key", ">", BsonBinaryData.fromBytes(2, new byte[] {1, 2}))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = + query("coll").filter(filter("key", "<", BsonBinaryData.fromBytes(1, new byte[] {1, 2}))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = + query("coll") + .filter( + filter( + "key", + "in", + Arrays.asList( + BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}), + BsonBinaryData.fromBytes(1, new byte[] {1, 2})))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2"); + } + + @Test + public void testIndexesRegex() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); + configureFieldIndexes(singletonList(index)); + + writeMutation(setMutation("coll/doc1", map("key", new RegexValue("^bar", "i")))); + writeMutation(setMutation("coll/doc2", map("key", new RegexValue("^bar", "m")))); + writeMutation(setMutation("coll/doc3", map("key", new RegexValue("^foo", "i")))); + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "==", new RegexValue("^bar", "i"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + assertOverlayTypes(keyMap("coll/doc1", CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1"); + + query = query("coll").filter(filter("key", "!=", new RegexValue("^bar", "i"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", ">", new RegexValue("^foo", "i"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = query("coll").filter(filter("key", "<", new RegexValue("^bar", "i"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = + query("coll") + .filter( + filter( + "key", + "in", + 
Arrays.asList(new RegexValue("^bar", "i"), new RegexValue("^foo", "i")))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc3"); + } + + @Test + public void testIndexesInt32() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); + configureFieldIndexes(singletonList(index)); + writeMutation(setMutation("coll/doc1", map("key", new Int32Value(-1)))); + writeMutation(setMutation("coll/doc2", map("key", new Int32Value(0)))); + writeMutation(setMutation("coll/doc3", map("key", new Int32Value(1)))); + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "==", new Int32Value(-1))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + assertOverlayTypes(keyMap("coll/doc1", CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1"); + + query = query("coll").filter(filter("key", "!=", new Int32Value(-1))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", ">=", new Int32Value(0))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "<=", new Int32Value(0))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", ">", new Int32Value(1))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = query("coll").filter(filter("key", "<", new Int32Value(-1))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = + query("coll") + .filter(filter("key", "in", Arrays.asList(new Int32Value(-1), new Int32Value(0)))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2"); + } + + @Test + public void testIndexesDecimal128Value() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); + 
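// Decimal128Value takes a decimal string (plain or scientific notation); as exercised below, + // values are compared numerically once indexed, so a field written as "-1.2e3" is matched by an + // equality filter on "-1200", and "0" is matched by "-0" and "0.0". +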
configureFieldIndexes(singletonList(index)); + writeMutation(setMutation("coll/doc1", map("key", new Decimal128Value("-1.2e3")))); + writeMutation(setMutation("coll/doc2", map("key", new Decimal128Value("0")))); + writeMutation(setMutation("coll/doc3", map("key", new Decimal128Value("1.2e3")))); + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "==", new Decimal128Value("-1200"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + assertOverlayTypes(keyMap("coll/doc1", CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1"); + + query = query("coll").filter(filter("key", "!=", new Decimal128Value("0.0"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc3"); + + query = query("coll").filter(filter("key", ">=", new Decimal128Value("-0"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3"); + + // This will fail if the negative 0s are not converted to positive 0 in `writeIndexValue` + // function + query = query("coll").filter(filter("key", "<=", new Decimal128Value("-0.0"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", ">", new Decimal128Value("1.2e3"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = query("coll").filter(filter("key", "<", new Decimal128Value("-1.2e3"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = + query("coll") + .filter( + filter( + "key", + "in", + Arrays.asList(new Decimal128Value("-1.2e3"), new Decimal128Value("0")))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2"); + } + + @Test + public void testIndexesDecimal128ValueWithPrecisionLoss() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); + configureFieldIndexes(singletonList(index)); + writeMutation( + setMutation( + "coll/doc1", + map( + "key", + new Decimal128Value( + "-0.1234567890123456789")))); // will be rounded to -0.12345678901234568 + writeMutation(setMutation("coll/doc2", map("key", new Decimal128Value("0")))); + writeMutation( 
+ setMutation( + "coll/doc3", + map( + "key", + new Decimal128Value( + "0.1234567890123456789")))); // will be rounded to 0.12345678901234568 + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "==", new Decimal128Value("0.1234567890123456789"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + assertOverlayTypes(keyMap("coll/doc3", CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc3"); + + // Mismatched behavior caused by the rounding error: Firestore fetches doc3 from the SQLite DB + // because doc3 rounds to the same indexed value, but doc3 is not present in the final query result. + query = query("coll").filter(filter("key", "==", new Decimal128Value("0.12345678901234568"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + assertOverlayTypes(keyMap("coll/doc3", CountingQueryEngine.OverlayType.Set)); + assertQueryReturned(); + + // Operations that do not require 17 decimal digits of precision are not affected by + // this rounding error. + query = query("coll").filter(filter("key", "!=", new Decimal128Value("0.0"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc3"); + + query = query("coll").filter(filter("key", ">=", new Decimal128Value("1.23e-1"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + assertOverlayTypes(keyMap("coll/doc3", CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc3"); + + query = query("coll").filter(filter("key", "<=", new Decimal128Value("-1.23e-1"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + assertOverlayTypes(keyMap("coll/doc1", CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1"); + + query = query("coll").filter(filter("key", ">", new Decimal128Value("1.2e3"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = query("coll").filter(filter("key", "<", new Decimal128Value("-1.2e3"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + } + + @Test + public void testIndexesMinKey() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); + configureFieldIndexes(singletonList(index)); + + writeMutation(setMutation("coll/doc1", map("key", null))); + writeMutation(setMutation("coll/doc2", map("key", MinKey.instance()))); + writeMutation(setMutation("coll/doc3", map("key", MinKey.instance()))); + writeMutation(setMutation("coll/doc4", map("key", 1))); + writeMutation(setMutation("coll/doc5", map("key", MaxKey.instance()))); + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 5, /*
byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set, + "coll/doc4", + CountingQueryEngine.OverlayType.Set, + "coll/doc5", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3", "coll/doc4", "coll/doc5"); + + query = query("coll").filter(filter("key", "==", MinKey.instance())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "!=", MinKey.instance())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc4", + CountingQueryEngine.OverlayType.Set, + "coll/doc5", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc4", "coll/doc5"); + + query = query("coll").filter(filter("key", ">=", MinKey.instance())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "<=", MinKey.instance())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", ">", MinKey.instance())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = query("coll").filter(filter("key", "<", MinKey.instance())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = + query("coll") + .filter(filter("key", "in", Arrays.asList(MinKey.instance(), MaxKey.instance()))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set, + "coll/doc5", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3", "coll/doc5"); + } + + @Test + public void testIndexesMaxKey() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); + configureFieldIndexes(singletonList(index)); + + writeMutation(setMutation("coll/doc1", map("key", null))); + writeMutation(setMutation("coll/doc2", map("key", MinKey.instance()))); + writeMutation(setMutation("coll/doc3", map("key", 1))); + writeMutation(setMutation("coll/doc4", map("key", MaxKey.instance()))); + writeMutation(setMutation("coll/doc5", map("key", MaxKey.instance()))); + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 5, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + 
CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set, + "coll/doc4", + CountingQueryEngine.OverlayType.Set, + "coll/doc5", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3", "coll/doc4", "coll/doc5"); + + query = query("coll").filter(filter("key", "==", MaxKey.instance())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc4", + CountingQueryEngine.OverlayType.Set, + "coll/doc5", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc4", "coll/doc5"); + + query = query("coll").filter(filter("key", "!=", MaxKey.instance())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", ">=", MaxKey.instance())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc4", + CountingQueryEngine.OverlayType.Set, + "coll/doc5", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc4", "coll/doc5"); + + query = query("coll").filter(filter("key", "<=", MaxKey.instance())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc4", + CountingQueryEngine.OverlayType.Set, + "coll/doc5", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc4", "coll/doc5"); + + query = query("coll").filter(filter("key", ">", MaxKey.instance())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = query("coll").filter(filter("key", "<", MaxKey.instance())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + } + + @Test + public void testIndexesAllBsonTypesTogether() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.DESCENDING); + configureFieldIndexes(singletonList(index)); + + writeMutation(setMutation("coll/doc1", map("key", MinKey.instance()))); + writeMutation(setMutation("coll/doc2", map("key", new Int32Value(2)))); + writeMutation(setMutation("coll/doc3", map("key", new Int32Value(-1)))); + writeMutation(setMutation("coll/doc4", map("key", new Decimal128Value("1.2e3")))); + writeMutation(setMutation("coll/doc5", map("key", new Decimal128Value("-0.0")))); + writeMutation(setMutation("coll/doc6", map("key", new BsonTimestamp(1000, 1001)))); + writeMutation(setMutation("coll/doc7", map("key", new BsonTimestamp(1000, 1000)))); + writeMutation( + setMutation("coll/doc8", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4})))); + writeMutation( + setMutation("coll/doc9", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})))); + writeMutation( + setMutation("coll/doc10", map("key", new BsonObjectId("507f191e810c19729de860eb")))); + writeMutation( + setMutation("coll/doc11", map("key", new BsonObjectId("507f191e810c19729de860ea")))); + writeMutation(setMutation("coll/doc12", map("key", new RegexValue("^bar", "m")))); + writeMutation(setMutation("coll/doc13", map("key", new RegexValue("^bar", "i")))); + writeMutation(setMutation("coll/doc14", 
map("key", MaxKey.instance()))); + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "desc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 14, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set, + "coll/doc4", + CountingQueryEngine.OverlayType.Set, + "coll/doc5", + CountingQueryEngine.OverlayType.Set, + "coll/doc6", + CountingQueryEngine.OverlayType.Set, + "coll/doc7", + CountingQueryEngine.OverlayType.Set, + "coll/doc8", + CountingQueryEngine.OverlayType.Set, + "coll/doc9", + CountingQueryEngine.OverlayType.Set, + "coll/doc10", + CountingQueryEngine.OverlayType.Set, + "coll/doc11", + CountingQueryEngine.OverlayType.Set, + "coll/doc12", + CountingQueryEngine.OverlayType.Set, + "coll/doc13", + CountingQueryEngine.OverlayType.Set, + "coll/doc14", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned( + "coll/doc14", // maxKey + "coll/doc12", // regex m + "coll/doc13", // regex i + "coll/doc10", // objectId eb + "coll/doc11", // objectId ea + "coll/doc8", // binary [1,2,4] + "coll/doc9", // binary [1,2,3] + "coll/doc6", // timestamp 1,2 + "coll/doc7", // timestamp 1,1 + "coll/doc4", // decimal128 1200 + "coll/doc2", // int32 2 + "coll/doc5", // decimal128 -0.0 + "coll/doc3", // int32 -1 + "coll/doc1" // minKey + ); + } + + @Test + public void testIndexesAllTypesTogether() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); + configureFieldIndexes(singletonList(index)); + + writeMutation(setMutation("coll/doc1", map("key", null))); + writeMutation(setMutation("coll/doc2", map("key", MinKey.instance()))); + writeMutation(setMutation("coll/doc3", map("key", true))); + writeMutation(setMutation("coll/doc4", map("key", Double.NaN))); + writeMutation(setMutation("coll/doc5", map("key", new Int32Value(1)))); + writeMutation(setMutation("coll/doc6", map("key", 2.0))); + writeMutation(setMutation("coll/doc7", map("key", 3))); + writeMutation(setMutation("coll/doc8", map("key", new Decimal128Value("1.2e3")))); + writeMutation(setMutation("coll/doc9", map("key", new Timestamp(100, 123456000)))); + writeMutation(setMutation("coll/doc10", map("key", new BsonTimestamp(1, 2)))); + writeMutation(setMutation("coll/doc11", map("key", "string"))); + writeMutation(setMutation("coll/doc12", map("key", blob(1, 2, 3)))); + writeMutation( + setMutation("coll/doc13", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})))); + writeMutation(setMutation("coll/doc14", map("key", ref("foo/bar")))); + writeMutation( + setMutation("coll/doc15", map("key", new BsonObjectId("507f191e810c19729de860ea")))); + writeMutation(setMutation("coll/doc16", map("key", new GeoPoint(1, 2)))); + writeMutation(setMutation("coll/doc17", map("key", new RegexValue("^bar", "m")))); + writeMutation(setMutation("coll/doc18", map("key", Arrays.asList(2, "foo")))); + writeMutation(setMutation("coll/doc19", map("key", FieldValue.vector(new double[] {1, 2, 3})))); + writeMutation(setMutation("coll/doc20", map("key", map("bar", 1, "foo", 2)))); + writeMutation(setMutation("coll/doc21", map("key", MaxKey.instance()))); + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 21, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, 
+ "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set, + "coll/doc4", + CountingQueryEngine.OverlayType.Set, + "coll/doc5", + CountingQueryEngine.OverlayType.Set, + "coll/doc6", + CountingQueryEngine.OverlayType.Set, + "coll/doc7", + CountingQueryEngine.OverlayType.Set, + "coll/doc8", + CountingQueryEngine.OverlayType.Set, + "coll/doc9", + CountingQueryEngine.OverlayType.Set, + "coll/doc10", + CountingQueryEngine.OverlayType.Set, + "coll/doc11", + CountingQueryEngine.OverlayType.Set, + "coll/doc12", + CountingQueryEngine.OverlayType.Set, + "coll/doc13", + CountingQueryEngine.OverlayType.Set, + "coll/doc14", + CountingQueryEngine.OverlayType.Set, + "coll/doc15", + CountingQueryEngine.OverlayType.Set, + "coll/doc16", + CountingQueryEngine.OverlayType.Set, + "coll/doc17", + CountingQueryEngine.OverlayType.Set, + "coll/doc18", + CountingQueryEngine.OverlayType.Set, + "coll/doc19", + CountingQueryEngine.OverlayType.Set, + "coll/doc20", + CountingQueryEngine.OverlayType.Set, + "coll/doc21", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned( + "coll/doc1", + "coll/doc2", + "coll/doc3", + "coll/doc4", + "coll/doc5", + "coll/doc6", + "coll/doc7", + "coll/doc8", + "coll/doc9", + "coll/doc10", + "coll/doc11", + "coll/doc12", + "coll/doc13", + "coll/doc14", + "coll/doc15", + "coll/doc16", + "coll/doc17", + "coll/doc18", + "coll/doc19", + "coll/doc20", + "coll/doc21"); + } + @Test public void testIndexesServerTimestamps() { FieldIndex index = @@ -493,7 +1530,7 @@ public void testCanAutoCreateIndexesWorksWithOrQuery() { // Full matched index should be created. executeQuery(query); assertRemoteDocumentsRead(/* byKey= */ 0, /* byCollection= */ 2); - assertQueryReturned("coll/e", "coll/a"); + assertQueryReturned("coll/a", "coll/e"); backfillIndexes(); @@ -501,7 +1538,7 @@ public void testCanAutoCreateIndexesWorksWithOrQuery() { executeQuery(query); assertRemoteDocumentsRead(/* byKey= */ 2, /* byCollection= */ 1); - assertQueryReturned("coll/f", "coll/e", "coll/a"); + assertQueryReturned("coll/a", "coll/e", "coll/f"); } @Test @@ -521,7 +1558,7 @@ public void testDoesNotAutoCreateIndexesForSmallCollections() { // SDK will not create indexes since collection size is too small. executeQuery(query); assertRemoteDocumentsRead(/* byKey= */ 0, /* byCollection= */ 2); - assertQueryReturned("coll/a", "coll/e"); + assertQueryReturned("coll/e", "coll/a"); backfillIndexes(); @@ -529,7 +1566,7 @@ public void testDoesNotAutoCreateIndexesForSmallCollections() { executeQuery(query); assertRemoteDocumentsRead(/* byKey= */ 0, /* byCollection= */ 3); - assertQueryReturned("coll/a", "coll/e", "coll/f"); + assertQueryReturned("coll/e", "coll/f", "coll/a"); } @Test @@ -598,7 +1635,7 @@ public void testIndexAutoCreationWorksWhenBackfillerRunsHalfway() { executeQuery(query); assertRemoteDocumentsRead(/* byKey= */ 1, /* byCollection= */ 2); - assertQueryReturned("coll/a", "coll/e", "coll/f"); + assertQueryReturned("coll/a", "coll/f", "coll/e"); } @Test @@ -621,7 +1658,7 @@ public void testIndexCreatedByIndexAutoCreationExistsAfterTurnOffAutoCreation() // Full matched index should be created. 
executeQuery(query); assertRemoteDocumentsRead(/* byKey= */ 0, /* byCollection= */ 2); - assertQueryReturned("coll/a", "coll/e"); + assertQueryReturned("coll/e", "coll/a"); setIndexAutoCreationEnabled(false); @@ -631,7 +1668,7 @@ public void testIndexCreatedByIndexAutoCreationExistsAfterTurnOffAutoCreation() executeQuery(query); assertRemoteDocumentsRead(/* byKey= */ 2, /* byCollection= */ 1); - assertQueryReturned("coll/a", "coll/e", "coll/f"); + assertQueryReturned("coll/e", "coll/a", "coll/f"); } @Test diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/model/ValuesTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/model/ValuesTest.java index 6a7dbe9c259..5530c457847 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/model/ValuesTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/model/ValuesTest.java @@ -23,14 +23,26 @@ import static com.google.firebase.firestore.testutil.TestUtil.ref; import static com.google.firebase.firestore.testutil.TestUtil.wrapRef; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; import com.google.common.testing.EqualsTester; import com.google.firebase.Timestamp; +import com.google.firebase.firestore.BsonBinaryData; +import com.google.firebase.firestore.BsonObjectId; +import com.google.firebase.firestore.BsonTimestamp; +import com.google.firebase.firestore.Decimal128Value; import com.google.firebase.firestore.FieldValue; import com.google.firebase.firestore.GeoPoint; +import com.google.firebase.firestore.Int32Value; +import com.google.firebase.firestore.MaxKey; +import com.google.firebase.firestore.MinKey; +import com.google.firebase.firestore.RegexValue; +import com.google.firebase.firestore.model.Values.MapRepresentation; import com.google.firebase.firestore.testutil.ComparatorTester; import com.google.firebase.firestore.testutil.TestUtil; import com.google.firestore.v1.Value; +import com.google.protobuf.ByteString; import java.util.Arrays; import java.util.Calendar; import java.util.Collections; @@ -64,6 +76,32 @@ public void testValueEquality() { GeoPoint geoPoint2 = new GeoPoint(0, 2); Timestamp timestamp1 = new Timestamp(date1); Timestamp timestamp2 = new Timestamp(date2); + + BsonObjectId objectId1 = new BsonObjectId("507f191e810c19729de860ea"); + BsonObjectId objectId2 = new BsonObjectId("507f191e810c19729de860eb"); + + BsonBinaryData binaryData1 = BsonBinaryData.fromBytes(1, new byte[] {1, 2}); + BsonBinaryData binaryData2 = BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}); + BsonBinaryData binaryData3 = BsonBinaryData.fromBytes(2, new byte[] {1, 2}); + + BsonTimestamp bsonTimestamp1 = new BsonTimestamp(1, 2); + BsonTimestamp bsonTimestamp2 = new BsonTimestamp(1, 3); + BsonTimestamp bsonTimestamp3 = new BsonTimestamp(2, 2); + + Int32Value int32Value1 = new Int32Value(1); + Int32Value int32Value2 = new Int32Value(2); + + Decimal128Value decimal128Value1 = new Decimal128Value("-1.2e3"); + Decimal128Value decimal128Value2 = new Decimal128Value("0.0"); + Decimal128Value decimal128Value3 = new Decimal128Value("1.2e-3"); + + RegexValue regexValue1 = new RegexValue("^foo", "i"); + RegexValue regexValue2 = new RegexValue("^foo", "m"); + RegexValue regexValue3 = new RegexValue("^bar", "i"); + + MinKey minKey = MinKey.instance(); + MaxKey maxKey = MaxKey.instance(); + new EqualsTester() .addEqualityGroup(wrap(true), wrap(true)) .addEqualityGroup(wrap(false), wrap(false)) @@ -108,6 
+146,25 @@ public void testValueEquality() { .addEqualityGroup(wrap(map("bar", 2, "foo", 1))) .addEqualityGroup(wrap(map("bar", 1))) .addEqualityGroup(wrap(map("foo", 1))) + .addEqualityGroup(wrap(new BsonObjectId("507f191e810c19729de860ea")), wrap(objectId1)) + .addEqualityGroup(wrap(new BsonObjectId("507f191e810c19729de860eb")), wrap(objectId2)) + .addEqualityGroup(wrap(BsonBinaryData.fromBytes(1, new byte[] {1, 2})), wrap(binaryData1)) + .addEqualityGroup( + wrap(BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})), wrap(binaryData2)) + .addEqualityGroup(wrap(BsonBinaryData.fromBytes(2, new byte[] {1, 2})), wrap(binaryData3)) + .addEqualityGroup(wrap(new BsonTimestamp(1, 2)), wrap(bsonTimestamp1)) + .addEqualityGroup(wrap(new BsonTimestamp(1, 3)), wrap(bsonTimestamp2)) + .addEqualityGroup(wrap(new BsonTimestamp(2, 2)), wrap(bsonTimestamp3)) + .addEqualityGroup(wrap(new Int32Value(1)), wrap(int32Value1)) + .addEqualityGroup(wrap(new Int32Value(2)), wrap(int32Value2)) + .addEqualityGroup(wrap(new Decimal128Value("-1.2e3")), wrap(decimal128Value1)) + .addEqualityGroup(wrap(new Decimal128Value("0.0")), wrap(decimal128Value2)) + .addEqualityGroup(wrap(new Decimal128Value("1.2e-3")), wrap(decimal128Value3)) + .addEqualityGroup(wrap(new RegexValue("^foo", "i")), wrap(regexValue1)) + .addEqualityGroup(wrap(new RegexValue("^foo", "m")), wrap(regexValue2)) + .addEqualityGroup(wrap(new RegexValue("^bar", "i")), wrap(regexValue3)) + .addEqualityGroup(wrap(MinKey.instance()), wrap(minKey)) + .addEqualityGroup(wrap(MaxKey.instance()), wrap(maxKey)) .testEquals(); } @@ -120,35 +177,68 @@ public void testValueOrdering() { // null first .addEqualityGroup(wrap((Object) null)) + // MinKey is after null + .addEqualityGroup(wrap(MinKey.instance())) + // booleans .addEqualityGroup(wrap(false)) .addEqualityGroup(wrap(true)) - // numbers - .addEqualityGroup(wrap(Double.NaN)) - .addEqualityGroup(wrap(Double.NEGATIVE_INFINITY)) + // 64-bit and 32-bit numbers order together numerically. + .addEqualityGroup(wrap(Double.NaN), wrap(new Decimal128Value("NaN"))) + .addEqualityGroup(wrap(Double.NEGATIVE_INFINITY), wrap(new Decimal128Value("-Infinity"))) .addEqualityGroup(wrap(-Double.MAX_VALUE)) - .addEqualityGroup(wrap(Long.MIN_VALUE)) - .addEqualityGroup(wrap(-1.1)) - .addEqualityGroup(wrap(-1.0)) + .addEqualityGroup(wrap(Long.MIN_VALUE), wrap(new Decimal128Value("-9223372036854775808"))) + .addEqualityGroup( + wrap(new Int32Value(-2147483648)), + wrap(Integer.MIN_VALUE), + wrap(new Decimal128Value("-2147483648"))) + // Note: decimal 128 would have equality issue with other number types if the value doesn't + // have a 2's complement representation, e.g, 1.1. This is expected. + .addEqualityGroup(wrap(-1.5), wrap(new Decimal128Value("-1.5"))) + .addEqualityGroup(wrap(-1.0), wrap(new Decimal128Value("-1.0"))) .addEqualityGroup(wrap(-Double.MIN_NORMAL)) .addEqualityGroup(wrap(-Double.MIN_VALUE)) // Zeros all compare the same. - .addEqualityGroup(wrap(-0.0), wrap(0.0), wrap(0L)) + .addEqualityGroup( + wrap(-0.0), + wrap(0.0), + wrap(0L), + wrap(new Int32Value(0)), + wrap(new Decimal128Value("0")), + wrap(new Decimal128Value("0.0")), + wrap(new Decimal128Value("-0")), + wrap(new Decimal128Value("-0.0")), + wrap(new Decimal128Value("+0")), + wrap(new Decimal128Value("+0.0"))) .addEqualityGroup(wrap(Double.MIN_VALUE)) .addEqualityGroup(wrap(Double.MIN_NORMAL)) - .addEqualityGroup(wrap(0.1)) - // Doubles and Longs compareTo() the same. 
- .addEqualityGroup(wrap(1.0), wrap(1L)) + .addEqualityGroup(wrap(0.5), wrap(new Decimal128Value("0.5"))) + // Doubles, Longs, Int32Values compareTo() the same. + .addEqualityGroup( + wrap(1.0), + wrap(1L), + wrap(new Int32Value(1)), + wrap(new Decimal128Value("1")), + wrap(new Decimal128Value("1.0"))) .addEqualityGroup(wrap(1.1)) + .addEqualityGroup( + wrap(new Int32Value(2147483647)), + wrap(Integer.MAX_VALUE), + wrap(new Decimal128Value("2.147483647e9"))) .addEqualityGroup(wrap(Long.MAX_VALUE)) .addEqualityGroup(wrap(Double.MAX_VALUE)) - .addEqualityGroup(wrap(Double.POSITIVE_INFINITY)) + .addEqualityGroup(wrap(Double.POSITIVE_INFINITY), wrap(new Decimal128Value("Infinity"))) // dates .addEqualityGroup(wrap(date1)) .addEqualityGroup(wrap(date2)) + // bson timestamps + .addEqualityGroup(wrap(new BsonTimestamp(123, 4))) + .addEqualityGroup(wrap(new BsonTimestamp(123, 5))) + .addEqualityGroup(wrap(new BsonTimestamp(124, 0))) + // server timestamps come after all concrete timestamps. .addEqualityGroup(wrap(ServerTimestamps.valueOf(new Timestamp(date1), null))) .addEqualityGroup(wrap(ServerTimestamps.valueOf(new Timestamp(date2), null))) @@ -172,6 +262,15 @@ public void testValueOrdering() { .addEqualityGroup(wrap(blob(0, 1, 2, 4, 3))) .addEqualityGroup(wrap(blob(255))) + // bson binary data + .addEqualityGroup( + wrap(BsonBinaryData.fromBytes(1, new byte[] {})), + wrap(BsonBinaryData.fromByteString(1, ByteString.EMPTY))) + .addEqualityGroup(wrap(BsonBinaryData.fromBytes(1, new byte[] {0}))) + .addEqualityGroup(wrap(BsonBinaryData.fromBytes(5, new byte[] {1, 2}))) + .addEqualityGroup(wrap(BsonBinaryData.fromBytes(5, new byte[] {1, 2, 3}))) + .addEqualityGroup(wrap(BsonBinaryData.fromBytes(7, new byte[] {1}))) + // resource names .addEqualityGroup(wrap(wrapRef(dbId("p1", "d1"), key("c1/doc1")))) .addEqualityGroup(wrap(wrapRef(dbId("p1", "d1"), key("c1/doc2")))) @@ -180,6 +279,15 @@ public void testValueOrdering() { .addEqualityGroup(wrap(wrapRef(dbId("p1", "d2"), key("c1/doc1")))) .addEqualityGroup(wrap(wrapRef(dbId("p2", "d1"), key("c1/doc1")))) + // bson object id + .addEqualityGroup(wrap(new BsonObjectId("507f191e810c19729de860ea"))) + .addEqualityGroup(wrap(new BsonObjectId("507f191e810c19729de860eb"))) + // latin small letter e + combining acute accent + latin small letter b + .addEqualityGroup(wrap(new BsonObjectId("e\u0301b"))) + .addEqualityGroup(wrap(new BsonObjectId("æ"))) + // latin small letter e with acute accent + latin small letter a + .addEqualityGroup(wrap(new BsonObjectId("\u00e9a"))) + // geo points .addEqualityGroup(wrap(new GeoPoint(-90, -180))) .addEqualityGroup(wrap(new GeoPoint(-90, 0))) @@ -194,6 +302,16 @@ public void testValueOrdering() { .addEqualityGroup(wrap(new GeoPoint(90, 0))) .addEqualityGroup(wrap(new GeoPoint(90, 180))) + // regex + .addEqualityGroup(wrap(new RegexValue("^foo", "i"))) + .addEqualityGroup(wrap(new RegexValue("^foo", "m"))) + .addEqualityGroup(wrap(new RegexValue("^zoo", "i"))) + // latin small letter e + combining acute accent + latin small letter b + .addEqualityGroup(wrap(new RegexValue("e\u0301b", "i"))) + .addEqualityGroup(wrap(new RegexValue("æ", "i"))) + // latin small letter e with acute accent + latin small letter a + .addEqualityGroup(wrap(new RegexValue("\u00e9a", "i"))) + // arrays .addEqualityGroup(wrap(Arrays.asList("bar"))) .addEqualityGroup(wrap(Arrays.asList("foo", 1))) @@ -212,21 +330,33 @@ public void testValueOrdering() { .addEqualityGroup(wrap(map("foo", 1))) .addEqualityGroup(wrap(map("foo", 2))) 
.addEqualityGroup(wrap(map("foo", "0"))) + + // MaxKey is last + .addEqualityGroup(wrap(MaxKey.instance())) .testCompare(); } @Test public void testLowerBound() { new ComparatorTester() - // null first + // lower bound of null is null .addEqualityGroup(wrap(getLowerBound(TestUtil.wrap((Object) null))), wrap((Object) null)) + // lower bound of MinKey is MinKey + .addEqualityGroup( + wrap(getLowerBound(TestUtil.wrap(MinKey.instance()))), wrap(MinKey.instance())) + // booleans .addEqualityGroup(wrap(false), wrap(getLowerBound(TestUtil.wrap(true)))) .addEqualityGroup(wrap(true)) // numbers - .addEqualityGroup(wrap(getLowerBound(TestUtil.wrap(1.0))), wrap(Double.NaN)) + // Note: 32-bit,64-bit integers and 128-bit decimals shares the same lower bound + .addEqualityGroup( + wrap(getLowerBound(TestUtil.wrap(1.0))), + wrap(Double.NaN), + wrap(getLowerBound(TestUtil.wrap(new Int32Value(1)))), + wrap(getLowerBound(TestUtil.wrap(new Decimal128Value("1"))))) .addEqualityGroup(wrap(Double.NEGATIVE_INFINITY)) .addEqualityGroup(wrap(Long.MIN_VALUE)) @@ -234,6 +364,12 @@ public void testLowerBound() { .addEqualityGroup(wrap(getLowerBound(TestUtil.wrap(date1)))) .addEqualityGroup(wrap(date1)) + // bson timestamps + .addEqualityGroup( + wrap(getLowerBound(TestUtil.wrap(new BsonTimestamp(4294967295L, 4294967295L)))), + wrap(new BsonTimestamp(0, 0))) + .addEqualityGroup(wrap(new BsonTimestamp(1, 1))) + // strings .addEqualityGroup(wrap(getLowerBound(TestUtil.wrap("foo"))), wrap("")) .addEqualityGroup(wrap("\000")) @@ -242,17 +378,35 @@ public void testLowerBound() { .addEqualityGroup(wrap(getLowerBound(TestUtil.wrap(blob(1, 2, 3)))), wrap(blob())) .addEqualityGroup(wrap(blob(0))) + // bson binary data + .addEqualityGroup( + wrap(getLowerBound(TestUtil.wrap(BsonBinaryData.fromBytes(128, new byte[] {1, 2, 3})))), + wrap(BsonBinaryData.fromBytes(0, new byte[] {})), + wrap(BsonBinaryData.fromByteString((byte) 0, ByteString.EMPTY))) + .addEqualityGroup(wrap(BsonBinaryData.fromBytes(0, new byte[] {0}))) + // resource names .addEqualityGroup( wrap(getLowerBound(wrapRef(dbId("foo", "bar"), key("x/y")))), wrap(wrapRef(dbId("", ""), key("")))) .addEqualityGroup(wrap(wrapRef(dbId("", ""), key("a/a")))) + // bson object ids + .addEqualityGroup( + wrap(getLowerBound(TestUtil.wrap(new BsonObjectId("zzz")))), wrap(new BsonObjectId(""))) + .addEqualityGroup(wrap(new BsonObjectId("a"))) + // geo points .addEqualityGroup( wrap(getLowerBound(TestUtil.wrap(new GeoPoint(-90, 0)))), wrap(new GeoPoint(-90, -180))) .addEqualityGroup(wrap(new GeoPoint(-90, 0))) + // regular expressions + .addEqualityGroup( + wrap(getLowerBound(TestUtil.wrap(new RegexValue("^foo", "i")))), + wrap(new RegexValue("", ""))) + .addEqualityGroup(wrap(new RegexValue("^foo", "i"))) + // arrays .addEqualityGroup( wrap(getLowerBound(TestUtil.wrap(Collections.singletonList(false)))), @@ -271,6 +425,9 @@ public void testLowerBound() { // objects .addEqualityGroup(wrap(getLowerBound(TestUtil.wrap(map("foo", "bar")))), wrap(map())) + + // maxKey + .addEqualityGroup(wrap(MaxKey.instance())) .testCompare(); } @@ -279,21 +436,36 @@ public void testUpperBound() { new ComparatorTester() // null first .addEqualityGroup(wrap((Object) null)) - .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap((Object) null)))) + + // upper value of null is MinKey + .addEqualityGroup( + wrap(getUpperBound(TestUtil.wrap((Object) null))), wrap(MinKey.instance())) + + // upper value of MinKey is boolean `false` + .addEqualityGroup(wrap(false), 
wrap(getUpperBound(TestUtil.wrap(MinKey.instance())))) // booleans .addEqualityGroup(wrap(true)) - .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(false)))) + .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(false))), wrap(Double.NaN)) // numbers + .addEqualityGroup(wrap(new Int32Value(2147483647))) // largest int32 value .addEqualityGroup(wrap(Long.MAX_VALUE)) .addEqualityGroup(wrap(Double.POSITIVE_INFINITY)) - .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(1.0)))) + .addEqualityGroup( + wrap(getUpperBound(TestUtil.wrap(0))), + wrap(getUpperBound(TestUtil.wrap(new Int32Value(0)))), + wrap(getUpperBound(TestUtil.wrap(new Decimal128Value("-0.0"))))) // dates .addEqualityGroup(wrap(date1)) .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(date1)))) + // bson timestamps + .addEqualityGroup( + wrap(new BsonTimestamp(4294967295L, 4294967295L))) // largest bson timestamp value + .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(new BsonTimestamp(1, 1))))) + // strings .addEqualityGroup(wrap("\000")) .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap("\000")))) @@ -302,14 +474,27 @@ public void testUpperBound() { .addEqualityGroup(wrap(blob(255))) .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(blob(255))))) + // bson binary data + .addEqualityGroup(wrap(BsonBinaryData.fromBytes(128, new byte[] {1, 2}))) + .addEqualityGroup( + wrap(getUpperBound(TestUtil.wrap(BsonBinaryData.fromBytes(0, new byte[] {}))))) + // resource names .addEqualityGroup(wrap(wrapRef(dbId("", ""), key("a/a")))) .addEqualityGroup(wrap(getUpperBound(wrapRef(dbId("", ""), key("a/a"))))) + // bson object ids + .addEqualityGroup(wrap(new BsonObjectId("zzz"))) + .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(new BsonObjectId("a"))))) + // geo points .addEqualityGroup(wrap(new GeoPoint(90, 180))) .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(new GeoPoint(90, 180))))) + // regular expressions + .addEqualityGroup(wrap(new RegexValue("^foo", "i"))) + .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(new RegexValue("", ""))))) + // arrays .addEqualityGroup(wrap(Collections.singletonList(false))) .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(Collections.singletonList(false))))) @@ -325,7 +510,10 @@ public void testUpperBound() { // objects .addEqualityGroup(wrap(map("a", "b"))) - .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(map("a", "b"))))) + + // upper value of objects is MaxKey + .addEqualityGroup( + wrap(getUpperBound(TestUtil.wrap(map("a", "b")))), wrap(MaxKey.instance())) .testCompare(); } @@ -346,6 +534,21 @@ public void testCanonicalIds() { assertCanonicalId( TestUtil.wrap(map("a", Arrays.asList("b", map("c", new GeoPoint(30, 60))))), "{a:[b,{c:geo(30.0,60.0)}]}"); + + assertCanonicalId(TestUtil.wrap(new RegexValue("a", "b")), "{__regex__:{options:b,pattern:a}}"); + + assertCanonicalId(TestUtil.wrap(new BsonObjectId("foo")), "{__oid__:foo}"); + assertCanonicalId( + TestUtil.wrap(new BsonTimestamp(1, 2)), "{__request_timestamp__:{increment:2,seconds:1}}"); + assertCanonicalId((TestUtil.wrap(new Int32Value(1))), "{__int__:1}"); + assertCanonicalId(TestUtil.wrap(new Decimal128Value("1.2e3")), "{__decimal128__:1.2e3}"); + assertCanonicalId( + TestUtil.wrap(BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})), "{__binary__:01010203}"); + assertCanonicalId( + TestUtil.wrap(BsonBinaryData.fromBytes(128, new byte[] {1, 2, 3})), + "{__binary__:80010203}"); + assertCanonicalId(TestUtil.wrap(MinKey.instance()), "{__min__:null}"); + assertCanonicalId(TestUtil.wrap(MaxKey.instance()), "{__max__:null}"); } 
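The canonical-id assertions above show the sentinel map keys the SDK uses for the new types (__oid__, __request_timestamp__, __int__, __decimal128__, __binary__, __regex__, __min__, __max__). As a minimal usage sketch, assuming the new value classes and the typed DocumentSnapshot getters added in this change (for example getRegexValue and getBsonTimestamp), and with made-up collection and field names, an application could round-trip these values roughly as follows:

import com.google.android.gms.tasks.Task;
import com.google.firebase.firestore.BsonTimestamp;
import com.google.firebase.firestore.DocumentReference;
import com.google.firebase.firestore.DocumentSnapshot;
import com.google.firebase.firestore.FirebaseFirestore;
import com.google.firebase.firestore.RegexValue;
import java.util.HashMap;
import java.util.Map;

class BsonTypesUsageSketch {
  // Writes a RegexValue and a BsonTimestamp, then reads them back with the typed getters.
  Task<DocumentSnapshot> roundTrip(FirebaseFirestore db) {
    DocumentReference doc = db.collection("coll").document("doc1"); // hypothetical names
    Map<String, Object> data = new HashMap<>();
    data.put("pattern", new RegexValue("^bar", "i"));   // sent as {__regex__: {pattern, options}}
    data.put("ts", new BsonTimestamp(1000, 1001));      // sent as {__request_timestamp__: {seconds, increment}}
    return doc.set(data)
        .continueWithTask(task -> doc.get())
        .addOnSuccessListener(
            snapshot -> {
              RegexValue pattern = snapshot.getRegexValue("pattern"); // pattern "^bar", options "i"
              BsonTimestamp ts = snapshot.getBsonTimestamp("ts");     // seconds=1000, increment=1001
            });
  }
}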
@Test @@ -358,6 +561,113 @@ private void assertCanonicalId(Value proto, String expectedCanonicalId) { assertEquals(expectedCanonicalId, Values.canonicalId(proto)); } + @Test + public void DetectsBsonTypesCorrectly() { + Value minKeyValue = TestUtil.wrap(MinKey.instance()); + Value maxKeyValue = TestUtil.wrap(MaxKey.instance()); + Value int32Value = TestUtil.wrap(new Int32Value(1)); + Value decimal128 = TestUtil.wrap(new Decimal128Value("1.2e3")); + Value regexValue = TestUtil.wrap(new RegexValue("^foo", "i")); + Value bsonTimestampValue = TestUtil.wrap(new BsonTimestamp(1, 2)); + Value bsonObjectIdValue = TestUtil.wrap(new BsonObjectId("foo")); + Value bsonBinaryDataValue1 = TestUtil.wrap(BsonBinaryData.fromBytes(1, new byte[] {})); + Value bsonBinaryDataValue2 = TestUtil.wrap(BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4})); + + assertTrue(Values.isMinKey(minKeyValue)); + assertFalse(Values.isMinKey(maxKeyValue)); + assertFalse(Values.isMinKey(int32Value)); + assertFalse(Values.isMinKey(decimal128)); + assertFalse(Values.isMinKey(regexValue)); + assertFalse(Values.isMinKey(bsonTimestampValue)); + assertFalse(Values.isMinKey(bsonObjectIdValue)); + assertFalse(Values.isMinKey(bsonBinaryDataValue1)); + assertFalse(Values.isMinKey(bsonBinaryDataValue2)); + + assertFalse(Values.isMaxKey(minKeyValue)); + assertTrue(Values.isMaxKey(maxKeyValue)); + assertFalse(Values.isMaxKey(int32Value)); + assertFalse(Values.isMaxKey(decimal128)); + assertFalse(Values.isMaxKey(regexValue)); + assertFalse(Values.isMaxKey(bsonTimestampValue)); + assertFalse(Values.isMaxKey(bsonObjectIdValue)); + assertFalse(Values.isMaxKey(bsonBinaryDataValue1)); + assertFalse(Values.isMaxKey(bsonBinaryDataValue2)); + + assertFalse(Values.isInt32Value(minKeyValue)); + assertFalse(Values.isInt32Value(maxKeyValue)); + assertTrue(Values.isInt32Value(int32Value)); + assertFalse(Values.isInt32Value(decimal128)); + assertFalse(Values.isInt32Value(regexValue)); + assertFalse(Values.isInt32Value(bsonTimestampValue)); + assertFalse(Values.isInt32Value(bsonObjectIdValue)); + assertFalse(Values.isInt32Value(bsonBinaryDataValue1)); + assertFalse(Values.isInt32Value(bsonBinaryDataValue2)); + + assertFalse(Values.isDecimal128Value(minKeyValue)); + assertFalse(Values.isDecimal128Value(maxKeyValue)); + assertFalse(Values.isDecimal128Value(int32Value)); + assertTrue(Values.isDecimal128Value(decimal128)); + assertFalse(Values.isDecimal128Value(regexValue)); + assertFalse(Values.isDecimal128Value(bsonTimestampValue)); + assertFalse(Values.isDecimal128Value(bsonObjectIdValue)); + assertFalse(Values.isDecimal128Value(bsonBinaryDataValue1)); + assertFalse(Values.isDecimal128Value(bsonBinaryDataValue2)); + + assertFalse(Values.isRegexValue(minKeyValue)); + assertFalse(Values.isRegexValue(maxKeyValue)); + assertFalse(Values.isRegexValue(int32Value)); + assertFalse(Values.isRegexValue(decimal128)); + assertTrue(Values.isRegexValue(regexValue)); + assertFalse(Values.isRegexValue(bsonTimestampValue)); + assertFalse(Values.isRegexValue(bsonObjectIdValue)); + assertFalse(Values.isRegexValue(bsonBinaryDataValue1)); + assertFalse(Values.isRegexValue(bsonBinaryDataValue2)); + + assertFalse(Values.isBsonTimestamp(minKeyValue)); + assertFalse(Values.isBsonTimestamp(maxKeyValue)); + assertFalse(Values.isBsonTimestamp(int32Value)); + assertFalse(Values.isBsonTimestamp(decimal128)); + assertFalse(Values.isBsonTimestamp(regexValue)); + assertTrue(Values.isBsonTimestamp(bsonTimestampValue)); + assertFalse(Values.isBsonTimestamp(bsonObjectIdValue)); + 
assertFalse(Values.isBsonTimestamp(bsonBinaryDataValue1)); + assertFalse(Values.isBsonTimestamp(bsonBinaryDataValue2)); + + assertFalse(Values.isBsonObjectId(minKeyValue)); + assertFalse(Values.isBsonObjectId(maxKeyValue)); + assertFalse(Values.isBsonObjectId(int32Value)); + assertFalse(Values.isBsonObjectId(decimal128)); + assertFalse(Values.isBsonObjectId(regexValue)); + assertFalse(Values.isBsonObjectId(bsonTimestampValue)); + assertTrue(Values.isBsonObjectId(bsonObjectIdValue)); + assertFalse(Values.isBsonObjectId(bsonBinaryDataValue1)); + assertFalse(Values.isBsonObjectId(bsonBinaryDataValue2)); + + assertFalse(Values.isBsonBinaryData(minKeyValue)); + assertFalse(Values.isBsonBinaryData(maxKeyValue)); + assertFalse(Values.isBsonBinaryData(int32Value)); + assertFalse(Values.isBsonBinaryData(decimal128)); + assertFalse(Values.isBsonBinaryData(regexValue)); + assertFalse(Values.isBsonBinaryData(bsonTimestampValue)); + assertFalse(Values.isBsonBinaryData(bsonObjectIdValue)); + assertTrue(Values.isBsonBinaryData(bsonBinaryDataValue1)); + assertTrue(Values.isBsonBinaryData(bsonBinaryDataValue2)); + + assertEquals(Values.detectMapRepresentation(minKeyValue), MapRepresentation.MIN_KEY); + assertEquals(Values.detectMapRepresentation(maxKeyValue), MapRepresentation.MAX_KEY); + assertEquals(Values.detectMapRepresentation(int32Value), MapRepresentation.INT32); + assertEquals(Values.detectMapRepresentation(decimal128), MapRepresentation.DECIMAL128); + assertEquals(Values.detectMapRepresentation(regexValue), MapRepresentation.REGEX); + assertEquals( + Values.detectMapRepresentation(bsonTimestampValue), MapRepresentation.BSON_TIMESTAMP); + assertEquals( + Values.detectMapRepresentation(bsonObjectIdValue), MapRepresentation.BSON_OBJECT_ID); + assertEquals( + Values.detectMapRepresentation(bsonBinaryDataValue1), MapRepresentation.BSON_BINARY); + assertEquals( + Values.detectMapRepresentation(bsonBinaryDataValue2), MapRepresentation.BSON_BINARY); + } + /** Small helper class that uses ProtoValues for equals() and compareTo(). 
*/ static class EqualsWrapper implements Comparable { final Value proto; diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/remote/RemoteSerializerTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/remote/RemoteSerializerTest.java index 52eec0ac4cd..d2c31db9623 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/remote/RemoteSerializerTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/remote/RemoteSerializerTest.java @@ -39,9 +39,15 @@ import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; +import com.google.firebase.firestore.BsonBinaryData; +import com.google.firebase.firestore.BsonObjectId; +import com.google.firebase.firestore.BsonTimestamp; import com.google.firebase.firestore.DocumentReference; import com.google.firebase.firestore.FieldValue; import com.google.firebase.firestore.GeoPoint; +import com.google.firebase.firestore.MaxKey; +import com.google.firebase.firestore.MinKey; +import com.google.firebase.firestore.RegexValue; import com.google.firebase.firestore.core.ArrayContainsAnyFilter; import com.google.firebase.firestore.core.FieldFilter; import com.google.firebase.firestore.core.InFilter; @@ -329,6 +335,153 @@ public void testEncodesVectorValue() { assertRoundTrip(model, proto, Value.ValueTypeCase.MAP_VALUE); } + @Test + public void testEncodesBsonObjectIds() { + Value model = wrap(new BsonObjectId("foo")); + + Value proto = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields("__oid__", Value.newBuilder().setStringValue("foo").build()) + .build()) + .build(); + + assertRoundTrip(model, proto, Value.ValueTypeCase.MAP_VALUE); + } + + @Test + public void testEncodesBsonTimestamps() { + Value model = wrap(new BsonTimestamp(12345, 67)); + + Value proto = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + "__request_timestamp__", + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + "seconds", + Value.newBuilder().setIntegerValue(12345).build()) + .putFields( + "increment", Value.newBuilder().setIntegerValue(67).build()) + .build()) + .build()) + .build()) + .build(); + + assertRoundTrip(model, proto, Value.ValueTypeCase.MAP_VALUE); + } + + @Test + public void testEncodesBsonBinaryData() { + Value model = wrap(BsonBinaryData.fromBytes(127, new byte[] {1, 2, 3})); + + Value proto = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + "__binary__", + Value.newBuilder() + .setBytesValue(ByteString.copyFrom(new byte[] {127, 1, 2, 3})) + .build()) + .build()) + .build(); + + assertRoundTrip(model, proto, Value.ValueTypeCase.MAP_VALUE); + } + + @Test + public void testEncodesRegexValues() { + Value model = wrap(new RegexValue("^foo", "i")); + Value proto = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + "__regex__", + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + "pattern", + Value.newBuilder().setStringValue("^foo").build()) + .putFields( + "options", Value.newBuilder().setStringValue("i").build()) + .build()) + .build()) + .build()) + .build(); + + assertRoundTrip(model, proto, Value.ValueTypeCase.MAP_VALUE); + } + + @Test + public void testEncodesInt32Values() { + Value model = wrap(new com.google.firebase.firestore.Int32Value(12345)); + + Value proto = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields("__int__", Value.newBuilder().setIntegerValue(12345).build()) + .build()) + 
.build(); + + assertRoundTrip(model, proto, Value.ValueTypeCase.MAP_VALUE); + } + + @Test + public void testEncodesDecimal128Values() { + Value model = wrap(new com.google.firebase.firestore.Decimal128Value("1e3")); + + Value proto = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields("__decimal128__", Value.newBuilder().setStringValue("1e3").build()) + .build()) + .build(); + + assertRoundTrip(model, proto, Value.ValueTypeCase.MAP_VALUE); + } + + @Test + public void testEncodesMinKey() { + Value model = wrap(MinKey.instance()); + + Value proto = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + "__min__", Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build()) + .build()) + .build(); + + assertRoundTrip(model, proto, Value.ValueTypeCase.MAP_VALUE); + } + + @Test + public void testEncodesMaxKey() { + Value model = wrap(MaxKey.instance()); + + Value proto = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + "__max__", Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build()) + .build()) + .build(); + + assertRoundTrip(model, proto, Value.ValueTypeCase.MAP_VALUE); + } + @Test public void testEncodeDeleteMutation() { Mutation mutation = deleteMutation("docs/1");