@@ -67,7 +67,8 @@ import {
   FirestoreError,
   QuerySnapshot,
   vector,
-  getDocsFromServer
+  getDocsFromServer,
+  Bytes
 } from '../util/firebase_export';
 import {
   apiDescribe,
@@ -80,7 +81,8 @@ import {
   withNamedTestDbsOrSkipUnlessUsingEmulator,
   toDataArray,
   checkOnlineAndOfflineResultsMatch,
-  toIds
+  toIds,
+  withEmptyTestCollection
 } from '../util/helpers';
 import { DEFAULT_SETTINGS, DEFAULT_PROJECT_ID } from '../util/settings';
 
@@ -2691,4 +2693,112 @@ apiDescribe('Database', persistence => {
       }
     );
   });
+
+  describe('Test large documents', () => {
+    /**
+     * Returns document data with a single `blob` field whose size makes the
+     * document contain the largest number of bytes allowed in a Firestore document.
+     */
+    function getLargestDocContent(): object {
+      const MAX_BYTES_PER_FIELD_VALUE = 1048487;
+      // Subtract 8 for '__name__', 20 for its value, and 4 for 'blob'.
+      const numBytesToUse = MAX_BYTES_PER_FIELD_VALUE - 8 - 20 - 4;
+
+      const bytes = new Uint8Array(numBytesToUse);
+      for (let i = 0; i < bytes.length; i++) {
+        // Fill with a random byte (0-255).
+        bytes[i] = Math.floor(Math.random() * 256);
+      }
+
+      return { blob: Bytes.fromUint8Array(bytes) };
+    }
+
+    it('can CRUD and query large documents', async () => {
+      return withEmptyTestCollection(persistence, async coll => {
+        const docRef = doc(coll);
+        const data = getLargestDocContent();
+
+        // Set
+        await setDoc(docRef, data);
+
+        // Get
+        let docSnap = await getDoc(docRef);
+        expect(docSnap.data()).to.deep.equal(data);
+
+        // Update
+        const newData = getLargestDocContent();
+        await updateDoc(docRef, newData);
+        docSnap = await getDoc(docRef);
+        expect(docSnap.data()).to.deep.equal(newData);
+
+        // Query
+        const querySnap = await getDocs(coll);
+        expect(querySnap.size).to.equal(1);
+        expect(toDataArray(querySnap)).to.deep.equal([newData]);
+
+        // Delete
+        await deleteDoc(docRef);
+        docSnap = await getDoc(docRef);
+        expect(docSnap.exists()).to.be.false;
+      });
+    });
+
+    it('can CRUD large documents inside transaction', async () => {
+      return withEmptyTestCollection(persistence, async (coll, db) => {
+        const data = getLargestDocContent();
+        const newData = getLargestDocContent();
+
+        const docRef1 = doc(coll, 'doc1');
+        const docRef2 = doc(coll, 'doc2');
+        const docRef3 = doc(coll, 'doc3');
+
+        await setDoc(docRef1, data);
+        await setDoc(docRef3, data);
+
+        await runTransaction(db, async tx => {
+          // Get and update
+          const docSnap = await tx.get(docRef1);
+          expect(docSnap.data()).to.deep.equal(data);
+          tx.update(docRef1, newData);
+
+          // Set
+          tx.set(docRef2, data);
+
+          // Delete
+          tx.delete(docRef3);
+        });
+
+        let docSnap = await getDoc(docRef1);
+        expect(docSnap.data()).to.deep.equal(newData);
+
+        docSnap = await getDoc(docRef2);
+        expect(docSnap.data()).to.deep.equal(data);
+
+        docSnap = await getDoc(docRef3);
+        expect(docSnap.exists()).to.be.false;
+      });
+    }).timeout(10000);
+
+    it('can listen to large query snapshots', async () => {
+      const testDoc = { doc1: getLargestDocContent() };
+      return withTestCollection(persistence, testDoc, async coll => {
+        const storeEvent = new EventsAccumulator<QuerySnapshot>();
+        const unsubscribe = onSnapshot(coll, storeEvent.storeEvent);
+        const watchSnapshot = await storeEvent.awaitEvent();
+        expect(toDataArray(watchSnapshot)).to.deep.equal([testDoc.doc1]);
+        unsubscribe();
+      });
+    });
+
+    it('can listen to large document snapshots', async () => {
+      const testDoc = getLargestDocContent();
+      return withTestDocAndInitialData(persistence, testDoc, async docRef => {
+        const storeEvent = new EventsAccumulator<DocumentSnapshot>();
+        const unsubscribe = onSnapshot(docRef, storeEvent.storeEvent);
+        const watchSnapshot = await storeEvent.awaitEvent();
+        expect(watchSnapshot.data()).to.deep.equal(testDoc);
+        unsubscribe();
+      });
+    });
+  });
 });
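
For reference, the same Bytes round-trip these tests exercise can be reproduced against the public modular API without the test-only helpers (withEmptyTestCollection, EventsAccumulator, etc.). This is a minimal sketch, not part of the PR: the app config is a placeholder, and the collection name and fill pattern are arbitrary.

// Sketch only: demo project config and 'large-docs' collection are assumptions.
import { initializeApp } from 'firebase/app';
import {
  getFirestore,
  collection,
  doc,
  setDoc,
  getDoc,
  deleteDoc,
  Bytes
} from 'firebase/firestore';

async function roundTripLargeBlob(): Promise<void> {
  const app = initializeApp({ projectId: 'demo-project' }); // placeholder config
  const db = getFirestore(app);

  // The largest field value Firestore accepts is 1,048,487 bytes; leave headroom
  // for the document name and the 'blob' field name, as the test above does.
  const payload = new Uint8Array(1048487 - 8 - 20 - 4).fill(0xab);

  // Auto-ID document reference, analogous to doc(coll) in the test.
  const ref = doc(collection(db, 'large-docs'));
  await setDoc(ref, { blob: Bytes.fromUint8Array(payload) });

  const snap = await getDoc(ref);
  const readBack = (snap.get('blob') as Bytes).toUint8Array();
  console.log('lengths match:', readBack.length === payload.length);

  await deleteDoc(ref);
}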