@@ -7,6 +7,33 @@ const DB_NAME = SEARCH_PARAMS.get('db') ?? 'sqlite.db';
 
 const DBFILE_MAGIC = 'SQLite format 3\x00';
 
+// Use a service worker for downloading. This is currently the only
+// cross-browser way to stream to a local file.
+navigator.serviceWorker.register('service-worker.js', { type: 'module' });
+(async function() {
+  // Enable the export button when the service worker is responding.
+  let delay = 25;
+  while (true) {
+    const response = await fetch('./export?check=true');
+    if (response.ok) {
+      // @ts-ignore
+      document.getElementById('file-export').disabled = false;
+      return;
+    }
+    await new Promise(resolve => setTimeout(resolve, delay));
+    delay = Math.min(delay * 2, 5000);
+  }
+})();
+
+document.getElementById('file-export').addEventListener('click', async () => {
+  // Fetch from the special URL handled by the service worker. All
+  // the magic happens there.
+  const url = new URL('./export', location.href);
+  url.searchParams.set('idb', IDB_NAME);
+  url.searchParams.set('db', DB_NAME);
+  window.open(url);
+});
+
 document.getElementById('file-import').addEventListener('change', async event => {
   let vfs;
   try {
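Note that `let delay = 25;` belongs before the loop; declared inside it, the backoff would reset to 25 ms on every iteration and never grow. The `service-worker.js` registered above is outside this diff. For orientation, here is a minimal sketch of the two behaviors the page depends on: answering the `?check=true` readiness probe, and serving `./export` as a streamed download. The handler shape is standard; `buildDatabaseStream` is a hypothetical stand-in for the real export logic, which would stream pages out of IndexedDB.

```js
// service-worker.js: a sketch, not the actual file from this commit.
self.addEventListener('fetch', event => {
  const url = new URL(event.request.url);
  if (!url.pathname.endsWith('/export')) return;

  if (url.searchParams.has('check')) {
    // Readiness probe polled by the page until it gets a 200.
    event.respondWith(new Response(null, { status: 200 }));
    return;
  }

  // Stream the database as an attachment so the browser writes it to a
  // local file without buffering the whole thing in memory.
  // buildDatabaseStream(idb, db) is hypothetical; the real worker would
  // construct a ReadableStream over the pages stored in IndexedDB.
  const idb = url.searchParams.get('idb');
  const db = url.searchParams.get('db');
  event.respondWith(new Response(buildDatabaseStream(idb, db), {
    headers: {
      'Content-Type': 'application/vnd.sqlite3',
      'Content-Disposition': `attachment; filename="${db}"`
    }
  }));
});
```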
@@ -16,6 +43,7 @@ document.getElementById('file-import').addEventListener('change', async event =>
     await importDatabase(vfs, DB_NAME, event.target.files[0].stream());
     log('Import complete');
 
+    // Use a Worker to verify the database with SQLite.
     log('Verifying database integrity');
     const url = new URL('./verifier.js', location.href);
     url.searchParams.set('idb', IDB_NAME);
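The added comment makes the division of labor explicit: the page only spawns `verifier.js` as a Worker and passes the IndexedDB and database names in the query string. The worker itself is not in this diff; a plausible outline, assuming a module worker and wa-sqlite's usual Factory/VFS API (import paths and class names vary by version), would run `PRAGMA integrity_check`:

```js
// verifier.js: a hedged outline, not the actual worker from this commit.
import SQLiteESMFactory from 'wa-sqlite/dist/wa-sqlite-async.mjs';
import * as SQLite from 'wa-sqlite';
import { IDBBatchAtomicVFS } from 'wa-sqlite/src/examples/IDBBatchAtomicVFS.js';

const params = new URL(location.href).searchParams;

const module = await SQLiteESMFactory();
const sqlite3 = SQLite.Factory(module);
sqlite3.vfs_register(new IDBBatchAtomicVFS(params.get('idb')), true);

const db = await sqlite3.open_v2(params.get('db'));
try {
  // integrity_check returns the single row 'ok' when the file is intact.
  await sqlite3.exec(db, 'PRAGMA integrity_check;', row => postMessage(row[0]));
} finally {
  await sqlite3.close(db);
}
```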
@@ -45,6 +73,8 @@ document.getElementById('file-import').addEventListener('change', async event =>
  * @param {ReadableStream} stream
  */
 async function importDatabase(vfs, path, stream) {
+  // This generator converts arbitrarily sized chunks from the stream
+  // into SQLite pages.
   async function* pagify() {
     /** @type {Uint8Array[]} */ const chunks = [];
     const reader = stream.getReader();
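`pagify` is an async generator: it pulls chunks of whatever size the `ReadableStream` delivers and yields exactly one `Uint8Array` per SQLite page, so the consumer never sees chunk boundaries. A sketch of that consumption pattern, with an illustrative body rather than the one used later in this function:

```js
// How an async generator like pagify() is consumed; the loop body here
// is illustrative only.
async function copyPages(pagify) {
  let iOffset = 0;
  for await (const page of pagify()) {
    // Each iteration receives one complete, in-order page.
    iOffset += page.byteLength;
  }
  return iOffset; // total bytes seen
}
```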
@@ -57,7 +87,7 @@ async function importDatabase(vfs, path, stream) {
       chunks.push(value);
     }
 
-    // Assemble the file header.
+    // Consolidate the header into a single DataView.
     let copyOffset = 0;
     const header = new DataView(new ArrayBuffer(32));
     for (const chunk of chunks) {
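The loop this hunk opens copies the first 32 bytes out of however many chunks they happen to span, so the header fields can be read through one `DataView` regardless of how the stream was chunked. The magic-string test against `DBFILE_MAGIC` falls between this hunk and the next; a sketch of such a check, assuming `header` holds the consolidated bytes:

```js
// Sketch of the header magic test elided between these hunks.
// DBFILE_MAGIC is 'SQLite format 3\x00' (16 bytes at file offset 0).
function hasMagic(/** @type {DataView} */ header) {
  for (let i = 0; i < DBFILE_MAGIC.length; ++i) {
    if (header.getUint8(i) !== DBFILE_MAGIC.charCodeAt(i)) return false;
  }
  return true;
}
```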
@@ -71,11 +101,12 @@ async function importDatabase(vfs, path, stream) {
       throw new Error('Not a SQLite database file');
     }
 
-    // Extract page parameters.
+    // Extract page fields.
     const pageSize = (field => field === 1 ? 65536 : field)(header.getUint16(16));
     const pageCount = header.getUint32(28);
     log(`${pageCount} pages, ${pageSize} bytes each, ${pageCount * pageSize} bytes total`);
 
+    // Yield each page in sequence.
     log('Copying pages...');
     for (let i = 0; i < pageCount; ++i) {
       // Read enough chunks to produce the next page.
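Per the SQLite file format, the big-endian 16-bit field at offset 16 holds the page size, with the special value 1 standing in for 65536 (which does not fit in 16 bits), and the 32-bit field at offset 28 holds the database size in pages. The inline arrow function above decodes the page-size encoding; pulled out for illustration:

```js
// The same decoding as above, extracted for clarity.
const decodePageSize = field => field === 1 ? 65536 : field;
console.assert(decodePageSize(4096) === 4096); // typical page size
console.assert(decodePageSize(1) === 65536);   // 1 encodes 64 KiB pages
```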
@@ -85,7 +116,7 @@ async function importDatabase(vfs, path, stream) {
         chunks.push(value);
       }
 
-      // Assemble the page.
+      // Assemble the page into a single Uint8Array.
       // TODO: Optimize case where first chunk has >= pageSize bytes.
       let copyOffset = 0;
       const page = new Uint8Array(pageSize);
@@ -94,8 +125,8 @@ async function importDatabase(vfs, path, stream) {
         const src = chunks[0].subarray(0, pageSize - copyOffset);
         const dst = new Uint8Array(page.buffer, copyOffset);
         dst.set(src);
-
         copyOffset += src.byteLength;
+
         if (src.byteLength === chunks[0].byteLength) {
           // All the bytes in the chunk were consumed.
           chunks.shift();
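The branch above is the heart of the re-chunking: the head chunk is either fully consumed and dropped, or trimmed to its unread tail with `subarray` (a view, not a copy). A self-contained illustration of the same splicing, assuming the total input length is an exact multiple of the page size:

```js
// Standalone version of the splicing logic, for illustration only.
// Assumes the chunks' total length is a multiple of pageSize.
function* pagesFrom(chunks, pageSize) {
  chunks = chunks.slice();
  while (chunks.length) {
    const page = new Uint8Array(pageSize);
    let copyOffset = 0;
    while (copyOffset < pageSize) {
      const src = chunks[0].subarray(0, pageSize - copyOffset);
      page.set(src, copyOffset);
      copyOffset += src.byteLength;
      if (src.byteLength === chunks[0].byteLength) {
        chunks.shift();                                 // fully consumed
      } else {
        chunks[0] = chunks[0].subarray(src.byteLength); // keep the tail
      }
    }
    yield page;
  }
}

// [1,2] + [3,4,5,6] + [7,8] splices into [1,2,3,4] and [5,6,7,8].
const pages = [...pagesFrom(
  [Uint8Array.of(1, 2), Uint8Array.of(3, 4, 5, 6), Uint8Array.of(7, 8)], 4)];
```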
@@ -130,8 +161,8 @@ async function importDatabase(vfs, path, stream) {
     onFinally.push(() => vfs.xUnlock(fileId, VFS.SQLITE_LOCK_SHARED));
     await check(vfs.xLock(fileId, VFS.SQLITE_LOCK_EXCLUSIVE));
 
-    const empty = new DataView(new ArrayBuffer(4));
-    await vfs.xFileControl(fileId, VFS.SQLITE_FCNTL_BEGIN_ATOMIC_WRITE, empty);
+    const ignored = new DataView(new ArrayBuffer(4));
+    await vfs.xFileControl(fileId, VFS.SQLITE_FCNTL_BEGIN_ATOMIC_WRITE, ignored);
 
     // Write pages.
     let iOffset = 0;
@@ -140,8 +171,8 @@ async function importDatabase(vfs, path, stream) {
       iOffset += page.byteLength;
     }
 
-    await vfs.xFileControl(fileId, VFS.SQLITE_FCNTL_COMMIT_ATOMIC_WRITE, empty);
-    await vfs.xFileControl(fileId, VFS.SQLITE_FCNTL_SYNC, empty);
+    await vfs.xFileControl(fileId, VFS.SQLITE_FCNTL_COMMIT_ATOMIC_WRITE, ignored);
+    await vfs.xFileControl(fileId, VFS.SQLITE_FCNTL_SYNC, ignored);
     await vfs.xSync(fileId, VFS.SQLITE_SYNC_NORMAL);
   } finally {
     while (onFinally.length) {
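The rename from `empty` to `ignored` is about intent only: these opcodes do not read the `DataView` argument. `SQLITE_FCNTL_BEGIN_ATOMIC_WRITE` and `SQLITE_FCNTL_COMMIT_ATOMIC_WRITE` bracket the page writes so a VFS with batch-atomic support can apply them all or nothing. The diff shows only the happy path; SQLite also defines `SQLITE_FCNTL_ROLLBACK_ATOMIC_WRITE` for abandoning the batch, so a fuller version might look like this (hypothetical error handling, not from this commit):

```js
// Hypothetical failure handling around the batch-atomic write.
try {
  await vfs.xFileControl(fileId, VFS.SQLITE_FCNTL_BEGIN_ATOMIC_WRITE, ignored);
  // ... write pages with vfs.xWrite(fileId, page, iOffset) ...
  await vfs.xFileControl(fileId, VFS.SQLITE_FCNTL_COMMIT_ATOMIC_WRITE, ignored);
} catch (e) {
  // Undo everything written since BEGIN_ATOMIC_WRITE.
  await vfs.xFileControl(fileId, VFS.SQLITE_FCNTL_ROLLBACK_ATOMIC_WRITE, ignored);
  throw e;
}
```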