Commit d0fd610

Merge pull request #124 from rhashimoto/file-export

Add export to file demo.

2 parents: 56628ce + b25cbeb

File tree (3 files changed: +148 -8 lines)

  demo/file/index.html
  demo/file/index.js
  demo/file/service-worker.js

demo/file/index.html (4 additions, 0 deletions)

@@ -4,6 +4,7 @@
   <meta charset="utf-8">
   <title>Local file transfer</title>
   <style>
+    button { margin-top: 1em; }
     pre { margin: 0 }
   </style>
   <script type="module" defer src="./index.js"></script>
@@ -13,6 +14,9 @@ <h1>Sample database importer</h1>
   <div>
     <input type="file" id="file-import">
   </div>
+  <div>
+    <button id="file-export" disabled>Download</button>
+  </div>
   <hr>
 </body>
 </html>

demo/file/index.js (39 additions, 8 deletions)

@@ -7,6 +7,33 @@ const DB_NAME = SEARCH_PARAMS.get('db') ?? 'sqlite.db';

 const DBFILE_MAGIC = 'SQLite format 3\x00';

+// Use a service worker for downloading. This is currently the only
+// cross-browser way to stream to a local file.
+navigator.serviceWorker.register('service-worker.js', { type: 'module' });
+(async function() {
+  // Enable the export button when the service worker is responding.
+  while (true) {
+    let delay = 25;
+    const response = await fetch('./export?check=true');
+    if (response.ok) {
+      // @ts-ignore
+      document.getElementById('file-export').disabled = false;
+      return;
+    }
+    await new Promise(resolve => setTimeout(resolve, delay));
+    delay = Math.min(delay * 2, 5000);
+  }
+})();
+
+document.getElementById('file-export').addEventListener('click', async () => {
+  // Fetch from the special URL handled by the service worker. All
+  // the magic happens there.
+  const url = new URL('./export', location.href);
+  url.searchParams.set('idb', IDB_NAME);
+  url.searchParams.set('db', DB_NAME);
+  window.open(url);
+});
+
 document.getElementById('file-import').addEventListener('change', async event => {
   let vfs;
   try {
@@ -16,6 +43,7 @@ document.getElementById('file-import').addEventListener('change', async event =>
     await importDatabase(vfs, DB_NAME, event.target.files[0].stream());
     log('Import complete');

+    // Use a Worker to verify the database with SQLite.
     log('Verifying database integrity');
     const url = new URL('./verifier.js', location.href);
     url.searchParams.set('idb', IDB_NAME);
@@ -45,6 +73,8 @@ document.getElementById('file-import').addEventListener('change', async event =>
  * @param {ReadableStream} stream
  */
 async function importDatabase(vfs, path, stream) {
+  // This generator converts arbitrary sized chunks from the stream
+  // into SQLite pages.
   async function* pagify() {
     /** @type {Uint8Array[]} */ const chunks = [];
     const reader = stream.getReader();
@@ -57,7 +87,7 @@ async function importDatabase(vfs, path, stream) {
       chunks.push(value);
     }

-    // Assemble the file header.
+    // Consolidate the header into a single DataView.
     let copyOffset = 0;
     const header = new DataView(new ArrayBuffer(32));
     for (const chunk of chunks) {
@@ -71,11 +101,12 @@ async function importDatabase(vfs, path, stream) {
       throw new Error('Not a SQLite database file');
     }

-    // Extract page parameters.
+    // Extract page fields.
     const pageSize = (field => field === 1 ? 65536 : field)(header.getUint16(16));
     const pageCount = header.getUint32(28);
     log(`${pageCount} pages, ${pageSize} bytes each, ${pageCount * pageSize} bytes total`);

+    // Yield each page in sequence.
     log('Copying pages...');
     for (let i = 0; i < pageCount; ++i) {
       // Read enough chunks to produce the next page.
@@ -85,7 +116,7 @@ async function importDatabase(vfs, path, stream) {
         chunks.push(value);
       }

-      // Assemble the page.
+      // Assemble the page into a single Uint8Array.
       // TODO: Optimize case where first chunk has >= pageSize bytes.
       let copyOffset = 0;
       const page = new Uint8Array(pageSize);
@@ -94,8 +125,8 @@ async function importDatabase(vfs, path, stream) {
         const src = chunks[0].subarray(0, pageSize - copyOffset);
         const dst = new Uint8Array(page.buffer, copyOffset);
         dst.set(src);
-
         copyOffset += src.byteLength;
+
         if (src.byteLength === chunks[0].byteLength) {
           // All the bytes in the chunk were consumed.
           chunks.shift();
@@ -130,8 +161,8 @@ async function importDatabase(vfs, path, stream) {
     onFinally.push(() => vfs.xUnlock(fileId, VFS.SQLITE_LOCK_SHARED));
     await check(vfs.xLock(fileId, VFS.SQLITE_LOCK_EXCLUSIVE));

-    const empty = new DataView(new ArrayBuffer(4));
-    await vfs.xFileControl(fileId, VFS.SQLITE_FCNTL_BEGIN_ATOMIC_WRITE, empty);
+    const ignored = new DataView(new ArrayBuffer(4));
+    await vfs.xFileControl(fileId, VFS.SQLITE_FCNTL_BEGIN_ATOMIC_WRITE, ignored);

     // Write pages.
     let iOffset = 0;
@@ -140,8 +171,8 @@ async function importDatabase(vfs, path, stream) {
       iOffset += page.byteLength;
     }

-    await vfs.xFileControl(fileId, VFS.SQLITE_FCNTL_COMMIT_ATOMIC_WRITE, empty);
-    await vfs.xFileControl(fileId, VFS.SQLITE_FCNTL_SYNC, empty);
+    await vfs.xFileControl(fileId, VFS.SQLITE_FCNTL_COMMIT_ATOMIC_WRITE, ignored);
+    await vfs.xFileControl(fileId, VFS.SQLITE_FCNTL_SYNC, ignored);
     await vfs.xSync(fileId, VFS.SQLITE_SYNC_NORMAL);
   } finally {
     while (onFinally.length) {
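
One thing to note in the readiness poll above: `delay` is declared inside the `while` loop, so every retry waits the initial 25 ms and the `Math.min(delay * 2, 5000)` doubling never takes effect. If a growing backoff is the intent, a minimal sketch (same `./export?check=true` probe and `file-export` button as in the demo, not a drop-in for the committed code) would hoist the declaration out of the loop:

// Sketch only: exponential backoff for the service worker readiness probe.
// Uses the same './export?check=true' endpoint and 'file-export' button id
// as the demo above.
(async function() {
  let delay = 25;  // hoisted so the delay persists across retries
  while (true) {
    const response = await fetch('./export?check=true');
    if (response.ok) {
      // The service worker is intercepting requests; enable the download button.
      document.getElementById('file-export').disabled = false;
      return;
    }
    await new Promise(resolve => setTimeout(resolve, delay));
    delay = Math.min(delay * 2, 5000);  // 25 ms, 50 ms, 100 ms, ... capped at 5 s
  }
})();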

demo/file/service-worker.js (new file, 105 additions, 0 deletions)

@@ -0,0 +1,105 @@
+import * as VFS from "../../src/VFS.js";
+import { IDBBatchAtomicVFS } from "../../src/examples/IDBBatchAtomicVFS.js";
+
+// Install the service worker as soon as possible.
+globalThis.addEventListener('install', (/** @type {ExtendableEvent} */ event) => {
+  event.waitUntil(globalThis.skipWaiting());
+});
+globalThis.addEventListener('activate', (/** @type {ExtendableEvent} */ event) => {
+  event.waitUntil(globalThis.clients.claim());
+});
+
+globalThis.addEventListener('fetch', async (/** @type {FetchEvent} */ event) => {
+  const url = new URL(event.request.url);
+  if (!url.href.includes(globalThis.registration.scope)) return;
+  if (!url.pathname.endsWith('export')) return;
+
+  if (url.searchParams.has('check')) {
+    return event.respondWith(new Response('OK'));
+  }
+
+  const vfs = new IDBBatchAtomicVFS(url.searchParams.get('idb'));
+  const path = url.searchParams.get('db');
+  const source = new DatabaseSource(vfs, path);
+  event.waitUntil(source.isDone.finally(() => vfs.close()));
+  return event.respondWith(new Response(new ReadableStream(source), {
+    headers: {
+      "Content-Type": 'application/octet-stream',
+      "Content-Disposition": `attachment; filename=sqlite.db`
+    }
+  }));
+});
+
+class DatabaseSource {
+  isDone;
+
+  #vfs;
+  #path;
+  #fileId = Math.floor(Math.random() * 0x100000000);
+  #iOffset = 0;
+  #bytesRemaining = 0;
+
+  #onDone = [];
+  #resolve;
+  #reject;
+
+  constructor(vfs, path) {
+    this.#vfs = vfs;
+    this.#path = path;
+    this.isDone = new Promise((resolve, reject) => {
+      this.#resolve = resolve;
+      this.#reject = reject;
+    }).finally(async () => {
+      while (this.#onDone.length) {
+        await this.#onDone.pop()();
+      }
+    });
+  }
+
+  async start(controller) {
+    try {
+      // Open the file for reading.
+      const flags = VFS.SQLITE_OPEN_MAIN_DB | VFS.SQLITE_OPEN_READONLY;
+      await check(this.#vfs.xOpen(this.#path, this.#fileId, flags, {setInt32(){}}));
+      this.#onDone.push(() => this.#vfs.xClose(this.#fileId));
+      await check(this.#vfs.xLock(this.#fileId, VFS.SQLITE_LOCK_SHARED));
+      this.#onDone.push(() => this.#vfs.xUnlock(this.#fileId, VFS.SQLITE_LOCK_NONE));
+
+      // Get the file size.
+      const fileSize = new DataView(new ArrayBuffer(8));
+      await check(this.#vfs.xFileSize(this.#fileId, fileSize));
+      this.#bytesRemaining = Number(fileSize.getBigUint64(0, true));
+    } catch (e) {
+      controller.error(e);
+      this.#reject(e);
+    }
+  }
+
+  async pull(controller) {
+    try {
+      const buffer = new Uint8Array(Math.min(this.#bytesRemaining, 65536));
+      await check(this.#vfs.xRead(this.#fileId, buffer, this.#iOffset));
+      controller.enqueue(buffer);
+
+      this.#iOffset += buffer.byteLength;
+      this.#bytesRemaining -= buffer.byteLength;
+      if (this.#bytesRemaining === 0) {
+        controller.close();
+        this.#resolve();
+      }
+    } catch (e) {
+      controller.error(e);
+      this.#reject(e);
+    }
+  }
+
+  cancel(reason) {
+    this.#reject(new Error(reason));
+  }
+};
+
+async function check(code) {
+  if (await code !== VFS.SQLITE_OK) {
+    throw new Error(`Error code: ${code}`);
+  }
+}
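
`DatabaseSource` above is handed directly to `new ReadableStream(...)` as its underlying source, so the browser drives the download by calling `start()` once, then `pull()` repeatedly until the source calls `controller.close()`, and `cancel()` if the user aborts. A rough sketch of that underlying-source contract, using a hypothetical in-memory `MemorySource` rather than the VFS calls in the demo:

// Sketch of the underlying-source shape that DatabaseSource implements.
// MemorySource is hypothetical; only the start/pull/cancel contract mirrors
// what the service worker relies on.
class MemorySource {
  #data;
  #offset = 0;

  constructor(data) {
    this.#data = data;  // a Uint8Array to stream out
  }

  start(controller) {
    // One-time setup; DatabaseSource opens and read-locks the SQLite file here.
  }

  pull(controller) {
    // Called whenever the stream wants more data; enqueue one chunk per call.
    const chunk = this.#data.subarray(this.#offset, this.#offset + 65536);
    controller.enqueue(chunk);
    this.#offset += chunk.byteLength;
    if (this.#offset >= this.#data.byteLength) controller.close();
  }

  cancel(reason) {
    // The consumer gave up (e.g. the download was cancelled); release resources.
  }
}

// Usage: stream 1 MiB of zeros in 64 KiB chunks.
const stream = new ReadableStream(new MemorySource(new Uint8Array(1 << 20)));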
