[Update] powersync sqlite core (#3)

* Work around stack overflow in debug build.

* Handle 64-bit arguments with Emscripten legalization.

* Export all SQLite3 API functions, plus Emscripten utilities.

* Update issue templates

* Update follow-redirects per Dependabot.

* Fix rhashimoto#143. Handle detached buffers in IDBBatchAtomicVFS.

* Bump package version.

* Fix rhashimoto#143 for Safari (no ArrayBuffer.prototype.detached).

* Bump package version.

* Updated powersync-sqlite-core to v0.1.6

* Updated from upstream

* Publish packages on workflow dispatch

---------

Co-authored-by: Roy Hashimoto <[email protected]>
Co-authored-by: Roy Hashimoto <[email protected]>
3 people authored Feb 1, 2024
1 parent 760a35d commit fabeb50
Showing 17 changed files with 590 additions and 244 deletions.
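Both rhashimoto#143 bullets above boil down to detecting that the ArrayBuffer behind a caller-supplied view was detached when Asyncify grew the WebAssembly memory. Newer engines expose `ArrayBuffer.prototype.detached`, but Safari does not, so the diffs below pair that property with a zero-byte-length check. A minimal sketch of the idea, using a hypothetical `isLikelyDetached` helper that is not part of this package's API:

```js
// Hypothetical helper: it only illustrates the detection strategy; the diffs
// below inline the equivalent condition instead of calling a function.
function isLikelyDetached(buffer) {
  // Engines that implement ArrayBuffer.prototype.detached answer directly.
  if (typeof buffer.detached === 'boolean') return buffer.detached;
  // Safari fallback: a detached ArrayBuffer always reports byteLength === 0,
  // so the diffs below treat an empty buffer the same as a detached one.
  return buffer.byteLength === 0;
}
```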
5 changes: 5 additions & 0 deletions .changeset/gorgeous-starfishes-sparkle.md
@@ -0,0 +1,5 @@
---
"@journeyapps/wa-sqlite": patch
---

Updated from upstream changes
5 changes: 5 additions & 0 deletions .changeset/rare-panthers-study.md
@@ -0,0 +1,5 @@
---
"@journeyapps/wa-sqlite": patch
---

Updated powersync-sqlite-core to v0.1.6
10 changes: 10 additions & 0 deletions .github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,10 @@
---
name: Bug report
about: Report potential problems in project code
title: ''
labels: ''
assignees: ''

---

In this project, Issues are only for possible bugs in project code. Please don't ask for help debugging _your_ code; I have plenty of challenges debugging _my_ code. 🥲
4 changes: 2 additions & 2 deletions .github/workflows/dev-packages.yml
@@ -2,7 +2,7 @@
# Packages are versioned as `0.0.0-{tag}-DATETIMESTAMP`
name: Packages Deploy

on: push
on: workflow_dispatch

jobs:
publish:
@@ -14,7 +14,7 @@ jobs:
- name: Setup NodeJS
uses: actions/setup-node@v2
with:
node-version: 18
node-version: 16

- name: Setup Yarn
run: |
4 changes: 2 additions & 2 deletions .github/workflows/release.yml
@@ -15,10 +15,10 @@ jobs:
- name: Checkout Repo
uses: actions/checkout@v3

- name: Setup Node.js 18
- name: Setup Node.js 16
uses: actions/setup-node@v3
with:
node-version: 18
node-version: 16

- name: Install Dependencies
run: yarn
2 changes: 1 addition & 1 deletion Makefile
@@ -56,7 +56,7 @@ EMFLAGS_COMMON = \

EMFLAGS_DEBUG = \
-s ASSERTIONS=1 \
-g \
-g -Oz \
$(EMFLAGS_COMMON)

EMFLAGS_DIST = \
234 changes: 134 additions & 100 deletions dist/wa-sqlite-async.mjs

Large diffs are not rendered by default.

Binary file modified dist/wa-sqlite-async.wasm
Binary file not shown.
225 changes: 130 additions & 95 deletions dist/wa-sqlite.mjs

Large diffs are not rendered by default.

Binary file modified dist/wa-sqlite.wasm
Binary file not shown.
68 changes: 63 additions & 5 deletions src/examples/IDBBatchAtomicVFS.js
@@ -58,6 +58,12 @@ export class IDBBatchAtomicVFS extends VFS.Base {
#taskTimestamp = performance.now();
#pendingAsync = new Set();

// Asyncify can grow WebAssembly memory during an asynchronous call.
// If this happens, then any array buffer arguments will be detached.
// The workaround is when finding a detached buffer, set this handler
// function to process the new buffer outside handlerAsync().
#growthHandler = null;

constructor(idbDatabaseName = 'wa-sqlite', options = DEFAULT_OPTIONS) {
super();
this.name = idbDatabaseName;
@@ -84,7 +90,7 @@ export class IDBBatchAtomicVFS extends VFS.Base {
* @returns {number}
*/
xOpen(name, fileId, flags, pOutFlags) {
return this.handleAsync(async () => {
const result = this.handleAsync(async () => {
if (name === null) name = `null_${fileId}`;
log(`xOpen ${name} 0x${fileId.toString(16)} 0x${flags.toString(16)}`);

@@ -118,13 +124,25 @@
}
}
});

// @ts-ignore
if (pOutFlags.buffer.detached || !pOutFlags.buffer.byteLength) {
pOutFlags = new DataView(new ArrayBuffer(4));
this.#growthHandler = (pOutFlagsNew) => {
pOutFlagsNew.setInt32(0, pOutFlags.getInt32(0, true), true);
};
}
pOutFlags.setInt32(0, flags & VFS.SQLITE_OPEN_READONLY, true);
return VFS.SQLITE_OK;
} catch (e) {
console.error(e);
return VFS.SQLITE_CANTOPEN;
}
});

this.#growthHandler?.(pOutFlags);
this.#growthHandler = null;
return result;
}

/**
@@ -160,7 +178,8 @@
* @returns {number}
*/
xRead(fileId, pData, iOffset) {
return this.handleAsync(async () => {
const byteLength = pData.byteLength;
const result = this.handleAsync(async () => {
const file = this.#mapIdToFile.get(fileId);
log(`xRead ${file.path} ${pData.byteLength} ${iOffset}`);

@@ -170,6 +189,15 @@
// one case - rollback after journal spill - where reads cross
// write boundaries so we have to allow for that.
const result = await this.#idb.run('readonly', async ({blocks}) => {
// @ts-ignore
if (pData.buffer.detached || !pData.buffer.byteLength) {
// WebAssembly memory has grown, invalidating our buffer. Use
// a temporary buffer and copy after this asynchronous call
// completes.
pData = new Uint8Array(byteLength);
this.#growthHandler = (pDataNew) => pDataNew.set(pData);
}

let pDataOffset = 0;
while (pDataOffset < pData.byteLength) {
// Fetch the IndexedDB block for this file location.
@@ -200,6 +228,10 @@
return VFS.SQLITE_IOERR;
}
});

this.#growthHandler?.(pData);
this.#growthHandler = null;
return result;
}

/**
@@ -221,7 +253,7 @@
}
await new Promise(resolve => setTimeout(resolve));

const result = this.#xWriteHelper(fileId, pData, iOffset);
const result = this.#xWriteHelper(fileId, pData.slice(), iOffset);
this.#taskTimestamp = performance.now();
return result;
});
@@ -436,14 +468,28 @@ export class IDBBatchAtomicVFS extends VFS.Base {
* @returns {number}
*/
xCheckReservedLock(fileId, pResOut) {
return this.handleAsync(async () => {
const result = this.handleAsync(async () => {
const file = this.#mapIdToFile.get(fileId);
log(`xCheckReservedLock ${file.path}`);

const isReserved = await file.locks.isSomewhereReserved();
function setOutput(pResOut) {
};

// @ts-ignore
if (pResOut.buffer.detached || !pResOut.buffer.byteLength) {
pResOut = new DataView(new ArrayBuffer(4));
this.#growthHandler = (pResOutNew) => {
pResOutNew.setInt32(0, pResOut.getInt32(0, true), true);
};
}
pResOut.setInt32(0, isReserved ? 1 : 0, true);
return VFS.SQLITE_OK;
});

this.#growthHandler?.(pResOut);
this.#growthHandler = null;
return result;
}

/**
@@ -611,7 +657,7 @@ export class IDBBatchAtomicVFS extends VFS.Base {
* @returns {number}
*/
xAccess(name, flags, pResOut) {
return this.handleAsync(async () => {
const result = this.handleAsync(async () => {
try {
const path = new URL(name, 'file://localhost/').pathname;
log(`xAccess ${path} ${flags}`);
@@ -620,13 +666,25 @@
const key = await this.#idb.run('readonly', ({blocks}) => {
return blocks.getKey(this.#bound({path}, 0));
});

// @ts-ignore
if (pResOut.buffer.detached || !pResOut.buffer.byteLength) {
pResOut = new DataView(new ArrayBuffer(4));
this.#growthHandler = (pResOutNew) => {
pResOutNew.setInt32(0, pResOut.getInt32(0, true), true);
}
}
pResOut.setInt32(0, key ? 1 : 0, true);
return VFS.SQLITE_OK;
} catch (e) {
console.error(e);
return VFS.SQLITE_IOERR;
}
});

this.#growthHandler?.(pResOut);
this.#growthHandler = null;
return result;
}

/**
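The IDBBatchAtomicVFS.js hunks above apply one recipe to every method that fills an output argument (xOpen, xRead, xCheckReservedLock, xAccess): if the caller's view turns out to be detached inside the asynchronous body, the result is staged in a scratch buffer and a `#growthHandler` records how to copy it out; once `handleAsync` returns on the call whose argument views reference the current memory, the handler is applied and cleared. `xWrite` sidesteps the issue by copying its payload with `pData.slice()` before any asynchronous work. Below is a condensed sketch of the output-argument pattern; `xExample` and `lookUpSomething` are illustrative stand-ins, and the `handleAsync` replay behaviour is inferred from the diff rather than from documentation.

```js
// Condensed sketch of the pattern used by xOpen/xRead/xCheckReservedLock/xAccess.
// xExample and lookUpSomething are illustrative, not part of the real class;
// #growthHandler, handleAsync, and VFS.SQLITE_OK are as in the diff above.
xExample(fileId, pOut) {
  const result = this.handleAsync(async () => {
    // Awaiting here can let Asyncify grow WebAssembly memory, which
    // detaches the buffer behind pOut.
    const value = await this.lookUpSomething(fileId); // hypothetical async step

    if (pOut.buffer.detached || !pOut.buffer.byteLength) {
      // Stage the result in a scratch view and remember how to copy it
      // into a live view once one is available again.
      pOut = new DataView(new ArrayBuffer(4));
      this.#growthHandler = (pOutLive) => {
        pOutLive.setInt32(0, pOut.getInt32(0, true), true);
      };
    }
    pOut.setInt32(0, value, true);
    return VFS.SQLITE_OK;
  });

  // On the call that actually returns to SQLite, pOut is backed by the
  // current memory, so any staged value is copied out here.
  this.#growthHandler?.(pOut);
  this.#growthHandler = null;
  return result;
}
```

The zero-byte-length test doubles as the Safari fallback sketched near the top of this page, since `ArrayBuffer.prototype.detached` is unavailable there.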