Skip to content

Commit 48aa9b8

Browse files
cmdcolin and claude committed
add publish workflow with npm trusted publishing
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
1 parent a1fdaa7 commit 48aa9b8

5 files changed

Lines changed: 44 additions & 20 deletions

File tree

src/bgzfBlockScan.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -20,8 +20,8 @@ export function scanBgzfBlocks(
2020

2121
while (offset + BGZF_MIN_BLOCK_SIZE <= input.length) {
2222
if (
23-
input[offset] !== 0x1f ||
24-
input[offset + 1] !== 0x8b ||
23+
input[offset] !== 0x1F ||
24+
input[offset + 1] !== 0x8B ||
2525
input[offset + 2] !== 8 ||
2626
input[offset + 3] !== 4
2727
) {

src/unzip.ts

Lines changed: 2 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -6,20 +6,8 @@ import {
66
decompressAll,
77
decompressChunkSlice,
88
} from './wasm/bgzf-wasm-inlined.js'
9-
import type { BgzfWorkerPool } from './workerPool.ts'
10-
11-
export interface BlockCache {
12-
get(key: string): { buffer: Uint8Array; bytesRead: number } | undefined
13-
set(key: string, value: { buffer: Uint8Array; bytesRead: number }): void
14-
}
159

16-
export interface Filehandle {
17-
read(
18-
length: number,
19-
position: number,
20-
opts?: Record<string, unknown>,
21-
): Promise<Uint8Array>
22-
}
10+
import type { BgzfWorkerPool } from './workerPool.ts'
2311

2412
interface VirtualOffset {
2513
blockPosition: number
@@ -31,7 +19,7 @@ interface Chunk {
3119
}
3220

3321
function hasGzipHeader(data: Uint8Array) {
34-
return data.length >= 2 && data[0] === 0x1f && data[1] === 0x8b
22+
return data.length >= 2 && data[0] === 0x1F && data[1] === 0x8B
3523
}
3624

3725
async function decompressGzip(inputData: Uint8Array) {

src/workerPool.ts

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -197,8 +197,6 @@ export async function createBgzfWorkerPool(
197197
const numW = workers.length
198198
const blocksPerWorker = Math.ceil(blocks.length / numW)
199199

200-
// Dispatch contiguous byte ranges — each worker calls decompressAll
201-
// on its slice (single WASM call, single Decompressor, single output Vec)
202200
const rangeInfos: { startBlock: number; endBlock: number }[] = []
203201
const promises: Promise<RangeResult>[] = []
204202

test/browser/index.html

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@
3636
// Warmup
3737
for (let i = 0; i < 3; i++) {
3838
await unzipChunkSlice(data, chunk)
39-
await unzipChunkSlice(data, chunk, undefined, pool)
39+
await unzipChunkSlice(data, chunk, pool)
4040
}
4141

4242
const seqStart = performance.now()
@@ -144,7 +144,7 @@
144144
],
145145
]) {
146146
const sequential = await unzipChunkSlice(data, chunk)
147-
const parallel = await unzipChunkSlice(data, chunk, undefined, pool)
147+
const parallel = await unzipChunkSlice(data, chunk, pool)
148148

149149
const buffersMatch =
150150
sequential.buffer.length === parallel.buffer.length &&

test/workerPool.test.ts

Lines changed: 38 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -41,6 +41,44 @@ test('scanBgzfBlocks on T_ko.2bit.gz', () => {
4141
expect(totalDecompressed).toBeGreaterThan(0)
4242
})
4343

44+
test('scanBgzfBlocks returns empty for empty input', () => {
45+
const blocks = scanBgzfBlocks(new Uint8Array(0), 0, 0)
46+
expect(blocks).toEqual([])
47+
})
48+
49+
test('scanBgzfBlocks returns empty for input shorter than min block size', () => {
50+
const blocks = scanBgzfBlocks(new Uint8Array(25), 0, 100)
51+
expect(blocks).toEqual([])
52+
})
53+
54+
test('scanBgzfBlocks stops on truncated block', () => {
55+
const testData = fs.readFileSync(require.resolve('./data/paired.bam'))
56+
const allBlocks = scanBgzfBlocks(testData, 0, testData.length)
57+
const firstBlockSize = allBlocks[0]!.compressedSize
58+
59+
// truncate in the middle of the second block
60+
const truncated = testData.subarray(0, firstBlockSize + 10)
61+
const blocks = scanBgzfBlocks(truncated, 0, truncated.length)
62+
expect(blocks.length).toBe(1)
63+
expect(blocks[0]!.compressedSize).toBe(firstBlockSize)
64+
})
65+
66+
test('scanBgzfBlocks stops on non-bgzf data', () => {
67+
const blocks = scanBgzfBlocks(new Uint8Array(1000), 0, 1000)
68+
expect(blocks).toEqual([])
69+
})
70+
71+
test('scanBgzfBlocks includes block at maxBlockPosition', () => {
72+
const testData = fs.readFileSync(require.resolve('./data/paired.bam'))
73+
const allBlocks = scanBgzfBlocks(testData, 0, testData.length)
74+
// set maxBlockPosition to exactly the second block's position
75+
const secondBlockPos = allBlocks[1]!.filePosition
76+
const blocks = scanBgzfBlocks(testData, 0, secondBlockPos)
77+
// should include the block at maxBlockPosition (inclusive)
78+
expect(blocks.length).toBe(2)
79+
expect(blocks[1]!.filePosition).toBe(secondBlockPos)
80+
})
81+
4482
test('scanBgzfBlocks with large filePosition offset', () => {
4583
const testData = fs.readFileSync(require.resolve('./data/paired.bam'))
4684
const startPos = 5_000_000_000_000

0 commit comments

Comments (0)