@@ -10,6 +10,12 @@ import { loadTSV, loadTSVGZ } from './tsv.ts'
 import { streamFromString } from '../tests/utils.ts'
 import { ColumnsMap } from '../types/columns.ts'

+function compressedStreamFromString(str: string): ReadableStream<Uint8Array<ArrayBuffer>> {
+  return streamFromString(str).pipeThrough(new CompressionStream('gzip')) as ReadableStream<
+    Uint8Array<ArrayBuffer>
+  >
+}
+
 Deno.test('TSV loading', async (t) => {
   await t.step('Empty file produces empty map', async () => {
     const file = pathToFile('/empty.tsv')
@@ -182,7 +188,7 @@ Deno.test('TSV loading', async (t) => {
 Deno.test('TSVGZ loading', async (t) => {
   await t.step('No header and empty file produces empty map', async () => {
     const file = pathToFile('/empty.tsv.gz')
-    file.stream = streamFromString('').pipeThrough(new CompressionStream('gzip'))
+    file.stream = compressedStreamFromString('')

     const map = await loadTSVGZ(file, [])
     // map.size looks for a column called map, so work around it
@@ -191,7 +197,7 @@ Deno.test('TSVGZ loading', async (t) => {

   await t.step('Empty file produces header-only map', async () => {
     const file = pathToFile('/empty.tsv.gz')
-    file.stream = streamFromString('').pipeThrough(new CompressionStream('gzip'))
+    file.stream = compressedStreamFromString('')

     const map = await loadTSVGZ(file, ['a', 'b', 'c'])
     assertEquals(map.a, [])
@@ -201,15 +207,15 @@ Deno.test('TSVGZ loading', async (t) => {

   await t.step('Single column file produces single column maps', async () => {
     const file = pathToFile('/single_column.tsv')
-    file.stream = streamFromString('1\n2\n3\n').pipeThrough(new CompressionStream('gzip'))
+    file.stream = compressedStreamFromString('1\n2\n3\n')

     const map = await loadTSVGZ(file, ['a'])
     assertEquals(map.a, ['1', '2', '3'])
   })

   await t.step('Mismatched header length throws issue', async () => {
     const file = pathToFile('/single_column.tsv.gz')
-    file.stream = streamFromString('1\n2\n3\n').pipeThrough(new CompressionStream('gzip'))
+    file.stream = compressedStreamFromString('1\n2\n3\n')

     try {
       await loadTSVGZ(file, ['a', 'b'])
@@ -220,17 +226,15 @@ Deno.test('TSVGZ loading', async (t) => {

   await t.step('Missing final newline is ignored', async () => {
     const file = pathToFile('/missing_newline.tsv.gz')
-    file.stream = streamFromString('1\n2\n3').pipeThrough(new CompressionStream('gzip'))
+    file.stream = compressedStreamFromString('1\n2\n3')

     const map = await loadTSVGZ(file, ['a'])
     assertEquals(map.a, ['1', '2', '3'])
   })

   await t.step('Empty row throws issue', async () => {
     const file = pathToFile('/empty_row.tsv.gz')
-    file.stream = streamFromString('1\t2\t3\n\n4\t5\t6\n').pipeThrough(
-      new CompressionStream('gzip'),
-    )
+    file.stream = compressedStreamFromString('1\t2\t3\n\n4\t5\t6\n')

     try {
       await loadTSVGZ(file, ['a', 'b', 'c'])
@@ -255,35 +259,33 @@ Deno.test('TSVGZ loading', async (t) => {
     // Use 1500 to avoid overlap with default initial capacity
     const headers = ['a', 'b', 'c']
     const text = '1\t2\t3\n'.repeat(1500)
-    file.stream = streamFromString(text).pipeThrough(new CompressionStream('gzip'))
+    file.stream = compressedStreamFromString(text)

     let map = await loadTSVGZ(file, headers, 0)
     assertEquals(map.a, [])
     assertEquals(map.b, [])
     assertEquals(map.c, [])

-    file.stream = streamFromString(text).pipeThrough(new CompressionStream('gzip'))
+    file.stream = compressedStreamFromString(text)
     map = await loadTSVGZ(file, headers, 1)
     assertEquals(map.a, ['1'])
     assertEquals(map.b, ['2'])
     assertEquals(map.c, ['3'])

-    file.stream = streamFromString(text).pipeThrough(new CompressionStream('gzip'))
+    file.stream = compressedStreamFromString(text)
     map = await loadTSVGZ(file, headers, 2)
     assertEquals(map.a, ['1', '1'])
     assertEquals(map.b, ['2', '2'])
     assertEquals(map.c, ['3', '3'])

-    file.stream = streamFromString(text).pipeThrough(new CompressionStream('gzip'))
+    file.stream = compressedStreamFromString(text)
     map = await loadTSVGZ(file, headers, -1)
     assertEquals(map.a, Array(1500).fill('1'))
     assertEquals(map.b, Array(1500).fill('2'))
     assertEquals(map.c, Array(1500).fill('3'))

     // Check that maxRows does not truncate shorter files
-    file.stream = streamFromString('1\t2\t3\n4\t5\t6\n7\t8\t9\n').pipeThrough(
-      new CompressionStream('gzip'),
-    )
+    file.stream = compressedStreamFromString('1\t2\t3\n4\t5\t6\n7\t8\t9\n')
     map = await loadTSVGZ(file, headers, 4)
     assertEquals(map.a, ['1', '4', '7'])
     assertEquals(map.b, ['2', '5', '8'])
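The new compressedStreamFromString helper simply centralizes the gzip pipeThrough that each test previously repeated. As a minimal stand-alone sketch (assuming a Blob-based streamFromString stand-in, not the real helper from '../tests/utils.ts'), the compressed output can be round-tripped through DecompressionStream to confirm it is valid gzip:

```ts
// Sketch only: hypothetical stand-in for the project's test utility.
function streamFromString(str: string): ReadableStream<Uint8Array> {
  return new Blob([str]).stream()
}

function compressedStreamFromString(str: string): ReadableStream<Uint8Array> {
  // Compress the string's bytes with the standard CompressionStream Web API.
  return streamFromString(str).pipeThrough(new CompressionStream('gzip'))
}

// Round-trip check: gunzip the compressed stream and compare to the input.
const input = '1\t2\t3\n'
const output = await new Response(
  compressedStreamFromString(input).pipeThrough(new DecompressionStream('gzip')),
).text()
console.assert(output === input)
```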