99import { existsSync , readFileSync , renameSync , statSync , unlinkSync , writeFileSync } from "fs" ;
1010import { readdir } from "fs/promises" ;
1111import path from "path" ;
12+ import { gunzipSync } from "zlib" ;
1213import { Zip , ZipDeflate , ZipPassThrough } from "fflate" ;
1314import { createMedia } from "../../../db.js" ;
1415import { createLogger } from "../../../utils/logger.js" ;
1516import { MAX_ATTACH_BYTES , MAX_EDIT_BYTES , MAX_PREVIEW_BYTES , MAX_UPLOAD_BYTES } from "./constants.js" ;
1617import { contentTypeForPath , detectBinary , formatMtime , isImageFile , isTextFile } from "./file-utils.js" ;
1718import { isHiddenPath , resolveWorkspacePath , shouldIgnorePath , toRelativePath } from "./paths.js" ;
// Scoped logger for workspace file-service operations.
const log = createLogger("web.workspace.file-service");
/**
 * Parse the central directory of a ZIP archive held entirely in memory.
 *
 * Scans backwards over the tail of the buffer for the end-of-central-directory
 * (EOCD) record, then walks the central directory headers to collect per-entry
 * metadata. Only classic (non-ZIP64) archives are supported; ZIP64 placeholder
 * values cause an explicit throw so callers can fall back to binary handling
 * instead of rendering a silently empty or wrong listing.
 *
 * @param {Buffer} buffer - Full archive contents.
 * @param {number} [maxEntries=200] - Cap on entries collected; counting continues past it.
 * @returns {{entries: Array<{name: string, compressedSize: number, uncompressedSize: number, isDirectory: boolean}>, totalEntries: number, truncated: boolean}}
 * @throws {Error} If no EOCD record is found, or the archive uses ZIP64 fields.
 */
function parseZipEntries(buffer, maxEntries = 200) {
  const eocdSignature = 0x06054b50;
  const cdSignature = 0x02014b50;
  const minEocdSize = 22;
  const maxCommentSize = 0xffff;
  // The EOCD record sits at the very end of the file, optionally followed by
  // a comment of at most 0xffff bytes — only that window needs scanning.
  const searchStart = Math.max(0, buffer.length - (minEocdSize + maxCommentSize));
  let eocdOffset = -1;
  for (let offset = buffer.length - minEocdSize; offset >= searchStart; offset -= 1) {
    if (buffer.readUInt32LE(offset) === eocdSignature) {
      eocdOffset = offset;
      break;
    }
  }
  if (eocdOffset < 0) {
    throw new Error("ZIP end-of-central-directory record not found");
  }
  const entryCount = buffer.readUInt16LE(eocdOffset + 10);
  const centralDirectoryOffset = buffer.readUInt32LE(eocdOffset + 16);
  // 0xffff / 0xffffffff are ZIP64 placeholder values; without this check a
  // ZIP64 archive would parse as "0 entries" instead of failing loudly.
  if (entryCount === 0xffff || centralDirectoryOffset === 0xffffffff) {
    throw new Error("ZIP64 archives are not supported");
  }
  const entries = [];
  let offset = centralDirectoryOffset;
  let totalEntries = 0;
  while (offset + 46 <= buffer.length && totalEntries < entryCount) {
    if (buffer.readUInt32LE(offset) !== cdSignature)
      break;
    const compressedSize = buffer.readUInt32LE(offset + 20);
    const uncompressedSize = buffer.readUInt32LE(offset + 24);
    const fileNameLength = buffer.readUInt16LE(offset + 28);
    const extraLength = buffer.readUInt16LE(offset + 30);
    const commentLength = buffer.readUInt16LE(offset + 32);
    const externalAttrs = buffer.readUInt32LE(offset + 38);
    const nameStart = offset + 46;
    const nameEnd = nameStart + fileNameLength;
    if (nameEnd > buffer.length)
      break;
    const name = buffer.toString("utf8", nameStart, nameEnd);
    // Directories are marked by a trailing slash or by the Unix S_IFDIR bits
    // stored in the high 16 bits of the external attributes field.
    const isDirectory = name.endsWith("/") || ((externalAttrs >>> 16) & 0o170000) === 0o040000;
    totalEntries += 1;
    if (entries.length < maxEntries) {
      entries.push({
        name,
        compressedSize,
        uncompressedSize,
        isDirectory,
      });
    }
    offset = nameEnd + extraLength + commentLength;
  }
  return {
    entries,
    totalEntries,
    truncated: totalEntries > entries.length,
  };
}
/**
 * Report whether a path names a gzip-compressed tarball, matching the
 * ".tar.gz" and ".tgz" suffixes case-insensitively.
 *
 * @param {string} filePath - Path or filename to inspect.
 * @returns {boolean} True when the path looks like a .tar.gz/.tgz archive.
 */
function isTarGzPath(filePath) {
  const normalized = filePath.toLowerCase();
  return [".tar.gz", ".tgz"].some((suffix) => normalized.endsWith(suffix));
}
/**
 * List the entries of an uncompressed tar stream (ustar/POSIX layout).
 *
 * Walks 512-byte header blocks, stopping at the first all-zero block
 * (end-of-archive padding) or at a header without a name. File data blocks
 * are skipped using the octal size field; sizes are reported uncompressed
 * (compressedSize is always null for tar).
 *
 * @param {Buffer} buffer - Decompressed tar contents.
 * @param {number} [maxEntries=200] - Cap on entries collected; counting continues past it.
 * @returns {{entries: Array<{name: string, compressedSize: null, uncompressedSize: number, isDirectory: boolean}>, totalEntries: number, truncated: boolean}}
 */
function parseTarEntries(buffer, maxEntries = 200) {
  const BLOCK = 512;
  // Read a NUL-terminated text field out of a header block.
  const field = (block, start, end) => block.toString("utf8", start, end).replace(/\0.*$/, "");
  const entries = [];
  let totalEntries = 0;
  let cursor = 0;
  while (cursor + BLOCK <= buffer.length) {
    const header = buffer.subarray(cursor, cursor + BLOCK);
    // An all-zero block marks the end-of-archive padding.
    if (!header.some((byte) => byte !== 0))
      break;
    const baseName = field(header, 0, 100);
    const prefix = field(header, 345, 500);
    const sizeText = field(header, 124, 136).trim();
    const typeFlag = header.toString("utf8", 156, 157) || "0";
    const size = sizeText ? parseInt(sizeText.replace(/\s/g, ""), 8) || 0 : 0;
    // ustar splits long paths across a prefix field and the name field.
    const name = prefix ? `${prefix}/${baseName}` : baseName;
    const isDirectory = typeFlag === "5" || name.endsWith("/");
    if (!name)
      break;
    totalEntries += 1;
    if (entries.length < maxEntries) {
      entries.push({
        name,
        compressedSize: null,
        uncompressedSize: size,
        isDirectory,
      });
    }
    // Advance past the header plus the entry's data rounded up to whole blocks.
    cursor += BLOCK * (1 + Math.ceil(size / BLOCK));
  }
  return {
    entries,
    totalEntries,
    truncated: totalEntries > entries.length,
  };
}
/**
 * Render a human-readable, multi-line text listing for a parsed archive.
 *
 * Output shape: a three-line header (label/path, entry count, archive byte
 * size), a blank separator, one line per entry, and — when the entry list was
 * capped — a trailing truncation note.
 *
 * @param {string} label - Archive kind label, e.g. "ZIP archive".
 * @param {string} relPath - Workspace-relative path of the archive file.
 * @param {{size: number}} stats - fs.Stats-like object for the archive.
 * @param {{entries: Array<{name: string, compressedSize: ?number, uncompressedSize: number, isDirectory: boolean}>, totalEntries: number, truncated: boolean}} parsed
 * @returns {string} The listing joined with "\n".
 */
function formatArchiveListing(label, relPath, stats, parsed) {
  const lines = [];
  lines.push(`${label}: ${relPath}`);
  lines.push(`Entries: ${parsed.totalEntries}`);
  lines.push(`Archive size: ${stats.size} bytes`);
  lines.push("");
  for (const entry of parsed.entries) {
    const kind = entry.isDirectory ? "dir " : "file";
    let sizeLabel = "";
    if (!entry.isDirectory) {
      // Only mention the compressed size when it is known and differs.
      const compressionLabel =
        entry.compressedSize != null && entry.compressedSize !== entry.uncompressedSize
          ? `, ${entry.compressedSize} B compressed`
          : "";
      sizeLabel = ` (${entry.uncompressedSize} B${compressionLabel})`;
    }
    lines.push(`${kind}  ${entry.name}${sizeLabel}`);
  }
  if (parsed.truncated) {
    lines.push("");
    lines.push(`… showing first ${parsed.entries.length} entries of ${parsed.totalEntries}.`);
  }
  return lines.join("\n");
}
19132function normalizeEntryName ( raw ) {
20133 const name = ( raw || "" ) . trim ( ) ;
21134 if ( ! name || name === "." || name === ".." )
@@ -41,6 +154,7 @@ export class WorkspaceFileService {
41154 const relPath = toRelativePath ( targetPath ) ;
42155 const contentType = contentTypeForPath ( targetPath ) ;
43156 const isImage = isImageFile ( targetPath ) ;
157+ const ext = path . extname ( targetPath ) . toLowerCase ( ) ;
44158 if ( isImage ) {
45159 const rawUrl = `/workspace/raw?path=${ encodeURIComponent ( relPath ) } ` ;
46160 return {
@@ -68,6 +182,39 @@ export class WorkspaceFileService {
68182 return { status : 400 , body : { error : "File too large to edit" } } ;
69183 }
70184 const buffer = readFileSync ( targetPath , { encoding : null } ) ;
185+ if ( ! isEditMode && ( ext === ".zip" || isTarGzPath ( targetPath ) ) ) {
186+ try {
187+ const isZip = ext === ".zip" ;
188+ const parsed = isZip ? parseZipEntries ( buffer ) : parseTarEntries ( gunzipSync ( buffer ) ) ;
189+ return {
190+ status : 200 ,
191+ body : {
192+ path : relPath ,
193+ name : path . basename ( targetPath ) ,
194+ kind : "text" ,
195+ content_type : contentType ,
196+ size : stats . size ,
197+ mtime : formatMtime ( stats ) ,
198+ text : formatArchiveListing ( isZip ? "ZIP archive" : "tar.gz archive" , relPath , stats , parsed ) ,
199+ truncated : parsed . truncated ,
200+ } ,
201+ } ;
202+ }
203+ catch {
204+ return {
205+ status : 200 ,
206+ body : {
207+ path : relPath ,
208+ name : path . basename ( targetPath ) ,
209+ kind : "binary" ,
210+ content_type : contentType ,
211+ size : stats . size ,
212+ mtime : formatMtime ( stats ) ,
213+ truncated : false ,
214+ } ,
215+ } ;
216+ }
217+ }
71218 const slice = buffer . subarray ( 0 , maxBytes ) ;
72219 const truncated = buffer . length > maxBytes ;
73220 if ( ! isTextFile ( targetPath ) && detectBinary ( slice ) ) {
@@ -113,7 +260,7 @@ export class WorkspaceFileService {
113260 return { status : 500 , body : { error : "Failed to read file" } } ;
114261 }
115262 }
116- getRaw ( pathParam ) {
263+ getRaw ( pathParam , download = false ) {
117264 const targetPath = resolveWorkspacePath ( pathParam ) ;
118265 if ( ! targetPath )
119266 return { status : 400 , body : "Invalid path" } ;
@@ -123,7 +270,15 @@ export class WorkspaceFileService {
123270 return { status : 400 , body : "Path is a directory" } ;
124271 const contentType = contentTypeForPath ( targetPath ) ;
125272 const file = Bun . file ( targetPath ) ;
126- return { status : 200 , body : file , contentType, filePath : targetPath , size : stats . size } ;
273+ return {
274+ status : 200 ,
275+ body : file ,
276+ contentType,
277+ filePath : targetPath ,
278+ size : stats . size ,
279+ filename : path . basename ( targetPath ) ,
280+ download,
281+ } ;
127282 }
128283 catch {
129284 return { status : 404 , body : "Not found" } ;
0 commit comments