Skip to content

Commit 53007f1

Browse files
VidocqHremyoudemansMoElkhidir
authored
feat: s3 compatible storage (#15)
* feat: support S3 storage * fix(test): fs mock issue Related issue: aws/aws-sdk-js-v3#3547 (comment) * docs: s3 compatible storage --------- Co-authored-by: Rémy Oudemans <31566929+remyoudemans@users.noreply.github.com> Co-authored-by: Mo Elkhidir <mhharktat@gmail.com>
1 parent 73f0a69 commit 53007f1

7 files changed

Lines changed: 1666 additions & 4 deletions

File tree

README.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -252,6 +252,7 @@ Currently, we support:
252252
- Supabase Storage
253253
- Local filesystem storage
254254
- Google Cloud Storage (gcs)
255+
- AWS S3 Compatible Storage (s3)
255256

256257
More providers (Azure, etc.) are welcome to be implemented by the community. The `StorageInterface` is quite simple and you can implement it for any blob storage service.
257258
</details>

__tests__/upload.test.ts

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,6 @@ jest.mock('../apiUtils/storage/StorageFactory');
1414
jest.mock('../apiUtils/helpers/ZipHelper');
1515
jest.mock('../apiUtils/helpers/HashHelper');
1616
jest.mock('formidable');
17-
jest.mock('fs');
1817
jest.mock('adm-zip');
1918

2019
describe('Upload API', () => {
@@ -48,7 +47,7 @@ describe('Upload API', () => {
4847

4948
// Mock file system
5049
const mockFileContent = Buffer.from('test file content');
51-
(fs.readFileSync as jest.Mock).mockReturnValue(mockFileContent);
50+
jest.spyOn(fs, 'readFileSync').mockReturnValue(mockFileContent);
5251

5352
// Mock AdmZip
5453
const mockZipFolder = {} as AdmZip;

apiUtils/storage/S3Storage.ts

Lines changed: 137 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,137 @@
1+
import {
2+
S3Client,
3+
GetObjectCommand,
4+
HeadObjectCommand,
5+
ListObjectsV2Command,
6+
PutObjectCommand,
7+
CopyObjectCommand,
8+
} from '@aws-sdk/client-s3';
9+
import { StorageInterface } from './StorageInterface';
10+
11+
export class S3Storage implements StorageInterface {
12+
private client: S3Client;
13+
private bucketName: string;
14+
15+
constructor() {
16+
if (!process.env.S3_ACCESS_KEY_ID || !process.env.S3_SECRET_ACCESS_KEY) {
17+
throw new Error('S3 credentials not configured');
18+
}
19+
if (!process.env.S3_BUCKET_NAME) {
20+
throw new Error('S3 bucket name not configured');
21+
}
22+
this.client = new S3Client({
23+
region: process.env.S3_REGION ?? 'auto',
24+
endpoint: process.env.S3_ENDPOINT,
25+
credentials: {
26+
accessKeyId: process.env.S3_ACCESS_KEY_ID,
27+
secretAccessKey: process.env.S3_SECRET_ACCESS_KEY,
28+
},
29+
});
30+
this.bucketName = process.env.S3_BUCKET_NAME;
31+
}
32+
33+
async copyFile(sourcePath: string, destinationPath: string): Promise<void> {
34+
const copyCommand = new CopyObjectCommand({
35+
Bucket: this.bucketName,
36+
CopySource: sourcePath,
37+
Key: destinationPath,
38+
});
39+
await this.client.send(copyCommand);
40+
}
41+
42+
async downloadFile(path: string): Promise<Buffer> {
43+
const getCommand = new GetObjectCommand({
44+
Bucket: this.bucketName,
45+
Key: path,
46+
});
47+
const response = await this.client.send(getCommand);
48+
const body = await response.Body?.transformToByteArray();
49+
if (!body) {
50+
throw new Error('No body found in response');
51+
}
52+
return Buffer.from(body);
53+
}
54+
55+
async fileExists(path: string): Promise<boolean> {
56+
try {
57+
const files = await this.listDirectories(path);
58+
if (files.length > 0) {
59+
return true;
60+
}
61+
} catch {}
62+
try {
63+
const headCommand = new HeadObjectCommand({
64+
Bucket: this.bucketName,
65+
Key: path.split('/').shift(),
66+
});
67+
await this.client.send(headCommand);
68+
return true;
69+
} catch {
70+
return false;
71+
}
72+
}
73+
74+
async listFiles(directory: string): Promise<
75+
{
76+
name: string;
77+
updated_at: string;
78+
created_at: string;
79+
metadata: { size: number; mimetype: string };
80+
}[]
81+
> {
82+
const prefix = `${directory}/`;
83+
const listCommand = new ListObjectsV2Command({
84+
Bucket: this.bucketName,
85+
Prefix: prefix,
86+
});
87+
const response = await this.client.send(listCommand);
88+
return (
89+
response.Contents?.map((file) => ({
90+
name: file.Key!.replace(prefix, ''),
91+
updated_at: file.LastModified?.toISOString() ?? '',
92+
created_at: file.LastModified?.toISOString() ?? '',
93+
metadata: {
94+
size: file.Size ?? 0,
95+
mimetype: this.getMimeType(file.Key?.split('.').pop() ?? 'unknown'),
96+
},
97+
})) ?? []
98+
);
99+
}
100+
101+
async listDirectories(directory: string): Promise<string[]> {
102+
const listCommand = new ListObjectsV2Command({
103+
Bucket: this.bucketName,
104+
Prefix: directory,
105+
Delimiter: '/',
106+
});
107+
const response = await this.client.send(listCommand);
108+
return (
109+
response.CommonPrefixes?.map((prefix) =>
110+
prefix.Prefix!.replace(directory, '').replace(/\/$/, '')
111+
) ?? []
112+
);
113+
}
114+
115+
async uploadFile(path: string, file: Buffer): Promise<string> {
116+
const uploadCommand = new PutObjectCommand({
117+
Bucket: this.bucketName,
118+
Key: path,
119+
Body: file,
120+
});
121+
await this.client.send(uploadCommand);
122+
return path;
123+
}
124+
125+
private getMimeType(ext: string): string {
126+
const mimeTypes: { [key: string]: string } = {
127+
js: 'application/javascript',
128+
json: 'application/json',
129+
png: 'image/png',
130+
jpg: 'image/jpeg',
131+
jpeg: 'image/jpeg',
132+
gif: 'image/gif',
133+
zip: 'application/zip',
134+
};
135+
return mimeTypes[ext.toLowerCase()] || 'application/octet-stream';
136+
}
137+
}

apiUtils/storage/StorageFactory.ts

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@ import { LocalStorage } from './LocalStorage';
22
import { StorageInterface } from './StorageInterface';
33
import { SupabaseStorage } from './SupabaseStorage';
44
import { GCSStorage } from './GCSStorage';
5+
import { S3Storage } from './S3Storage';
56
import { getLogger } from '../logger';
67

78
const logger = getLogger('StorageFactory');
@@ -18,6 +19,8 @@ export class StorageFactory {
1819
StorageFactory.instance = new LocalStorage();
1920
} else if (storageType === 'gcs') {
2021
StorageFactory.instance = new GCSStorage();
22+
} else if (storageType === 's3') {
23+
StorageFactory.instance = new S3Storage();
2124
} else {
2225
logger.error('Unsupported storage type', { storageType });
2326
throw new Error('Unsupported storage type');

docs/supportedStorageAlternatives.md

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,18 @@ GCP_BUCKET_NAME=your-gcs-bucket-name
2929
- Requires a GCP project with GCS bucket enabled
3030
- Bucket should be created manually before starting the server
3131

32+
### AWS S3 Compatible Storage
33+
```env
34+
BLOB_STORAGE_TYPE=s3
35+
S3_REGION=auto
36+
S3_ENDPOINT=your-s3-endpoint
37+
S3_ACCESS_KEY_ID=your-access-key-id
38+
S3_SECRET_ACCESS_KEY=your-secret-access-key
39+
S3_BUCKET_NAME=your-s3-bucket-name
40+
```
41+
- Supports any S3-compatible storage provider (AWS S3, DigitalOcean Spaces, Cloudflare R2, MinIO, etc.)
42+
- Bucket should be created manually before starting the server
43+
3244
## Supported Database Providers
3345
Database configuration is managed via `DB_TYPE`.
3446

0 commit comments

Comments
 (0)