Commit c6ed170

fix: docker buildx cache upload
1 parent e83e1ca · commit c6ed170

File tree: 3 files changed (+50, −29 lines)

lib/storage/index.ts

Lines changed: 9 additions & 12 deletions
```diff
@@ -29,12 +29,13 @@ export interface Storage {
     archiveLocation: string
   } | null>
   download: (objectName: string) => Promise<ReadableStream | Readable>
-  uploadChunk: (
-    uploadId: number,
-    chunkStream: ReadableStream<Buffer>,
-    chunkStart: number,
-    chunkEnd: number,
-  ) => Promise<void>
+  uploadChunk: (opts: {
+    uploadId: number
+    chunkStream: ReadableStream<Buffer>
+    chunkStart: number
+    chunkEnd: number
+    chunkIndex: number
+  }) => Promise<void>
   commitCache: (uploadId: number | string, size: number) => Promise<void>
   reserveCache: (
     key: string,
@@ -107,7 +108,7 @@ export async function initializeStorage() {
         cacheId: uploadId,
       }
     },
-    async uploadChunk(uploadId, chunkStream, chunkStart, chunkEnd) {
+    async uploadChunk({ uploadId, chunkStream, chunkStart, chunkEnd, chunkIndex }) {
       const upload = await db
         .selectFrom('uploads')
         .selectAll()
@@ -124,11 +125,7 @@ export async function initializeStorage() {
         throw new Error('Chunk end must be greater than chunk start')
       }

-      // this should be the correct chunk size except for the last chunk
-      const chunkSize = Math.floor(chunkStart / (chunkEnd - chunkStart) + 1)
-      // this should handle the incorrect chunk size of the last chunk by just setting it to the limit of 10000 (for s3)
-      // TODO find a better way to calculate chunk size
-      const partNumber = Math.min(chunkSize, 10_000)
+      const partNumber = chunkIndex + 1

       const objectName = getObjectNameFromKey(upload.key, upload.version)
       try {
```
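The deleted heuristic tried to recover the part number from the byte range, which only holds when every chunk shares one fixed size; buildx's smaller chunks break that assumption, so the caller now passes the index explicitly. For context, S3 multipart uploads number parts 1 through 10,000, which is why partNumber is the zero-based chunkIndex plus one. A minimal sketch of how such a part number might feed an S3 upload (assuming @aws-sdk/client-s3; this helper and its parameter names are illustrative, not code from this repo):

```ts
import { S3Client, UploadPartCommand } from '@aws-sdk/client-s3'

// Hypothetical sketch: S3 part numbers are 1-based and capped at 10,000,
// so an explicit zero-based chunkIndex maps cleanly to partNumber = chunkIndex + 1.
async function uploadPart(
  s3: S3Client,
  opts: { bucket: string; objectName: string; s3UploadId: string; chunkIndex: number; body: Buffer },
) {
  await s3.send(
    new UploadPartCommand({
      Bucket: opts.bucket,
      Key: opts.objectName,
      UploadId: opts.s3UploadId,
      PartNumber: opts.chunkIndex + 1, // mirrors `const partNumber = chunkIndex + 1`
      Body: opts.body,
    }),
  )
}
```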

routes/_apis/artifactcache/caches/[cacheId].patch.ts

Lines changed: 13 additions & 1 deletion
```diff
@@ -37,8 +37,20 @@ export default defineEventHandler(async (event) => {
     throw createError({ statusCode: 400, statusMessage: 'Invalid content-range header' })
   }

+  // this should be the correct chunk size except for the last chunk
+  const chunkSize = Math.floor(start / (end - start))
+  // this should handle the incorrect chunk size of the last chunk by just setting it to the limit of 10000 (for s3)
+  // TODO find a better way to calculate chunk size
+  const chunkIndex = Math.min(chunkSize, 10_000)
+
   const adapter = await useStorageAdapter()
-  await adapter.uploadChunk(cacheId, stream as ReadableStream<Buffer>, start, end)
+  await adapter.uploadChunk({
+    uploadId: cacheId,
+    chunkStream: stream as ReadableStream<Buffer>,
+    chunkStart: start,
+    chunkEnd: end,
+    chunkIndex,
+  })
 })

 function parseContentRangeHeader(contentRange: string) {
```
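This endpoint still derives the index from the Content-Range header, since the Actions cache client doesn't send an explicit one. A standalone worked example of the arithmetic, with illustrative values (not code from this repo):

```ts
// Sketch: deriving a chunk index from a Content-Range of the form "bytes start-end/*".
// With equally sized chunks, start / (end - start) approximates the zero-based index;
// e.g. the third 64 MiB chunk is "bytes 134217728-201326591/*":
const start = 134_217_728 // 2 * 64 MiB
const end = 201_326_591 // start + 64 MiB - 1

// end - start is chunkSize - 1, so the quotient still floors to the right index
// for every chunk except the (possibly shorter) last one, hence the 10,000 cap.
const chunkIndex = Math.min(Math.floor(start / (end - start)), 10_000)
console.log(chunkIndex) // 2
```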

routes/upload/[cacheId].put.ts

Lines changed: 28 additions & 16 deletions
```diff
@@ -6,14 +6,12 @@ import { logger } from '~/lib/logger'
 import { useStorageAdapter } from '~/lib/storage'

 // https://github.com/actions/toolkit/blob/340a6b15b5879eefe1412ee6c8606978b091d3e8/packages/cache/src/cache.ts#L470
-const chunkSize = 64 * 1024 * 1024
+const MB = 1024 * 1024

 const pathParamsSchema = z.object({
   cacheId: z.coerce.number(),
 })

-const sizeByBlockId = new Map<string, number>()
-
 export default defineEventHandler(async (event) => {
   const parsedPathParams = pathParamsSchema.safeParse(event.context.params)
   if (!parsedPathParams.success)
@@ -24,7 +22,7 @@ export default defineEventHandler(async (event) => {

   if (getQuery(event).comp === 'blocklist') {
     setResponseStatus(event, 201)
-    return 'ok'
+    return
   }

   const blockId = getQuery(event)?.blockid as string
@@ -51,26 +49,40 @@ export default defineEventHandler(async (event) => {
     throw createError({ statusCode: 400, statusMessage: "'content-length' header is required" })
   }

-  sizeByBlockId.set(blockId, contentLength)
+  const userAgent = getHeader(event, 'user-agent')
+
+  // 1 MB for docker buildx
+  // 64 MB for everything else
+  const chunkSize = userAgent && userAgent.startsWith('azsdk-go-azblob') ? MB : 64 * MB
   const start = chunkIndex * chunkSize
   const end = start + contentLength - 1

   const adapter = await useStorageAdapter()
-  await adapter.uploadChunk(cacheId, stream as ReadableStream<Buffer>, start, end)
+  await adapter.uploadChunk({
+    uploadId: cacheId,
+    chunkStream: stream as ReadableStream<Buffer>,
+    chunkStart: start,
+    chunkEnd: end,
+    chunkIndex,
+  })

   setResponseStatus(event, 201)
 })

-/**
- * Format (base64 decoded): 06a9ffa8-2e62-4e96-8e5b-15f24c117f1f000000000006
- */
-function getChunkIndexFromBlockId(blockId: string) {
-  const decoded = Buffer.from(blockId, 'base64').toString('utf8')
-  if (decoded.length !== 48) return
+function getChunkIndexFromBlockId(blockIdBase64: string) {
+  const base64Decoded = Buffer.from(blockIdBase64, 'base64')

-  // slice off uuid and convert to number
-  const index = Number.parseInt(decoded.slice(36))
-  if (Number.isNaN(index)) return
+  // 64 bytes used by docker buildx
+  // 48 bytes used by everything else
+  if (base64Decoded.length === 64) {
+    return base64Decoded.readUInt32BE(16)
+  } else if (base64Decoded.length === 48) {
+    const decoded = base64Decoded.toString('utf8')

+    // slice off uuid and convert to number
+    const index = Number.parseInt(decoded.slice(36))
+    if (Number.isNaN(index)) return
+
+    return index
+  }
 }
```
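The two block-ID layouts can be exercised with synthetic values. A hedged sketch: the 48-byte example string comes from the deleted doc comment, the 64-byte layout is inferred from the readUInt32BE(16) offset in the patch, and getChunkIndexFromBlockId from the diff above is assumed to be in scope.

```ts
// 48-byte layout (actions/toolkit-style): "<uuid><12-digit zero-padded index>",
// e.g. "06a9ffa8-2e62-4e96-8e5b-15f24c117f1f000000000006" -> index 6.
const legacyBlockId = Buffer.from(
  '06a9ffa8-2e62-4e96-8e5b-15f24c117f1f000000000006',
  'utf8',
).toString('base64')

// 64-byte layout (docker buildx via azsdk-go-azblob): binary, assumed to carry a
// big-endian uint32 chunk index at byte offset 16 (per readUInt32BE(16) above).
const buildxBlockId = (() => {
  const buf = Buffer.alloc(64)
  buf.writeUInt32BE(6, 16) // chunk index 6
  return buf.toString('base64')
})()

console.log(getChunkIndexFromBlockId(legacyBlockId)) // 6
console.log(getChunkIndexFromBlockId(buildxBlockId)) // 6
```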
