
Commit 9cd288d

refactor(app): Decouple from @openneuro/client
The client package will no longer be needed with the move to the Deno CLI, and this allows frontend-specific client configuration. Fixes error handling not bubbling up properly across the React app.
1 parent: 0d7bdd7

11 files changed: +342 additions, -102 deletions

.pnp.cjs

Lines changed: 66 additions & 67 deletions
Some generated files are not rendered by default.

packages/openneuro-app/package.json

Lines changed: 0 additions & 1 deletion

@@ -19,7 +19,6 @@
     "@emotion/react": "11.11.1",
     "@emotion/styled": "11.11.0",
     "@niivue/niivue": "0.45.1",
-    "@openneuro/client": "^4.28.3",
     "@openneuro/components": "^4.28.3",
     "@sentry/react": "^8.25.0",
     "@tanstack/react-table": "^8.9.3",

packages/openneuro-app/src/client.jsx

Lines changed: 15 additions & 17 deletions

@@ -3,14 +3,12 @@
  */
 import "./scripts/utils/global-polyfill"
 import "./scripts/sentry"
-import { ApolloProvider, InMemoryCache } from "@apollo/client"
-import { createClient } from "@openneuro/client"
+import { ApolloClient, ApolloProvider, InMemoryCache } from "@apollo/client"
 import React from "react"
 import { createRoot } from "react-dom/client"
 import { BrowserRouter, Route, Routes } from "react-router-dom"
 import App from "./scripts/app"
 import Index from "./scripts/index"
-import { version } from "./lerna.json"
 import { config } from "./scripts/config"
 import * as gtag from "./scripts/utils/gtag"
 import { relayStylePagination } from "@apollo/client/utilities"

@@ -20,22 +18,22 @@ gtag.initialize(config.analytics.trackingIds)

 const mainElement = document.getElementById("main")
 const container = createRoot(mainElement)
+const client = new ApolloClient({
+  uri: `${config.url}/crn/graphql`,
+  cache: new InMemoryCache({
+    typePolicies: {
+      Query: {
+        fields: {
+          advancedSearch: relayStylePagination(),
+        },
+      },
+    },
+  }),
+})
+
 container.render(
   <App>
-    <ApolloProvider
-      client={createClient(`${config.url}/crn/graphql`, {
-        clientVersion: version,
-        cache: new InMemoryCache({
-          typePolicies: {
-            Query: {
-              fields: {
-                advancedSearch: relayStylePagination(),
-              },
-            },
-          },
-        }),
-      })}
-    >
+    <ApolloProvider client={client}>
       <BrowserRouter>
         <Routes>
           <Route path="*" element={<Index />} />
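
With the Apollo client now constructed directly in the app, components read query state (including errors) through the standard hooks. A minimal sketch under that assumption; the component and query below are hypothetical and not part of this commit:

import React from "react"
import { gql, useQuery } from "@apollo/client"

// Hypothetical query; the dataset(id) { id } shape matches queries used elsewhere in this commit
const DATASET_QUERY = gql`
  query dataset($datasetId: ID!) {
    dataset(id: $datasetId) {
      id
    }
  }
`

// Hypothetical component; it must render inside the ApolloProvider set up above
export function DatasetId({ datasetId }) {
  const { data, loading, error } = useQuery(DATASET_QUERY, {
    variables: { datasetId },
    errorPolicy: "all", // surface GraphQL errors alongside any partial data
  })
  if (loading) return <span>Loading…</span>
  if (error) return <span>{error.message}</span>
  return <span>{data.dataset.id}</span>
}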

packages/openneuro-app/src/scripts/dataset/download/download-query.js

Lines changed: 36 additions & 3 deletions

@@ -1,10 +1,43 @@
-import { datasets } from "@openneuro/client"
+import { gql } from "@apollo/client"
+
+export const DOWNLOAD_DATASET = gql`
+  query downloadDraft($datasetId: ID!, $tree: String) {
+    dataset(id: $datasetId) {
+      id
+      draft {
+        id
+        files(tree: $tree) {
+          id
+          directory
+          filename
+          size
+          urls
+        }
+      }
+    }
+  }
+`
+
+export const DOWNLOAD_SNAPSHOT = gql`
+  query downloadSnapshot($datasetId: ID!, $tag: String!, $tree: String) {
+    snapshot(datasetId: $datasetId, tag: $tag) {
+      id
+      files(tree: $tree) {
+        id
+        directory
+        filename
+        size
+        urls
+      }
+    }
+  }
+`

 export const downloadDataset =
   (client) => async ({ datasetId, snapshotTag, tree = null }) => {
     if (snapshotTag) {
       const { data } = await client.query({
-        query: datasets.downloadSnapshot,
+        query: DOWNLOAD_SNAPSHOT,
         variables: {
           datasetId,
           tag: snapshotTag,
@@ -14,7 +47,7 @@ export const downloadDataset =
       return data.snapshot.files
     } else {
       const { data } = await client.query({
-        query: datasets.downloadDataset,
+        query: DOWNLOAD_DATASET,
         variables: {
           datasetId,
           tree,
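
For reference, a sketch of calling the curried downloadDataset helper with a standalone ApolloClient; the endpoint URL and dataset ID below are placeholder assumptions:

import { ApolloClient, InMemoryCache } from "@apollo/client"
import { downloadDataset } from "./download-query"

// Standalone client for illustration; the app normally supplies its own instance
const client = new ApolloClient({
  uri: "https://openneuro.example/crn/graphql", // assumed endpoint
  cache: new InMemoryCache(),
})

// Resolves with the snapshot's file listing; omit snapshotTag to read the draft instead
downloadDataset(client)({ datasetId: "ds000001", snapshotTag: "1.0.0" }).then(
  (files) => {
    for (const file of files) {
      console.log(file.filename, file.size, file.urls)
    }
  },
)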
packages/openneuro-app/src/scripts/uploader/file-upload-parallel.js

Lines changed: 122 additions & 0 deletions

@@ -0,0 +1,122 @@
+/**
+ * Convert from a URL compatible path
+ * @param {String} path
+ */
+export const decodeFilePath = (path) => {
+  return path.replace(new RegExp(":", "g"), "/")
+}
+
+/**
+ * Determine parallelism based on Request list
+ * @param {Array<Request>} requests
+ * @param {number} bytes expected total size of all requests
+ * @returns {number}
+ */
+export function uploadParallelism(requests, bytes) {
+  const averageSize = bytes / requests.length
+  const parallelism = averageSize / 524288 // 512KB
+  if (parallelism > 8) {
+    return 8
+  } else if (parallelism < 2) {
+    return 2
+  } else {
+    return Math.round(parallelism)
+  }
+}
+
+/**
+ * Extract filename from Request URL
+ * @param {string} url .../a:path:to:a:file
+ */
+export function parseFilename(url) {
+  const filePath = url.substring(url.lastIndexOf("/") + 1)
+  return decodeFilePath(filePath)
+}
+
+/**
+ * Control retry delay for upload file requests
+ * @param {number} step Attempt number
+ * @param {Request} request Active request
+ */
+export async function retryDelay(step, request) {
+  if (step <= 4) {
+    await new Promise((r) => setTimeout(r, step ** 2 * 1000))
+  } else {
+    throw new Error(
+      `Failed to upload file after ${step} attempts - "${request.url}"`,
+    )
+  }
+}
+
+/**
+ * Repeatable function for single file upload fetch request
+ * @param {object} uploadProgress Progress controller instance
+ * @param {typeof fetch} fetch Fetch implementation to use - useful for environments without a native fetch
+ * @returns {function (Request, number): Promise<Response|void>}
+ */
+export const uploadFile =
+  (uploadProgress, fetch) => async (request, attempt = 1) => {
+    // Create a retry function with attempts incremented
+    const filename = parseFilename(request.url)
+    const handleFailure = async (failure) => {
+      const retryClone = request.clone()
+      // eslint-disable-next-line no-console
+      console.warn(`\nRetrying upload for ${filename}: ${failure}`)
+      try {
+        await retryDelay(attempt, request)
+        return uploadFile(uploadProgress, fetch)(retryClone, attempt + 1)
+      } catch (err) {
+        if ("failUpload" in uploadProgress) {
+          uploadProgress.failUpload(filename)
+        }
+        throw err
+      }
+    }
+    // This is needed to cancel the request in case of client errors
+    if ("startUpload" in uploadProgress) {
+      uploadProgress.startUpload(filename)
+    }
+    try {
+      // Clone before using the request to allow retries to reuse the body
+      const response = await fetch(request)
+      if (response.status === 200) {
+        // We need to wait for the response body or fetch-h2 may leave the connection open
+        await response.json()
+        if ("finishUpload" in uploadProgress) {
+          uploadProgress.finishUpload(filename)
+        }
+        uploadProgress.increment()
+      } else {
+        await handleFailure(response.statusText)
+      }
+    } catch (err) {
+      await handleFailure(err)
+    }
+  }
+
+/**
+ * @param {Request[]} requests
+ * @param {number} totalSize
+ * @param {object} uploadProgress
+ * @param {typeof fetch} fetch
+ */
+export async function uploadParallel(
+  requests,
+  totalSize,
+  uploadProgress,
+  fetch,
+) {
+  // Array stride of parallel requests
+  const parallelism = uploadParallelism(requests, totalSize)
+  for (
+    let rIndex = 0;
+    rIndex < requests.length;
+    rIndex = rIndex + parallelism
+  ) {
+    await Promise.allSettled(
+      requests
+        .slice(rIndex, rIndex + parallelism)
+        .map(uploadFile(uploadProgress, fetch)),
+    )
+  }
+}
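
A rough sketch of how these helpers compose outside the uploader UI; the endpoint, request method, file list, and minimal progress object below are assumptions for illustration:

import { uploadParallel, uploadParallelism } from "./file-upload-parallel"

// Assumed endpoint and request shape; the real requests are built in file-upload.js
const endpoint = "https://upload.example/uploads/ds000001"
const files = [{ path: "dataset_description.json", body: "{}", size: 2 }]

// Encode "/" as ":" in the URL path, matching decodeFilePath above
const requests = files.map(
  (file) =>
    new Request(`${endpoint}/${file.path.replace(new RegExp("/", "g"), ":")}`, {
      method: "POST", // method is an assumption for this sketch
      body: file.body,
    }),
)
const totalSize = files.reduce((sum, file) => sum + file.size, 0)

// Minimal progress object; only increment() is required, the named hooks are optional
const uploadProgress = { increment: () => console.log("uploaded one file") }

console.log("parallel requests per batch:", uploadParallelism(requests, totalSize))
uploadParallel(requests, totalSize, uploadProgress, fetch).then(() => {
  console.log("all upload batches settled")
})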

packages/openneuro-app/src/scripts/uploader/file-upload.js

Lines changed: 3 additions & 3 deletions

@@ -1,5 +1,5 @@
 import { config } from "../config"
-import { uploads } from "@openneuro/client"
+import { uploadParallel } from "./file-upload-parallel"

 /**
  * Trim the webkitRelativePath value to only include the dataset relative path
@@ -38,7 +38,7 @@ export const getRelativePath = (
  */
 export const encodeFilePath = (file, options = { stripRelativePath: false }) =>
   file.webkitRelativePath
-    ? uploads.encodeFilePath(getRelativePath(file, options))
+    ? getRelativePath(file, options).replace(new RegExp("/", "g"), ":")
     : file.name

 /**
@@ -85,5 +85,5 @@ export async function uploadFiles({

   // No background fetch
   // Parallelism is handled by the client in this case
-  return uploads.uploadParallel(requests, totalSize, uploadProgress, fetch)
+  return uploadParallel(requests, totalSize, uploadProgress, fetch)
 }
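
The encoding is a reversible substitution between "/" and ":"; a short sketch of the round trip using decodeFilePath from file-upload-parallel (the example path is made up):

import { decodeFilePath } from "./file-upload-parallel"

// Same substitution encodeFilePath applies to webkitRelativePath values
const encoded = "sub-01/anat/sub-01_T1w.nii.gz".replace(new RegExp("/", "g"), ":")
console.log(encoded) // "sub-01:anat:sub-01_T1w.nii.gz"
console.log(decodeFilePath(encoded)) // "sub-01/anat/sub-01_T1w.nii.gz"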
Lines changed: 36 additions & 0 deletions

@@ -0,0 +1,36 @@
+/**
+ * Java hashcode implementation for browser and Node.js
+ * @param {string} str
+ */
+function hashCode(str) {
+  return str
+    .split("")
+    .reduce(
+      (prevHash, currVal) =>
+        ((prevHash << 5) - prevHash + currVal.charCodeAt(0)) | 0,
+      0,
+    )
+}
+
+/**
+ * Calculate a hash from a list of files to upload
+ * @param {string} datasetId Dataset namespace for this hash
+ * @param {Array<object>} files Files being uploaded
+ * @returns {string} Hex string identity hash
+ */
+export function hashFileList(datasetId, files) {
+  return Math.abs(
+    hashCode(
+      datasetId +
+        files
+          .map(
+            (f) =>
+              `${
+                "webkitRelativePath" in f ? f.webkitRelativePath : f.filename
+              }:${f.size}`,
+          )
+          .sort()
+          .join(":"),
+    ),
+  ).toString(16)
+}
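
A quick sketch of hashFileList in use; the import path and the file objects below are assumptions, since this hunk does not show the new module's name:

import { hashFileList } from "./upload-hash" // module path is an assumption; this hunk omits the file name

const files = [
  { webkitRelativePath: "ds000001/dataset_description.json", size: 120 },
  { filename: "README", size: 48 },
]

// Paths (or filenames) and sizes are joined, sorted, hashed, and rendered as hex
console.log(hashFileList("ds000001", files)) // prints a short hex string identifying this file set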

packages/openneuro-app/src/scripts/uploader/upload-mutation.js

Lines changed: 32 additions & 4 deletions

@@ -1,6 +1,34 @@
-import { datasets, uploads } from "@openneuro/client"
+import { gql } from "@apollo/client"
 import { SUBMIT_METADATA } from "../dataset/mutations/submit-metadata.jsx"

+export const CREATE_DATASET = gql`
+  mutation createDataset($affirmedDefaced: Boolean, $affirmedConsent: Boolean) {
+    createDataset(
+      affirmedDefaced: $affirmedDefaced
+      affirmedConsent: $affirmedConsent
+    ) {
+      id
+    }
+  }
+`
+
+export const PREPARE_UPLOAD = gql`
+  mutation prepareUpload($datasetId: ID!, $uploadId: ID!) {
+    prepareUpload(datasetId: $datasetId, uploadId: $uploadId) {
+      id
+      datasetId
+      token
+      endpoint
+    }
+  }
+`
+
+export const FINISH_UPLOAD = gql`
+  mutation finishUpload($uploadId: ID!) {
+    finishUpload(uploadId: $uploadId)
+  }
+`
+
 /**
  * Create a dataset and update the label
  * @param {object} client Apollo client
@@ -9,7 +37,7 @@ export const createDataset =
   (client) => ({ affirmedDefaced, affirmedConsent }) => {
     return client
       .mutate({
-        mutation: datasets.createDataset,
+        mutation: CREATE_DATASET,
         variables: { affirmedDefaced, affirmedConsent },
         errorPolicy: "all",
       })
@@ -22,7 +50,7 @@ export const createDataset =
  */
 export const prepareUpload = (client) => ({ datasetId, uploadId }) => {
   return client.mutate({
-    mutation: uploads.prepareUpload,
+    mutation: PREPARE_UPLOAD,
     variables: { datasetId, uploadId },
   })
 }
@@ -33,7 +61,7 @@ export const prepareUpload = (client) => ({ datasetId, uploadId }) => {
  */
 export const finishUpload = (client) => (uploadId) => {
   return client.mutate({
-    mutation: uploads.finishUpload,
+    mutation: FINISH_UPLOAD,
     variables: { uploadId },
   })
 }
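
A sketch of how the three curried helpers could chain around an upload; the client wiring, upload ID scheme, file transfer step, and the resolved value of createDataset are assumptions not shown in this hunk:

import { createDataset, finishUpload, prepareUpload } from "./upload-mutation"

// uploadFilesForDataset is a stand-in for the actual file transfer step
export async function runUpload(client, files, uploadFilesForDataset) {
  // Assumed: createDataset resolves with the new dataset id (not visible in this hunk)
  const datasetId = await createDataset(client)({
    affirmedDefaced: true,
    affirmedConsent: false,
  })
  // Assumed ID scheme for the upload batch
  const uploadId = crypto.randomUUID()
  // Returns { id, datasetId, token, endpoint } describing the upload target
  const { data } = await prepareUpload(client)({ datasetId, uploadId })
  await uploadFilesForDataset(data.prepareUpload, files)
  // Tell the server the batch is complete
  await finishUpload(client)(uploadId)
  return datasetId
}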
