Skip to content

Commit c74ebad

Browse files
authored
Merge pull request #3535 from OpenNeuroOrg/feat/update-file-check-mutation
Add FileCheck model and updateFileCheck mutation
Merge commit c74ebad (parents a0ae008 and 4fe6994)

File tree

5 files changed

+139
-1
lines changed

5 files changed

+139
-1
lines changed
Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
import FileCheck from "../../models/fileCheck"
2+
import { checkDatasetAdmin } from "../permissions"
3+
4+
export const updateFileCheck = async (
5+
obj,
6+
{ datasetId, hexsha, refs, remote, annexFsck },
7+
{ user, userInfo },
8+
) => {
9+
await checkDatasetAdmin(datasetId, user, userInfo)
10+
return await FileCheck.findOneAndUpdate(
11+
{ datasetId, hexsha },
12+
{ datasetId, hexsha, remote, refs, annexFsck },
13+
{ upsert: true, new: true },
14+
)
15+
.lean()
16+
.exec()
17+
}

packages/openneuro-server/src/graphql/resolvers/mutation.ts

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -45,6 +45,7 @@ import {
4545
} from "./importRemoteDataset"
4646
import { saveAdminNote } from "./datasetEvents"
4747
import { createGitEvent } from "./gitEvents"
48+
import { updateFileCheck } from "./fileCheck"
4849

4950
const Mutation = {
5051
createDataset,
@@ -93,6 +94,7 @@ const Mutation = {
9394
updateUser,
9495
saveAdminNote,
9596
createGitEvent,
97+
updateFileCheck,
9698
}
9799

98100
export default Mutation

packages/openneuro-server/src/graphql/schema.ts

Lines changed: 35 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -205,6 +205,14 @@ export const typeDefs = `
205205
saveAdminNote(id: ID, datasetId: ID!, note: String!): DatasetEvent
206206
# Create a git event log for dataset changes
207207
createGitEvent(datasetId: ID!, commit: String!, reference: String!): DatasetEvent
208+
# Create or update a fileCheck document
209+
updateFileCheck(
210+
datasetId: ID!
211+
hexsha: String!
212+
refs: [String!]!
213+
annexFsck: [AnnexFsckInput!]!
214+
remote: String
215+
): FileCheck
208216
}
209217
210218
# Anonymous dataset reviewer
@@ -900,6 +908,33 @@ export const typeDefs = `
900908
# Notes associated with the event
901909
note: String
902910
}
911+
912+
type FileCheck {
913+
datasetId: String!
914+
hexsha: String!
915+
refs: [String!]!
916+
annexFsck: [AnnexFsck!]
917+
remote: String
918+
}
919+
920+
type AnnexFsck {
921+
command: String
922+
errorMessages: [String]
923+
file: String
924+
key: String
925+
note: String
926+
success: Boolean
927+
}
928+
929+
input AnnexFsckInput {
930+
command: String
931+
errorMessages: [String]
932+
file: String
933+
key: String
934+
note: String
935+
success: Boolean
936+
}
937+
903938
`
904939

905940
schemaComposer.addTypeDefs(typeDefs)
Lines changed: 37 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,37 @@
1+
import mongoose from "mongoose"
2+
import type { Document } from "mongoose"
3+
const { Schema, model } = mongoose
4+
5+
export interface FileCheckDocument extends Document {
6+
datasetId: string
7+
hexsha: string
8+
refs: string[]
9+
remote: string
10+
annexFsck: {
11+
command: string
12+
"error-messages": string[]
13+
file: string
14+
key: string
15+
note: string
16+
success: boolean
17+
}[]
18+
}
19+
20+
const fileCheckSchema = new Schema({
21+
datasetId: { type: String, required: true },
22+
hexsha: { type: String, required: true },
23+
refs: { type: [String], required: true },
24+
remote: { type: String, default: "local", required: true },
25+
annexFsck: [{
26+
command: String,
27+
"error-messages": [String],
28+
file: String,
29+
key: String,
30+
note: String,
31+
success: Boolean,
32+
}],
33+
})
34+
35+
const FileCheck = model<FileCheckDocument>("FileCheck", fileCheckSchema)
36+
37+
export default FileCheck

services/datalad/datalad_service/common/openneuro.py

Lines changed: 48 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,26 @@
11
import requests
2+
from pathlib import Path
3+
import jwt
4+
import logging
5+
from datetime import datetime, timedelta, timezone
26

3-
from datalad_service.config import GRAPHQL_ENDPOINT
7+
from datalad_service.config import GRAPHQL_ENDPOINT, JWT_SECRET
8+
9+
10+
def generate_service_token(dataset_id):
    """Create a short-lived service JWT scoped to a single dataset.

    The token identifies the dataset worker ('dataset-worker' subject,
    'dataset:worker' scope), names the dataset it may act on, and expires
    24 hours after issue. Signed with the shared HS256 secret.
    """
    issued_at = datetime.now(timezone.utc)
    expires_at = issued_at + timedelta(hours=24)
    claims = {
        'sub': 'dataset-worker',
        'iat': int(issued_at.timestamp()),
        'exp': int(expires_at.timestamp()),
        'scopes': ['dataset:worker'],
        'dataset': dataset_id,
    }
    return jwt.encode(claims, JWT_SECRET, algorithm='HS256')
424

525

626
def cache_clear_mutation(dataset_id, tag):
@@ -18,3 +38,30 @@ def clear_dataset_cache(dataset, tag, cookies={}):
1838
)
1939
if r.status_code != 200:
2040
raise Exception(r.text)
41+
42+
43+
def update_file_check(dataset_path, commit, references, bad_files, remote=None):
    """Post results of git-annex fsck to the GraphQL endpoint.

    :param dataset_path: filesystem path whose basename is the dataset id
    :param commit: commit object; ``commit.id`` is the checked hexsha
    :param references: list of refs pointing at this commit
    :param bad_files: list of AnnexFsckInput-shaped dicts for failed files
    :param remote: optional remote name; omitted means the server default

    HTTP errors are logged and swallowed (best-effort reporting); other
    request exceptions propagate to the caller.
    """
    dataset_id = Path(dataset_path).name
    # FIX: the mutation must declare $remote and pass it through, otherwise
    # the 'remote' variable added below is silently ignored by the server.
    query = (
        'mutation updateFileCheck('
        '$datasetId: ID!, $hexsha: String!, $refs: [String!]!, '
        '$annexFsck: [AnnexFsckInput!]!, $remote: String'
        ') { updateFileCheck('
        'datasetId: $datasetId, hexsha: $hexsha, refs: $refs, '
        'annexFsck: $annexFsck, remote: $remote'
        ') { datasetId, hexsha } }'
    )
    try:
        post_body = {
            'query': query,
            'variables': {
                'datasetId': dataset_id,
                'hexsha': str(commit.id),
                'refs': references,
                'annexFsck': bad_files,
            },
            'operationName': 'updateFileCheck',
        }
        if remote:
            post_body['variables']['remote'] = remote
        response = requests.post(
            url=GRAPHQL_ENDPOINT,
            json=post_body,
            headers={'authorization': f'Bearer {generate_service_token(dataset_id)}'},
        )
        response.raise_for_status()
    except requests.exceptions.HTTPError as e:
        logging.error(e)
        # e.response is always set for HTTPError raised by raise_for_status;
        # avoids depending on the local 'response' binding inside except.
        logging.error(e.response.text)

0 commit comments

Comments
 (0)