
Commit 9de4e1b

Merge pull request #3392 from OpenNeuroOrg/feat/readme-md
Handle README.md and README.rst
2 parents c3dd9a2 + 30eb511 commit 9de4e1b

16 files changed (+119, -27 lines)

packages/openneuro-app/src/scripts/dataset/files/file-viewer-type.jsx

Lines changed: 5 additions & 1 deletion
@@ -8,6 +8,7 @@ import FileViewerCsv from "./viewers/file-viewer-csv.jsx"
 import FileViewerHtml from "./viewers/file-viewer-html.jsx"
 import { FileViewerNeurosift } from "./viewers/file-viewer-neurosift"
 import { isNifti } from "./file-types"
+import FileViewerMarkdown from "./viewers/file-viewer-markdown"

 /**
  * Choose the right viewer for each file type
@@ -18,13 +19,16 @@ const FileViewerType = ({ path, url, data }) => {
     path.endsWith("CHANGES") ||
     path.endsWith(".bidsignore") ||
     path.endsWith(".gitignore") ||
-    path.endsWith(".txt")
+    path.endsWith(".txt") ||
+    path.endsWith(".rst")
   ) {
     return <FileViewerText data={data} />
   } else if (
     isNifti(path)
   ) {
     return <FileViewerNifti imageUrl={url} />
+  } else if (path.endsWith(".md")) {
+    return <FileViewerMarkdown data={data} />
   } else if (path.endsWith(".json")) {
     return <FileViewerJson data={data} />
   } else if (path.endsWith(".tsv")) {
packages/openneuro-app/src/scripts/dataset/files/viewers/file-viewer-markdown.tsx (new file)

Lines changed: 13 additions & 0 deletions
@@ -0,0 +1,13 @@
+import React from "react"
+import { Markdown } from "../../../utils/markdown"
+
+interface FileViewerMarkdownProps {
+  data: ArrayBuffer
+}
+
+const FileViewerMarkdown = ({ data }: FileViewerMarkdownProps) => {
+  const decoder = new TextDecoder()
+  return <Markdown>{decoder.decode(data)}</Markdown>
+}
+
+export default FileViewerMarkdown

packages/openneuro-server/src/datalad/readme.ts

Lines changed: 2 additions & 2 deletions
@@ -34,8 +34,8 @@ export const readme = (obj) => {
   })
 }

-export const setReadme = (datasetId, readme, user) => {
-  return addFileString(datasetId, "README", "text/plain", readme).then(() =>
+export const setReadme = (datasetId, readme, filename, user) => {
+  return addFileString(datasetId, filename, "text/plain", readme).then(() =>
     commitFiles(datasetId, user)
   )
 }

packages/openneuro-server/src/graphql/resolvers/draft.ts

Lines changed: 1 addition & 1 deletion
@@ -10,7 +10,7 @@ import { filterRemovedAnnexObjects } from "../utils/file.js"
 import { validation } from "./validation"

 // A draft must have a dataset parent
-const draftFiles = async (dataset, args, { userInfo }) => {
+export const draftFiles = async (dataset, args, { userInfo }) => {
   const hexsha = await getDraftRevision(dataset.id)
   const files = await getFiles(dataset.id, args.tree || hexsha)
   return filterRemovedAnnexObjects(dataset.id, userInfo)(files)

packages/openneuro-server/src/graphql/resolvers/readme.ts

Lines changed: 24 additions & 4 deletions
@@ -5,10 +5,30 @@
 import { setReadme } from "../../datalad/readme"
 import { checkDatasetWrite } from "../permissions"
 export { readme } from "../../datalad/readme"
+import { draftFiles } from "./draft"

-export const updateReadme = (obj, { datasetId, value }, { user, userInfo }) => {
-  return checkDatasetWrite(datasetId, user, userInfo).then(() => {
-    // Save to backend
-    return setReadme(datasetId, value, userInfo).then(() => true)
+export async function updateReadme(
+  obj,
+  { datasetId, value },
+  { user, userInfo },
+) {
+  await checkDatasetWrite(datasetId, user, userInfo)
+  const files = await draftFiles({ id: datasetId }, { tree: "HEAD" }, {
+    userInfo,
   })
+  // Default to README.md if none exists
+  let filename = "README.md"
+  for (const file of files) {
+    if (
+      file.filename === "README.md" || file.filename === "README.rst" ||
+      file.filename === "README.txt" ||
+      file.filename === "README"
+    ) {
+      filename = file.filename
+      break
+    }
+  }
+  // Save to backend
+  await setReadme(datasetId, value, filename, userInfo)
+  return true
 }

services/datalad/datalad_service/common/bids.py

Lines changed: 4 additions & 1 deletion
@@ -1,12 +1,15 @@
 import json

+import pygit2
+
 from datalad_service.common.git import git_show


 def read_dataset_description(dataset_path, commit):
     try:
+        repo = pygit2.Repository(dataset_path)
         raw_description = git_show(
-            dataset_path, commit, 'dataset_description.json')
+            repo, commit, 'dataset_description.json')
         return json.loads(raw_description)
     except json.decoder.JSONDecodeError:
         return None

services/datalad/datalad_service/common/git.py

Lines changed: 14 additions & 4 deletions
@@ -1,3 +1,4 @@
+import pathlib
 import re
 import subprocess

@@ -14,23 +15,32 @@ class OpenNeuroGitError(Exception):
     """OpenNeuro git repo states that should not arise under normal use but may be a valid git operation in other contexts."""


-def git_show(path, committish, obj):
-    repo = pygit2.Repository(path)
+def git_show(repo, committish, obj):
+    """Equivalent to `git show <committish>:<obj>` on `repo` repository."""
     commit, _ = repo.resolve_refish(committish)
     data_bytes = (commit.tree / obj).read_raw()
     result = from_bytes(data_bytes).best()
     return str(result)


-def git_show_object(path, obj):
-    repo = pygit2.Repository(path)
+def git_show_object(repo, obj):
     git_obj = repo.get(obj)
     if git_obj:
         return git_obj.read_raw().decode()
     else:
         raise KeyError('object not found in repository')


+def git_tree(repo, committish, filepath):
+    """Retrieve the tree parent for a given commit and filename."""
+    path = pathlib.Path(filepath)
+    commit, _ = repo.resolve_refish(committish)
+    tree = commit.tree
+    for part in path.parts[:-1]:
+        tree = tree / part
+    return tree
+
+
 def delete_tag(path, tag):
     repo = pygit2.Repository(path)
     repo.references.delete(f'refs/tags/{tag}')
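
Outside the diff, a minimal usage sketch of the reworked git helpers, assuming a local dataset checkout. The repository path and file paths below are hypothetical; `git_tree` returns the parent tree of the given path at a commit, and `git_show` reads a file's contents from an already-open `pygit2.Repository`.

# Minimal sketch (not part of the commit); repository and file paths are hypothetical.
import pygit2

from datalad_service.common.git import git_show, git_tree

repo = pygit2.Repository('/datasets/ds000001')             # hypothetical dataset checkout
tree = git_tree(repo, 'HEAD', 'sub-01/anat/T1w.json')      # parent tree for sub-01/anat/
print([entry.name for entry in tree])                       # sibling entries in that directory
print(git_show(repo, 'HEAD', 'dataset_description.json'))   # file contents at HEAD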

services/datalad/datalad_service/datalad.py

Lines changed: 8 additions & 0 deletions
@@ -1,10 +1,13 @@
 from os import path

+import pygit2
+
 class DataladStore:
     """Store for Datalad state accessed by resource handlers."""

     def __init__(self, annex_path):
         self.annex_path = annex_path
+        self.repos = {}

     def get_dataset_path(self, name):
         return path.join(self.annex_path, name)
@@ -13,3 +16,8 @@ def get_upload_path(self, dataset, upload):
         prefix_a = upload[0:2]
         prefix_b = upload[2:4]
         return path.join(self.annex_path, 'uploads', dataset, prefix_a, prefix_b, upload)
+
+    def get_dataset_repo(self, dataset):
+        if dataset not in self.repos:
+            self.repos[dataset] = pygit2.Repository(self.get_dataset_path(dataset))
+        return self.repos[dataset]
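
Outside the diff, a brief sketch of how the new repository cache behaves: `get_dataset_repo` opens a `pygit2.Repository` the first time a dataset is requested and hands back the same instance afterwards. The annex path and accession number are hypothetical.

# Usage sketch only; '/datasets' and 'ds000001' are hypothetical values.
from datalad_service.datalad import DataladStore

store = DataladStore('/datasets')
repo_a = store.get_dataset_repo('ds000001')  # opens and caches the repository
repo_b = store.get_dataset_repo('ds000001')  # returns the cached instance
assert repo_a is repo_b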

services/datalad/datalad_service/handlers/files.py

Lines changed: 12 additions & 2 deletions
@@ -1,11 +1,12 @@
 import logging
 import os
+from pathlib import Path

 import falcon
 import pygit2
 import aiofiles

-from datalad_service.common.git import git_show
+from datalad_service.common.git import git_show, git_tree
 from datalad_service.common.user import get_user_info
 from datalad_service.common.stream import update_file
 from datalad_service.tasks.files import remove_files
@@ -21,7 +22,16 @@ async def on_get(self, req, resp, dataset, filename, snapshot='HEAD'):
         ds_path = self.store.get_dataset_path(dataset)
         try:
             try:
-                file_content = git_show(ds_path, snapshot, filename)
+                repo = self.store.get_dataset_repo(dataset)
+                tree = git_tree(repo, snapshot, filename)
+                # Look for any files that only differ by extension
+                path = Path(filename)
+                if path.name not in tree:
+                    for obj in tree:
+                        if Path(obj.name).stem == path.name:
+                            filename = obj.name
+                            break
+                file_content = git_show(repo, snapshot, filename)
                 # If the file begins with an annex path, return that path
                 if file_content[0:4096].find('.git/annex') != -1:
                     # Resolve absolute path for annex target
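
Outside the diff, a self-contained sketch of the extension fallback added to the handler above: if the requested name is not present in the tree, the first entry whose stem matches is used instead, so a request for README can resolve to README.md or README.rst. The entry list here is a hypothetical stand-in for the pygit2 tree listing.

# Standalone illustration; the entry list stands in for a pygit2 tree.
from pathlib import Path

requested = Path('README')                                      # name requested by the client
entries = ['CHANGES', 'README.md', 'dataset_description.json']  # hypothetical directory listing
match = next(
    (name for name in entries if Path(name).stem == requested.name),
    requested.name,  # keep the original name when nothing matches
)
print(match)  # README.md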

services/datalad/datalad_service/handlers/objects.py

Lines changed: 2 additions & 2 deletions
@@ -12,10 +12,10 @@ def __init__(self, store):
         self.logger = logging.getLogger('datalad_service.' + __name__)

     async def on_get(self, req, resp, dataset, obj):
-        ds_path = self.store.get_dataset_path(dataset)
+        repo = self.store.get_dataset_repo(dataset)
         try:
             if len(obj) == 40:
-                resp.text = git_show_object(ds_path, obj)
+                resp.text = git_show_object(repo, obj)
                 resp.status = falcon.HTTP_OK
             else:
                 resp.media = {
