Skip to content

Commit 96bf37a

Browse files
Merge pull request #499 from NeurodataWithoutBorders/use_dev_dandi
Use latest dandi dev branch
2 parents f084e15 + f71825e commit 96bf37a

File tree

7 files changed

+20
-13
lines changed

7 files changed

+20
-13
lines changed

environments/environment-Windows.yml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,5 +18,6 @@ dependencies:
1818
- flask-cors === 3.0.10
1919
- flask_restx == 1.1.0
2020
- neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@main#neuroconv[full]
21+
- dandi >= 0.57.0
2122
- pytest == 7.2.2
2223
- pytest-cov == 4.1.0

nwb-guide.spec

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,12 @@ datas = [('./paths.config.json', '.'), ('./package.json', '.')]
1111
binaries = []
1212
hiddenimports = ['scipy._distributor_init', 'scipy._lib.messagestream', 'scipy._lib._ccallback', 'scipy._lib._testutils', 'email_validator']
1313
datas += collect_data_files('jsonschema_specifications')
14+
tmp_ret = collect_all('dandi')
15+
datas += tmp_ret[0]; binaries += tmp_ret[1]; hiddenimports += tmp_ret[2]
16+
tmp_ret = collect_all('keyrings')
17+
datas += tmp_ret[0]; binaries += tmp_ret[1]; hiddenimports += tmp_ret[2]
18+
tmp_ret = collect_all('unittest')
19+
datas += tmp_ret[0]; binaries += tmp_ret[1]; hiddenimports += tmp_ret[2]
1420
tmp_ret = collect_all('nwbinspector')
1521
datas += tmp_ret[0]; binaries += tmp_ret[1]; hiddenimports += tmp_ret[2]
1622
tmp_ret = collect_all('neuroconv')
@@ -19,6 +25,8 @@ tmp_ret = collect_all('pynwb')
1925
datas += tmp_ret[0]; binaries += tmp_ret[1]; hiddenimports += tmp_ret[2]
2026
tmp_ret = collect_all('hdmf')
2127
datas += tmp_ret[0]; binaries += tmp_ret[1]; hiddenimports += tmp_ret[2]
28+
tmp_ret = collect_all('hdmf_zarr')
29+
datas += tmp_ret[0]; binaries += tmp_ret[1]; hiddenimports += tmp_ret[2]
2230
tmp_ret = collect_all('ndx_dandi_icephys')
2331
datas += tmp_ret[0]; binaries += tmp_ret[1]; hiddenimports += tmp_ret[2]
2432
tmp_ret = collect_all('ci_info')

package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@
2020
"build:mac": "npm run build && npm run build:flask && npm run build:electron:mac",
2121
"build:linux": "npm run build && npm run build:flask && npm run build:electron:linux",
2222
"build:flask": "python -m PyInstaller nwb-guide.spec --log-level DEBUG --clean --noconfirm --distpath ./build/flask",
23-
"build:flask:spec:base": "pyi-makespec --name nwb-guide --onedir --collect-data jsonschema_specifications --collect-all nwbinspector --collect-all neuroconv --collect-all pynwb --collect-all hdmf --collect-all ndx_dandi_icephys --collect-all ci_info --hidden-import scipy._distributor_init --hidden-import scipy._lib.messagestream --hidden-import scipy._lib._ccallback --hidden-import scipy._lib._testutils --hidden-import email_validator ./pyflask/app.py",
23+
"build:flask:spec:base": "pyi-makespec --name nwb-guide --onedir --collect-data jsonschema_specifications --collect-all dandi --collect-all keyrings --collect-all unittest --collect-all nwbinspector --collect-all neuroconv --collect-all pynwb --collect-all hdmf --collect-all hdmf_zarr --collect-all ndx_dandi_icephys --collect-all ci_info --hidden-import scipy._distributor_init --hidden-import scipy._lib.messagestream --hidden-import scipy._lib._ccallback --hidden-import scipy._lib._testutils --hidden-import email_validator ./pyflask/app.py",
2424
"build:flask:spec": "npm run build:flask:spec:base && python prepare_pyinstaller_spec.py",
2525
"build:electron:win": "electron-builder build --win --publish never",
2626
"build:electron:mac": "electron-builder build --mac --publish never",

pyflask/apis/neuroconv.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@
1717
inspect_nwb_file,
1818
inspect_nwb_folder,
1919
inspect_multiple_filesystem_objects,
20-
upload_to_dandi,
20+
upload_project_to_dandi,
2121
upload_folder_to_dandi,
2222
upload_multiple_filesystem_objects_to_dandi,
2323
)
@@ -123,19 +123,19 @@ def post(self):
123123

124124

125125
@neuroconv_api.route("/upload/project")
126-
class Upload(Resource):
126+
class UploadProject(Resource):
127127
@neuroconv_api.doc(responses={200: "Success", 400: "Bad Request", 500: "Internal server error"})
128128
def post(self):
129129
try:
130-
return upload_to_dandi(**neuroconv_api.payload)
130+
return upload_project_to_dandi(**neuroconv_api.payload)
131131

132132
except Exception as e:
133133
if notBadRequestException(e):
134134
neuroconv_api.abort(500, str(e))
135135

136136

137137
@neuroconv_api.route("/upload/folder")
138-
class Upload(Resource):
138+
class UploadFolder(Resource):
139139
@neuroconv_api.doc(responses={200: "Success", 400: "Bad Request", 500: "Internal server error"})
140140
def post(self):
141141
try:

pyflask/manageNeuroconv/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
get_metadata_schema,
77
convert_to_nwb,
88
validate_metadata,
9-
upload_to_dandi,
9+
upload_project_to_dandi,
1010
upload_folder_to_dandi,
1111
upload_multiple_filesystem_objects_to_dandi,
1212
listen_to_neuroconv_events,

pyflask/manageNeuroconv/manage_neuroconv.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -169,8 +169,6 @@ def get_all_converter_info() -> dict:
169169

170170
return {name: derive_interface_info(converter) for name, converter in module_to_dict(converters).items()}
171171

172-
return output
173-
174172

175173
def get_all_interface_info() -> dict:
176174
"""Format an information structure to be used for selecting interfaces based on modality and technique."""
@@ -470,7 +468,7 @@ def update_conversion_progress(**kwargs):
470468

471469

472470
def upload_multiple_filesystem_objects_to_dandi(**kwargs):
473-
tmp_folder_path = aggregate_symlinks_in_new_directory(kwargs["filesystem_paths"], "upload")
471+
tmp_folder_path = _aggregate_symlinks_in_new_directory(kwargs["filesystem_paths"], "upload")
474472
innerKwargs = {**kwargs}
475473
del innerKwargs["filesystem_paths"]
476474
innerKwargs["nwb_folder_path"] = tmp_folder_path
@@ -502,7 +500,7 @@ def upload_folder_to_dandi(
502500
)
503501

504502

505-
def upload_to_dandi(
503+
def upload_project_to_dandi(
506504
dandiset_id: str,
507505
api_key: str,
508506
project: Optional[str] = None,

schemas/json/dandi/upload.json

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -7,15 +7,15 @@
77
"number_of_jobs": {
88
"type": "integer",
99
"title": "Job Count",
10-
"description": "The number of files to upload in parallel. A value of <code>-1</code> uses all available processes",
11-
"default": 1,
10+
"description": "The number of files to upload in parallel. A value of <code>-1</code> uses all available processes.<br><small><b>Note:</b> If you encounter an error for any selector value, particularly a Segmentation Fault error, try a value of <code>1</code> to run the jobs without parallelization.</small>",
11+
"default": -1,
1212
"min": -1
1313
},
1414
"number_of_threads": {
1515
"type": "integer",
1616
"title": "Threads per Job",
1717
"description": "The number of threads to handle each file. A value of <code>-1</code> uses all available threads per process.",
18-
"default": 1,
18+
"default": -1,
1919
"min": -1
2020
},
2121
"cleanup": {

0 commit comments

Comments (0)