Skip to content

Commit a7b4b36

Browse files
authored
Merge pull request #45 from s-ccs/feature-new_test
Feature new test / slight refactor / new CLI
2 parents 862bf61 + d7c1dd5 commit a7b4b36

40 files changed

+400
-130
lines changed

.github/workflows/pytest.yml

Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,29 @@
1+
name: Run Tests
2+
3+
on:
4+
push:
5+
branches: [ main ]
6+
pull_request:
7+
branches: [ main ]
8+
9+
jobs:
10+
test:
11+
runs-on: ubuntu-latest
12+
13+
steps:
14+
- name: Checkout code
15+
uses: actions/checkout@v5
16+
17+
- name: Install uv
18+
uses: astral-sh/setup-uv@v7
19+
20+
- name: Set up Python
21+
run: uv python install
22+
23+
- name: Install dependencies with uv
24+
run: uv sync --all-extras --dev
25+
- name: install local
26+
run: uv pip install --editable ./
27+
28+
- name: Run tests with uv
29+
run: uv run pytest

.gitignore

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,8 @@ jsonschema
77
.DS_Store
88
empty_log_process_temp.py
99

10+
tests/**/bids/
11+
tests/test_main_functionality/data/projects/test-project/sub-100
1012
# Byte-compiled / optimized / DLL files
1113
__pycache__/
1214
*.py[cod]

docs/configuration.md

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,6 @@ lslautobids gen-dv-config
1717
```
1818
_Currently, the package doesn't allow you to have multiple dataverse configurations. This will be added in future versions and can be easily adapted_
1919

20-
However for testing purposes, we create a separate test configuration file `~/.config/lslautobids/test-autobids_config.yaml` which is used when running the tests.
21-
2220
#### Project Configuration (`gen_project_config.py`)
2321
This module generates a project-specific configuration file in TOML format. This file is stored in the `projects/<PROJECT_NAME>/<PROJECT_NAME>_config.toml` file and contains:
2422
- Project metadata: Title, description, license, and authors, etc.

docs/developers_documentation.md

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -92,9 +92,7 @@ The command to generate the dataverse configuration file is:
9292
```
9393
lslautobids gen-dv-config
9494
```
95-
_Currently, the package doesn't allow you to have multiple dataverse configurations. This will be added in future versions and can be easily adapted_
96-
97-
However for testing purposes, we create a separate test configuration file `~/.config/lslautobids/test-autobids_config.yaml` which is used when running the tests.
95+
_Currently, the package doesn't allow you to have multiple dataverse configurations. This will be added in future versions._
9896

9997
#### 2. Project Configuration (`gen_project_config.py`)
10098
This module generates a project-specific configuration file in TOML format. This file is stored in the `projects/<PROJECT_NAME>/<PROJECT_NAME>_config.toml` file and contains:

docs/testing.md

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,6 @@ Tests will be added continuously as new features are added and existing features
1414

1515
### Running Tests
1616

17-
To run the tests, navigate to the `tests/` directory and execute:
18-
`python tests/run_all_tests.py`
17+
To run the tests, we recommend using `uv run pytest`. (Caveat: for some reason, tests occasionally fail when all run at the same time. In that case, run them individually, e.g. `uv run pytest tests/testcase/test_main_functionality`, and they will pass.)
1918

2019
These tests ensure that each component functions as expected and that the overall pipeline works seamlessly. This tests will also be triggered automatically on each push or PR to the main repository using GitHub Actions.

lslautobids/config_globals.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@ def __init__(self):
1313
"redo_bids_conversion": False,
1414
"reupload": False,
1515
"redo_other_pc": False,
16+
"push_to_dataverse": True,
1617
}
1718

1819
def init(self, args):
@@ -55,7 +56,7 @@ def parse_yaml_file(yaml_file):
5556

5657
# Determine config path based on context
5758
if "pytest" in sys.modules:
58-
config_file = os.path.join(os.path.expanduser("~"), ".config/lslautobids/test-autobids_config.yaml")
59+
config_file = "tests/pytest-autobids_config.yaml"
5960
else:
6061
config_file = os.path.join(os.path.expanduser("~"), ".config/lslautobids/autobids_config.yaml")
6162
config = parse_yaml_file(config_file)

lslautobids/convert_to_bids_and_upload.py

Lines changed: 17 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -364,7 +364,7 @@ def convert_to_bids(self, xdf_path,subject_id,session_id, run_id, task_id,other,
364364
# Validate BIDS data
365365
logger.info("Validating BIDS data...")
366366
# Validate the BIDS data
367-
val = self.validate_bids(bids_root+project_name,subject_id,session_id, logger)
367+
val = self.validate_bids(os.path.join(bids_root,project_name),subject_id,session_id, logger)
368368
return val
369369

370370
def validate_bids(self,bids_path,subject_id,session_id, logger):
@@ -482,26 +482,29 @@ def bids_process_and_upload(processed_files,logger):
482482
bids.populate_dataset_description_json(project_name, logger)
483483
logger.info('Generating metadatafiles........')
484484
generate_json_file(project_name, logger)
485-
logger.info('Generating dataverse dataset........')
486485

487-
doi, status = create_dataverse(project_name)
488486

489-
logger.info("Creating and adding files to Dataverse dataset...")
490-
create_and_add_files_to_datalad_dataset(bids_root+project_name,status, logger)
487+
logger.info("Creating and adding files to Datalad dataset...")
488+
create_and_add_files_to_datalad_dataset(os.path.join(bids_root,project_name),logger)
491489

492-
if status == 0:
493-
logger.info('Linking dataverse dataset with datalad')
494-
add_sibling_dataverse_in_folder(doi, logger)
495-
496-
if cli_args.yes:
497-
logger.info('Pushing files to dataverse........')
498-
push_files_to_dataverse(project_name, logger)
499-
else:
500-
user_input = get_user_input("Do you want to push the files to Dataverse? ",logger)
490+
if cli_args.push_to_dataverse:
491+
logger.info('Generating dataverse dataset........')
492+
doi, status = create_dataverse(project_name, logger)
493+
if status == 0: # run only if a new dataverse was created
494+
logger.info('Linking dataverse dataset with datalad')
495+
add_sibling_dataverse_in_folder(doi, logger)
496+
497+
if cli_args.yes:
498+
user_input = "y"
499+
else:
500+
user_input = get_user_input("Do you want to push the files to Dataverse? ",logger)
501+
501502
if user_input == "y":
502503
logger.info('Pushing files to dataverse........')
503504
push_files_to_dataverse(project_name, logger)
504505
elif user_input == "n":
505506
logger.info("Program aborted.")
506507
else:
507508
logger.error("Invalid Input.")
509+
else:
510+
logger.info('cli.push_to_dataverse was false, not pushing.')

lslautobids/datalad_create.py

Lines changed: 19 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -2,29 +2,33 @@
22
import os
33

44

5-
def create_and_add_files_to_datalad_dataset(dataset_path,flag, logger):
5+
def create_and_add_files_to_datalad_dataset(dataset_path,logger):
66
message = "LSL Auto BIDS: new files found and added"
7-
if flag==0:
7+
#if flag==0:
8+
9+
try:
10+
dl.Dataset(dataset_path)
11+
except:
812
message ="LSL Auto BIDS: new datalad dataset created"
913
# Create a new dataset
1014
logger.info('Creating a new datalad dataset........')
1115
try:
1216
dl.create(dataset_path, force=True) # files already exist, so we enforce it
17+
18+
# make sure only large files are saved
19+
with open(os.path.join(dataset_path,".gitattributes"), "a") as f:
20+
f.write("* annex.largefiles=largerthan=100kb")
21+
f.write("\n*.csv annex.largefiles=nothing")
22+
f.write("\n*.log annex.largefiles=nothing")
23+
f.write("\n*.tsv annex.largefiles=nothing")
24+
f.write("\n*.md annex.largefiles=nothing")
25+
f.write("\n*.json annex.largefiles=nothing")
26+
1327
except:
1428
logger.info("Could not create a new dataset, maybe it exists already?")
1529

16-
# Commit changes
17-
# Change to dataset path
18-
os.chdir(dataset_path)
19-
if flag==0:
20-
# needed to modify participants.tsv etc. later
21-
with open(os.path.join(dataset_path,".gitattributes"), "a") as f:
22-
f.write("* annex.largefiles=largerthan=100kb")
23-
f.write("\n*.csv annex.largefiles=nothing")
24-
f.write("\n*.log annex.largefiles=nothing")
25-
f.write("\n*.tsv annex.largefiles=nothing")
26-
f.write("\n*.md annex.largefiles=nothing")
27-
f.write("\n*.json annex.largefiles=nothing")
30+
31+
# commit current files
2832
logger.info('Committing current changes........')
29-
dl.save(path = '.', message=message)
33+
dl.save(path = dataset_path, message=message)
3034

lslautobids/dataverse_dataset_create.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
logger = logging.getLogger(__name__)
1313
logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')
1414

15-
def create_dataverse(project_name):
15+
def create_dataverse(project_name, logger):
1616
"""
1717
Creates a Dataverse dataset and returns the PID and dataset ID.
1818

pyproject.toml

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,5 +6,11 @@ build-backend = "setuptools.build_meta"
66
# Ref: https://github.com/codespell-project/codespell#using-a-config-file
77
skip = '.git*,*.svg,*.bib'
88
check-hidden = true
9+
10+
[dependency-groups]
11+
dev = [
12+
"git-annex>=10.20251114",
13+
"pytest>=9.0.1",
14+
]
915
# ignore-regex = ''
1016
# ignore-words-list = ''

0 commit comments

Comments
 (0)