Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
29 changes: 29 additions & 0 deletions .github/workflows/pytest.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
# GitHub Actions workflow: run the project's pytest suite with uv.
# Triggered on every push to main and on every pull request targeting main.
name: Run Tests

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  test:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v5

      # uv manages both the Python toolchain and the virtual environment.
      - name: Install uv
        uses: astral-sh/setup-uv@v7

      - name: Set up Python
        run: uv python install

      # --all-extras --dev pulls in the [dependency-groups] dev deps (pytest, git-annex).
      - name: Install dependencies with uv
        run: uv sync --all-extras --dev

      # Editable install so `uv run pytest` imports the local package sources.
      - name: install local
        run: uv pip install --editable ./

      - name: Run tests with uv
        run: uv run pytest
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@ jsonschema
.DS_Store
empty_log_process_temp.py

tests/**/bids/
tests/test_main_functionality/data/projects/test-project/sub-100
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
Expand Down
2 changes: 0 additions & 2 deletions docs/configuration.md
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,6 @@ lslautobids gen-dv-config
```
_Currently, the package doesn't allow you to have multiple dataverse configurations. This will be added in future versions and can be easily adapted_

However for testing purposes, we create a separate test configuration file `~/.config/lslautobids/test-autobids_config.yaml` which is used when running the tests.

#### Project Configuration (`gen_project_config.py`)
This module generates a project-specific configuration file in TOML format. This file is stored in the `projects/<PROJECT_NAME>/<PROJECT_NAME>_config.toml` file and contains:
- Project metadata: Title, description, license, and authors, etc.
Expand Down
4 changes: 1 addition & 3 deletions docs/developers_documentation.md
Original file line number Diff line number Diff line change
Expand Up @@ -92,9 +92,7 @@ The command to generate the dataverse configuration file is:
```
lslautobids gen-dv-config
```
_Currently, the package doesn't allow you to have multiple dataverse configurations. This will be added in future versions and can be easily adapted_

However for testing purposes, we create a separate test configuration file `~/.config/lslautobids/test-autobids_config.yaml` which is used when running the tests.
_Currently, the package doesn't allow you to have multiple dataverse configurations. This will be added in future versions._

#### 2. Project Configuration (`gen_project_config.py`)
This module generates a project-specific configuration file in TOML format. This file is stored in the `projects/<PROJECT_NAME>/<PROJECT_NAME>_config.toml` file and contains:
Expand Down
3 changes: 1 addition & 2 deletions docs/testing.md
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@ Tests will be added continuously as new features are added and existing features

### Running Tests

To run the tests, navigate to the `tests/` directory and execute:
`python tests/run_all_tests.py`
To run the tests, we recommend using `uv run pytest`. (Caveat: for some reason, tests sometimes fail when they are all run at the same time; in that case you can run them individually, e.g. `uv run pytest tests/testcase/test_main_functionality`, and they will work.)

These tests ensure that each component functions as expected and that the overall pipeline works seamlessly. These tests will also be triggered automatically on each push or PR to the main repository using GitHub Actions.
3 changes: 2 additions & 1 deletion lslautobids/config_globals.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ def __init__(self):
"redo_bids_conversion": False,
"reupload": False,
"redo_other_pc": False,
"push_to_dataverse": True,
}

def init(self, args):
Expand Down Expand Up @@ -55,7 +56,7 @@ def parse_yaml_file(yaml_file):

# Determine config path based on context
if "pytest" in sys.modules:
config_file = os.path.join(os.path.expanduser("~"), ".config/lslautobids/test-autobids_config.yaml")
config_file = "tests/pytest-autobids_config.yaml"
else:
config_file = os.path.join(os.path.expanduser("~"), ".config/lslautobids/autobids_config.yaml")
config = parse_yaml_file(config_file)
Expand Down
31 changes: 17 additions & 14 deletions lslautobids/convert_to_bids_and_upload.py
Original file line number Diff line number Diff line change
Expand Up @@ -364,7 +364,7 @@ def convert_to_bids(self, xdf_path,subject_id,session_id, run_id, task_id,other,
# Validate BIDS data
logger.info("Validating BIDS data...")
# Validate the BIDS data
val = self.validate_bids(bids_root+project_name,subject_id,session_id, logger)
val = self.validate_bids(os.path.join(bids_root,project_name),subject_id,session_id, logger)
return val

def validate_bids(self,bids_path,subject_id,session_id, logger):
Expand Down Expand Up @@ -482,26 +482,29 @@ def bids_process_and_upload(processed_files,logger):
bids.populate_dataset_description_json(project_name, logger)
logger.info('Generating metadatafiles........')
generate_json_file(project_name, logger)
logger.info('Generating dataverse dataset........')

doi, status = create_dataverse(project_name)

logger.info("Creating and adding files to Dataverse dataset...")
create_and_add_files_to_datalad_dataset(bids_root+project_name,status, logger)
logger.info("Creating and adding files to Datalad dataset...")
create_and_add_files_to_datalad_dataset(os.path.join(bids_root,project_name),logger)

if status == 0:
logger.info('Linking dataverse dataset with datalad')
add_sibling_dataverse_in_folder(doi, logger)

if cli_args.yes:
logger.info('Pushing files to dataverse........')
push_files_to_dataverse(project_name, logger)
else:
user_input = get_user_input("Do you want to push the files to Dataverse? ",logger)
if cli_args.push_to_dataverse:
logger.info('Generating dataverse dataset........')
doi, status = create_dataverse(project_name, logger)
if status == 0: # run only if a new dataverse was created
logger.info('Linking dataverse dataset with datalad')
add_sibling_dataverse_in_folder(doi, logger)

if cli_args.yes:
user_input = "y"
else:
user_input = get_user_input("Do you want to push the files to Dataverse? ",logger)

if user_input == "y":
logger.info('Pushing files to dataverse........')
push_files_to_dataverse(project_name, logger)
elif user_input == "n":
logger.info("Program aborted.")
else:
logger.error("Invalid Input.")
else:
logger.info('cli.push_to_dataverse was false, not pushing.')
34 changes: 19 additions & 15 deletions lslautobids/datalad_create.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,29 +2,33 @@
import os


def create_and_add_files_to_datalad_dataset(dataset_path,flag, logger):
def create_and_add_files_to_datalad_dataset(dataset_path,logger):
message = "LSL Auto BIDS: new files found and added"
if flag==0:
#if flag==0:

try:
dl.Dataset(dataset_path)
except:
message ="LSL Auto BIDS: new datalad dataset created"
# Create a new dataset
logger.info('Creating a new datalad dataset........')
try:
dl.create(dataset_path, force=True) # files already exist, so we force it

# make sure only large files are saved
with open(os.path.join(dataset_path,".gitattributes"), "a") as f:
f.write("* annex.largefiles=largerthan=100kb")
f.write("\n*.csv annex.largefiles=nothing")
f.write("\n*.log annex.largefiles=nothing")
f.write("\n*.tsv annex.largefiles=nothing")
f.write("\n*.md annex.largefiles=nothing")
f.write("\n*.json annex.largefiles=nothing")

except:
logger.info("Could not create a new dataset, maybe it exists already?")

# Commit changes
# Change to dataset path
os.chdir(dataset_path)
if flag==0:
# needed to modify participants.tsv etc. later
with open(os.path.join(dataset_path,".gitattributes"), "a") as f:
f.write("* annex.largefiles=largerthan=100kb")
f.write("\n*.csv annex.largefiles=nothing")
f.write("\n*.log annex.largefiles=nothing")
f.write("\n*.tsv annex.largefiles=nothing")
f.write("\n*.md annex.largefiles=nothing")
f.write("\n*.json annex.largefiles=nothing")

# commit current files
logger.info('Committing current changes........')
dl.save(path = '.', message=message)
dl.save(path = dataset_path, message=message)

2 changes: 1 addition & 1 deletion lslautobids/dataverse_dataset_create.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')

def create_dataverse(project_name):
def create_dataverse(project_name, logger):
"""
Creates a Dataverse dataset and returns the PID and dataset ID.

Expand Down
6 changes: 6 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,5 +6,11 @@ build-backend = "setuptools.build_meta"
# Ref: https://github.com/codespell-project/codespell#using-a-config-file
skip = '.git*,*.svg,*.bib'
check-hidden = true

[dependency-groups]
dev = [
"git-annex>=10.20251114",
"pytest>=9.0.1",
]
# ignore-regex = ''
# ignore-words-list = ''
59 changes: 16 additions & 43 deletions ...tcases/test_old_suffix/test_old_suffix.py → tests/conftest.py
Original file line number Diff line number Diff line change
@@ -1,19 +1,12 @@
import os
import sys
import pytest
import yaml


# Compute project root (two levels up from current test.py)
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
import sys,os
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
if PROJECT_ROOT not in sys.path:
sys.path.insert(0, PROJECT_ROOT)
import yaml

from test_utils.path_config import get_root_paths
from path_config import get_root_paths

# Print test file name for traceability
test_file_name = os.path.basename(__file__)
print(f" Running tests in {test_file_name}")

# Dummy CLI argument simulation
class DummyCLIArgs:
Expand All @@ -22,11 +15,11 @@ def __init__(self):
self.yes = True
self.redo_bids_conversion = False
self.redo_other_pc = False
self.push_to_dataverse = False
def init(self, args):
# you can store the args or ignore
pass


@pytest.fixture(scope="function")
def setup_project(monkeypatch):
"""
Expand All @@ -38,13 +31,14 @@ def setup_project(monkeypatch):
project_name = dummy_cli_args.project_name

# Ensure directory exists
os.makedirs(paths["project_root"], exist_ok=True)
#os.makedirs(paths["project_root"], exist_ok=True)

from lslautobids.gen_project_config import main as gen_project_config_main

# Create dummy user config for the test
config_file_test = os.path.join(os.path.expanduser("~"),'.config/lslautobids/test-autobids_config.yaml')
os.makedirs(os.path.dirname(config_file_test), exist_ok=True)
config_file_test = ("tests/pytest-autobids_config.yaml")

#os.makedirs(os.path.dirname(config_file_test), exist_ok=True)
config_data = {
"PROJECT_ROOT": paths["project_root"],
"BIDS_ROOT": paths["bids_root"],
Expand All @@ -60,6 +54,10 @@ def setup_project(monkeypatch):
monkeypatch.setattr("lslautobids.config_globals.project_root", paths["project_root"])
monkeypatch.setattr("lslautobids.config_globals.bids_root", paths["bids_root"])
monkeypatch.setattr("lslautobids.config_globals.project_other_root", paths["project_other_root"])
monkeypatch.setattr("lslautobids.config_globals.dataverse_base_url","https://demodarus.izus.uni-stuttgart.de/")
monkeypatch.setattr("lslautobids.config_globals.api_key","8b6c479e-e85b-4edb-9b8a-5305a9976875")
monkeypatch.setattr("lslautobids.config_globals.parent_dataverse_name","s-ccs")
#monkeypatch.setattr("lslautobids.config_globals.parent_dataverse_name","Institute for Visualization and Interactive Systems")
monkeypatch.setattr("lslautobids.config_globals.cli_args", dummy_cli_args)
monkeypatch.setattr("lslautobids.config_globals.config_file", config_file_test)

Expand All @@ -72,32 +70,7 @@ def setup_project(monkeypatch):

gen_project_config_main()

return paths, project_name


@pytest.mark.filterwarnings("ignore::DeprecationWarning")
def test_process_new_files_with_old_suffix(setup_project, monkeypatch):
"""
Expect the main pipeline to raise RuntimeError when duplicate files are found.
"""
paths, project_name = setup_project

project_toml_path = os.path.join(paths["project_root"], project_name, f"{project_name}_config.toml")

# Reset sys.argv to something that lslautobids.main.main() expects
sys.argv = [
"lslautobids.main",
"-p", project_name,
# other args expected by lslautobids.main.main
]

dummy_cli_args = DummyCLIArgs()
monkeypatch.setattr("lslautobids.config_globals.cli_args", dummy_cli_args)

# Import and run main pipeline, expect a RuntimeError
from lslautobids.main import main as runlslautobids
with pytest.raises(SystemExit, match="Duplicate file detected. Please check the file manually."):
runlslautobids()


yield project_name

# teardown
#os.remove(config_file_test)
27 changes: 27 additions & 0 deletions tests/data/projects/test-project/test-project_config.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@

# This is the project configuration file - This configuration can be customized for each project

[AuthorsInfo]
authors = "John Doe, Lina Doe" # List of authors separated by commas
affiliation = "University of Stuttgart, University of Stuttgart" # Affiliation of the authors in the same order as authors
email = "[email protected]" # Contact email of the authors in the same order as authors

[DataverseDataset]
title = "Convert XDF to BIDS" # Title of the Dataverse dataset. This gets updated automatically by the project name.
datasetDescription = "This is a test project to set up the pipeline to convert XDF to BIDS." # Description of the dataset. This description will appear in the dataset.json file which then eventually gets displayed in the dataverse metadata
license = "MIT License" # License for the dataset, e.g. "CC0", "CC-BY-4.0", "ODC-By-1.0", "PDDL-1.0", "ODC-PDDL-1.0", "MIT License"
subject = ["Medicine, Health and Life Sciences","Engineering"] # List of subjects related to the dataset required for dataverse metadata
pid = '' # Persistent identifier for the dataset, e.g. DOI or Handle. This will be updated automatically after creating the dataset in dataverse.

[OtherFilesInfo]
otherFilesUsed = true # Set to true if you want to include other (non-eeg-files) files (experiment files, other modalities like eye tracking) in the dataset, else false
expectedOtherFiles = [".edf", ".csv", "_labnotebook.tsv", "_participantform.tsv"] # List of expected other file extensions. Only the expected files will be copied to the beh folder in BIDS dataset. Give an empty list [] if you don't want any other files to be in the dataset. In this case only experiment files will be zipeed and copied to the misc folder in BIDS dataset.

[FileSelection]
ignoreSubjects = ['sub-777'] # List of subjects to ignore during the conversion - Leave empty to include all subjects. Changing this value will not delete already existing subjects.
excludeTasks = ['sampletask'] # List of tasks to exclude from the conversion for all subjects - Leave empty to include all tasks. Changing this value will not delete already existing tasks.

[BidsConfig]
anonymizationNumber = 123 # This is an anomization number that will be added to the recording date of all subjects.


35 changes: 35 additions & 0 deletions tests/path_config.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
import os

def get_root_paths(test_file: str) -> dict:
    """Return the test-data root paths for the test module at *test_file*.

    Parameters
    ----------
    test_file : str
        The ``__file__`` of the calling test module; all paths are resolved
        relative to that module's directory, not this helper's location.

    Returns
    -------
    dict
        Mapping with keys ``project_root``, ``bids_root`` and
        ``project_other_root``, all located under the caller's ``data/``
        directory.
    """
    # Anchor on the caller's directory so each test folder gets its own data/.
    base_dir = os.path.abspath(os.path.join(os.path.dirname(test_file), "data"))

    print(f'The base dir in the get_root_paths function is "{base_dir}"')
    return {
        "project_root": os.path.join(base_dir, "projects"),
        "bids_root": os.path.join(base_dir, "bids"),
        "project_other_root": os.path.join(base_dir, "project_other"),
    }



def monkeypatch_paths(monkeypatch, paths):
    """Patch the cached root-path globals of all lslautobids modules.

    Several lslautobids modules copy ``project_root``, ``bids_root`` and
    ``project_other_root`` into module-level names at import time; this
    helper points every one of them at the test directories in *paths*.
    """
    # (key in *paths*, fully-qualified attribute to patch), applied in the
    # same order as the original explicit setattr calls.
    targets = [
        ("project_root", "lslautobids.config_globals.project_root"),
        ("project_root", "lslautobids.convert_to_bids_and_upload.project_root"),
        ("project_root", "lslautobids.generate_dataset_json.project_root"),
        ("project_root", "lslautobids.main.project_root"),
        ("project_root", "lslautobids.processing_new_files.project_root"),
        ("bids_root", "lslautobids.config_globals.bids_root"),
        ("bids_root", "lslautobids.convert_to_bids_and_upload.bids_root"),
        ("bids_root", "lslautobids.main.bids_root"),
        ("bids_root", "lslautobids.config_logger.bids_root"),
        ("project_other_root", "lslautobids.config_globals.project_other_root"),
        ("project_other_root", "lslautobids.convert_to_bids_and_upload.project_other_root"),
    ]
    for key, target in targets:
        monkeypatch.setattr(target, paths[key])
3 changes: 3 additions & 0 deletions tests/pytest-autobids_config.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
BIDS_ROOT: /home/behinger/projects/LSLAutoBIDS/tests/data/bids
PROJECT_OTHER_ROOT: /home/behinger/projects/LSLAutoBIDS/tests/data/project_other
PROJECT_ROOT: /home/behinger/projects/LSLAutoBIDS/tests/data/projects
Loading