Commit 5bec6b5

[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent bbc4bd2 commit 5bec6b5

16 files changed, +63 -87 lines
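
The commit message above says these are automated fixes applied by the project's pre-commit hooks. Judging from the hunks below, the changes are purely cosmetic f-string cleanups of two kinds: adjacent, implicitly concatenated f-string fragments are merged into a single f-string, and quotes are normalized so the f-string uses double quotes on the outside and single quotes inside replacement fields. The sketch below illustrates the pattern; which formatter produced it (for example Black or Ruff) is an assumption, and the variable names are made up for illustration only.

# Illustrative only: `bads` is a made-up stand-in for values such as raw.info["bads"].
bads = ["MEG 0112", "MEG 0131"]

# Before: two implicitly concatenated f-strings, with double quotes nested inside
# the replacement field of a single-quoted f-string.
msg_old = f"The following channels are marked as bad:\n" f' {", ".join(bads)}\n'

# After: one f-string, double quotes outside, single quotes inside the replacement field.
msg_new = f"The following channels are marked as bad:\n {', '.join(bads)}\n"

# The rewrite does not change runtime behavior; both strings are identical.
assert msg_old == msg_new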

examples/mark_bad_channels.py

Lines changed: 4 additions & 4 deletions
@@ -118,7 +118,7 @@
 raw = read_raw_bids(bids_path=bids_path, verbose=False)
 print(
     f"The following channels are currently marked as bad:\n"
-    f' {", ".join(raw.info["bads"])}\n'
+    f" {', '.join(raw.info['bads'])}\n"
 )

 # %%
@@ -137,7 +137,7 @@
 raw = read_raw_bids(bids_path=bids_path, verbose=False)
 print(
     f"After marking MEG 0112 and MEG 0131 as bad, the following channels "
-    f'are now marked as bad:\n {", ".join(raw.info["bads"])}\n'
+    f"are now marked as bad:\n {', '.join(raw.info['bads'])}\n"
 )

 # %%
@@ -159,7 +159,7 @@
 print(
     f"After marking MEG 0112 and MEG 0131 as bad and passing "
     f"`overwrite=True`, the following channels "
-    f'are now marked as bad:\n {", ".join(raw.info["bads"])}\n'
+    f"are now marked as bad:\n {', '.join(raw.info['bads'])}\n"
 )

 # %%
@@ -172,5 +172,5 @@
 raw = read_raw_bids(bids_path=bids_path, verbose=False)
 print(
     f"After passing `ch_names=[]` and `overwrite=True`, the following "
-    f'channels are now marked as bad:\n {", ".join(raw.info["bads"])}\n'
+    f"channels are now marked as bad:\n {', '.join(raw.info['bads'])}\n"
 )

mne_bids/commands/mne_bids_calibration_to_bids.py

Lines changed: 1 addition & 3 deletions
@@ -44,9 +44,7 @@ def run():
     opt, args = parser.parse_args()
     if args:
         parser.print_help()
-        parser.error(
-            f"Please do not specify arguments without flags. " f"Got: {args}.\n"
-        )
+        parser.error(f"Please do not specify arguments without flags. Got: {args}.\n")

     if opt.bids_root is None:
         parser.print_help()

mne_bids/commands/mne_bids_inspect.py

Lines changed: 1 addition & 3 deletions
@@ -72,9 +72,7 @@ def run():
     opt, args = parser.parse_args()
     if args:
         parser.print_help()
-        parser.error(
-            f"Please do not specify arguments without flags. " f"Got: {args}.\n"
-        )
+        parser.error(f"Please do not specify arguments without flags. Got: {args}.\n")

     if opt.bids_root is None:
         parser.print_help()

mne_bids/commands/mne_bids_mark_channels.py

Lines changed: 1 addition & 1 deletion
@@ -126,7 +126,7 @@ def run():
         return  # XXX should be return with an error code?

     logger.info(
-        f'Marking channels {", ".join(ch_names)} as bad in '
+        f"Marking channels {', '.join(ch_names)} as bad in "
         f"{len(bids_paths)} recording(s) …"
     )
     for bids_path in bids_paths:

mne_bids/config.py

Lines changed: 1 addition & 2 deletions
@@ -530,8 +530,7 @@ def _map_options(what, key, fro, to):
         mapped_option = mapping_mne_bids.get(key, None)
     else:
         raise RuntimeError(
-            f"fro value {fro} and to value {to} are not "
-            "accepted. Use 'mne', or 'bids'."
+            f"fro value {fro} and to value {to} are not accepted. Use 'mne', or 'bids'."
         )

     return mapped_option

mne_bids/copyfiles.py

Lines changed: 5 additions & 5 deletions
@@ -82,7 +82,7 @@ def _get_brainvision_paths(vhdr_path):
     """
     fname, ext = _parse_ext(vhdr_path)
     if ext != ".vhdr":
-        raise ValueError(f'Expecting file ending in ".vhdr",' f" but got {ext}")
+        raise ValueError(f'Expecting file ending in ".vhdr", but got {ext}')

     # Header file seems fine
     # extract encoding from brainvision header file, or default to utf-8
@@ -96,14 +96,14 @@ def _get_brainvision_paths(vhdr_path):
     eeg_file_match = re.search(r"DataFile=(.*\.(eeg|dat))", " ".join(lines))

     if not eeg_file_match:
-        raise ValueError("Could not find a .eeg or .dat file link in" f" {vhdr_path}")
+        raise ValueError(f"Could not find a .eeg or .dat file link in {vhdr_path}")
     else:
         eeg_file = eeg_file_match.groups()[0]

     # Try to find marker file .vmrk
     vmrk_file_match = re.search(r"MarkerFile=(.*\.vmrk)", " ".join(lines))
     if not vmrk_file_match:
-        raise ValueError("Could not find a .vmrk file link in" f" {vhdr_path}")
+        raise ValueError(f"Could not find a .vmrk file link in {vhdr_path}")
     else:
         vmrk_file = vmrk_file_match.groups()[0]

@@ -275,7 +275,7 @@ def _anonymize_brainvision(vhdr_file, date):

     # Go through VHDR
     pattern = re.compile(r"^Impedance \[kOhm\] at \d\d:\d\d:\d\d :$")
-    replace = f'at {date.strftime("%H:%M:%S")} :'
+    replace = f"at {date.strftime('%H:%M:%S')} :"
     _replace_file(vhdr_file, pattern, replace)


@@ -544,7 +544,7 @@ def copyfile_eeglab(src, dest):
     fname_dest, ext_dest = _parse_ext(dest)
     if ext_src != ext_dest:
         raise ValueError(
-            f"Need to move data with same extension" f" but got {ext_src}, {ext_dest}"
+            f"Need to move data with same extension but got {ext_src}, {ext_dest}"
         )

     # Load the EEG struct

mne_bids/dig.py

Lines changed: 2 additions & 2 deletions
@@ -45,7 +45,7 @@ def _handle_electrodes_reading(electrodes_fname, coord_frame, coord_unit):

     Handle xyz coordinates and coordinate frame of each channel.
     """
-    logger.info("Reading electrode " f"coords from {electrodes_fname}.")
+    logger.info(f"Reading electrode coords from {electrodes_fname}.")
    electrodes_dict = _from_tsv(electrodes_fname)
    ch_names_tsv = electrodes_dict["name"]

@@ -142,7 +142,7 @@ def _write_electrodes_tsv(raw, fname, datatype, overwrite=False):
     x, y, z, names = list(), list(), list(), list()
     for ch in raw.info["chs"]:
         if ch["kind"] == FIFF.FIFFV_STIM_CH:
-            logger.debug(f"Not writing stim chan {ch['ch_name']} " f"to electrodes.tsv")
+            logger.debug(f"Not writing stim chan {ch['ch_name']} to electrodes.tsv")
             continue
         elif np.isnan(ch["loc"][:3]).any() or np.allclose(ch["loc"][:3], 0):
             x.append("n/a")

mne_bids/path.py

Lines changed: 11 additions & 12 deletions
@@ -905,9 +905,9 @@ def fpath(self):
             msg = (
                 "Found more than one matching data file for the "
                 "requested recording. While searching:\n"
-                f'{indent(repr(self), " ")}\n'
+                f"{indent(repr(self), ' ')}\n"
                 f"Found {len(matching_paths)} paths:\n"
-                f'{indent(matching_paths_str, " ")}\n'
+                f"{indent(matching_paths_str, ' ')}\n"
                 "Cannot proceed due to the "
                 "ambiguity. This is likely a problem with your "
                 "BIDS dataset. Please run the BIDS validator on "
@@ -1005,7 +1005,7 @@ def update(self, *, check=None, **kwargs):

             if key not in ENTITY_VALUE_TYPE:
                 raise ValueError(
-                    f"Key must be one of " f"{ALLOWED_PATH_ENTITIES}, got {key}"
+                    f"Key must be one of {ALLOWED_PATH_ENTITIES}, got {key}"
                 )

             if ENTITY_VALUE_TYPE[key] == "label":
@@ -1152,7 +1152,7 @@ def _check(self):
             allowed_spaces_for_dtype = ALLOWED_SPACES.get(datatype, None)
             if allowed_spaces_for_dtype is None:
                 raise ValueError(
-                    f"space entity is not valid for datatype " f"{self.datatype}"
+                    f"space entity is not valid for datatype {self.datatype}"
                 )
             elif space not in allowed_spaces_for_dtype:
                 raise ValueError(
@@ -1462,7 +1462,9 @@ def _truncate_tsv_line(line, lim=10):
     """Truncate a line to the specified number of characters."""
     return "".join(
         [
-            str(val) + (lim - len(val)) * " " if len(val) < lim else f"{val[:lim - 1]} "
+            str(val) + (lim - len(val)) * " "
+            if len(val) < lim
+            else f"{val[: lim - 1]} "
             for val in line.split("\t")
         ]
     )
@@ -1762,7 +1764,7 @@ def get_entities_from_fname(fname, on_error="raise", verbose=None):

         if on_error in ("raise", "warn"):
             if key not in fname_vals:
-                msg = f'Unexpected entity "{key}" found in ' f'filename "{fname}"'
+                msg = f'Unexpected entity "{key}" found in filename "{fname}"'
                 if on_error == "raise":
                     raise KeyError(msg)
                 elif on_error == "warn":
@@ -1864,10 +1866,7 @@ def _find_matching_sidecar(bids_path, suffix=None, extension=None, on_error="rai
     # If this was expected, simply return None, otherwise, raise an exception.
     msg = None
     if len(best_candidates) == 0:
-        msg = (
-            f"Did not find any {search_suffix} "
-            f"associated with {bids_path.basename}."
-        )
+        msg = f"Did not find any {search_suffix} associated with {bids_path.basename}."
     elif len(best_candidates) > 1:
         # More than one candidates were tied for best match
         msg = (
@@ -2085,7 +2084,7 @@ def get_entity_vals(

     if entity_key not in entities:
         raise ValueError(
-            f'`key` must be one of: {", ".join(entities)}. ' f"Got: {entity_key}"
+            f"`key` must be one of: {', '.join(entities)}. Got: {entity_key}"
         )

     ignore_subjects = _ensure_tuple(ignore_subjects)
@@ -2530,7 +2529,7 @@ def _return_root_paths(root, datatype=None, ignore_json=True, ignore_nosub=False

     if datatype is not None:
         datatype = _ensure_tuple(datatype)
-        search_str = f'*/{"|".join(datatype)}/*'
+        search_str = f"*/{'|'.join(datatype)}/*"
     else:
         search_str = "*.*"
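
The only hunk above that does more than swap quotes is the one in _truncate_tsv_line, where the formatter splits a long conditional expression across three lines. As a reading aid, here is a self-contained sketch of that expression with a made-up input line; the example values are illustrative, not taken from the commit.

def _truncate_tsv_line(line, lim=10):
    # Same list comprehension as in the hunk above: values shorter than `lim`
    # are right-padded to `lim` characters, longer values are cut to `lim - 1`
    # characters plus a trailing space.
    return "".join(
        [
            str(val) + (lim - len(val)) * " "
            if len(val) < lim
            else f"{val[: lim - 1]} "
            for val in line.split("\t")
        ]
    )


print(repr(_truncate_tsv_line("sub-01\tses-longsessionname\tmeg")))
# -> 'sub-01    ses-longs meg       '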

mne_bids/read.py

Lines changed: 9 additions & 11 deletions
@@ -126,7 +126,7 @@ def _read_events(events, event_id, raw, bids_path=None):
     # retrieve events
     if isinstance(events, np.ndarray):
         if events.ndim != 2:
-            raise ValueError("Events must have two dimensions, " f"found {events.ndim}")
+            raise ValueError(f"Events must have two dimensions, found {events.ndim}")
         if events.shape[1] != 3:
             raise ValueError(
                 "Events must have second dimension of length 3, "
@@ -164,7 +164,7 @@
                 f"The provided raw data contains annotations, but "
                 f'"event_id" does not contain entries for all annotation '
                 f"descriptions. The following entries are missing: "
-                f'{", ".join(desc_without_id)}'
+                f"{', '.join(desc_without_id)}"
             )

     # If we have events, convert them to Annotations so they can be easily
@@ -174,7 +174,7 @@
         if ids_without_desc:
             raise ValueError(
                 f"No description was specified for the following event(s): "
-                f'{", ".join([str(x) for x in sorted(ids_without_desc)])}. '
+                f"{', '.join([str(x) for x in sorted(ids_without_desc)])}. "
                 f"Please add them to the event_id dictionary, or drop them "
                 f"from the events array."
             )
@@ -409,9 +409,7 @@ def _handle_scans_reading(scans_fname, raw, bids_path):
         # Convert time offset to UTC
         acq_time = acq_time.astimezone(timezone.utc)

-        logger.debug(
-            f"Loaded {scans_fname} scans file to set " f"acq_time as {acq_time}."
-        )
+        logger.debug(f"Loaded {scans_fname} scans file to set acq_time as {acq_time}.")
     # First set measurement date to None and then call call anonymize() to
     # remove any traces of the measurement date we wish
     # to replace – it might lurk out in more places than just
@@ -728,7 +726,7 @@ def _handle_channels_reading(channels_fname, raw):
     if ch_diff:
         warn(
             f"Cannot set channel type for the following channels, as they "
-            f'are missing in the raw data: {", ".join(sorted(ch_diff))}'
+            f"are missing in the raw data: {', '.join(sorted(ch_diff))}"
         )
     raw.set_channel_types(
         channel_type_bids_mne_map_available_channels, on_unit_change="ignore"
@@ -744,7 +742,7 @@
         warn(
             f'Cannot set "bad" status for the following channels, as '
             f"they are missing in the raw data: "
-            f'{", ".join(sorted(ch_diff))}'
+            f"{', '.join(sorted(ch_diff))}"
         )

     raw.info["bads"] = bads_avail
@@ -875,7 +873,7 @@
         and raw_path.is_symlink()
     ):
         target_path = raw_path.resolve()
-        logger.info(f"Resolving symbolic link: " f"{raw_path} -> {target_path}")
+        logger.info(f"Resolving symbolic link: {raw_path} -> {target_path}")
         raw_path = target_path
         config_path = None

@@ -1124,7 +1122,7 @@

     if t1w_json_path is None or not t1w_json_path.exists():
         raise FileNotFoundError(
-            f"Did not find T1w JSON sidecar file, tried location: " f"{t1w_json_path}"
+            f"Did not find T1w JSON sidecar file, tried location: {t1w_json_path}"
         )
     for extension in (".nii", ".nii.gz"):
         t1w_path_candidate = t1w_json_path.with_suffix(extension)
@@ -1135,7 +1133,7 @@
     if not t1w_bids_path.fpath.exists():
         raise FileNotFoundError(
             f"Did not find T1w recording file, tried location: "
-            f'{t1w_path_candidate.name.replace(".nii.gz", "")}[.nii, .nii.gz]'
+            f"{t1w_path_candidate.name.replace('.nii.gz', '')}[.nii, .nii.gz]"
         )

     # Get MRI landmarks from the JSON sidecar

mne_bids/report/_report.py

Lines changed: 1 addition & 1 deletion
@@ -260,7 +260,7 @@ def _summarize_scans(root, session=None):
     if session is None:
         search_str = "*_scans.tsv"
     else:
-        search_str = f"*ses-{session}" f"*_scans.tsv"
+        search_str = f"*ses-{session}*_scans.tsv"
     scans_fpaths = list(root.rglob(search_str))
     if len(scans_fpaths) == 0:
         warn(
