
Commit d9334f5

v-goncharenko, AliAbdulHussain, and Sylvain Chevallier authored
Adding motor imagery part of the Lee2019 dataset (rebase) (#170)
* Adding motor imagery part of the Lee2019 datasets
* Correcting some length issues.
* Improving documentation of Lee2019.py. Adding Lee2019_MI in dataset.rst.
* pre-commit
* removed useless option

Co-authored-by: AliAbdulHussain <[email protected]>
Co-authored-by: Sylvain Chevallier <[email protected]>
1 parent 51ab842 commit d9334f5

File tree (4 files changed: +138 −1 lines changed)

  docs/source/conf.py
  docs/source/datasets.rst
  moabb/datasets/Lee2019.py
  moabb/datasets/__init__.py

docs/source/conf.py

Lines changed: 0 additions & 1 deletion

@@ -74,7 +74,6 @@
 sphinx_gallery_conf = {
     "examples_dirs": ["../../examples", "../../tutorials"],
     "gallery_dirs": ["auto_examples", "auto_tutorials"],
-    "backreferences_dir": False,
 }

 # Add any paths that contain templates here, relative to this directory.

docs/source/datasets.rst

Lines changed: 1 addition & 0 deletions

@@ -21,6 +21,7 @@ Motor Imagery Datasets
     BNCI2015001
     BNCI2015004
     Cho2017
+    Lee2019_MI
     MunichMI
     Ofner2017
     PhysionetMI

moabb/datasets/Lee2019.py

Lines changed: 136 additions & 0 deletions (new file, shown in full below)

"""
BMI/OpenBMI dataset (Motor Imagery).
"""

import numpy as np
from mne import create_info
from mne.channels import make_standard_montage
from mne.io import RawArray
from scipy.io import loadmat

from moabb.datasets import download as dl
from moabb.datasets.base import BaseDataset


Lee2019_URL = "ftp://parrot.genomics.cn/gigadb/pub/10.5524/100001_101000/100542/"


class Lee2019_MI(BaseDataset):
    """BMI/OpenBMI Motor Imagery dataset.

    Dataset from Lee et al. 2019 [1]_.

    **Dataset Description**

    EEG signals were recorded with a sampling rate of 1,000 Hz and
    collected with 62 Ag/AgCl electrodes. The EEG amplifier used
    in the experiment was a BrainAmp (Brain Products; Munich,
    Germany). The channels were nasion-referenced and grounded
    to electrode AFz. Additionally, an EMG electrode recorded from
    each flexor digitorum profundus muscle, with the olecranon
    used as reference. The EEG/EMG channel configuration and
    indexing numbers are described in Fig. 1 of [1]_. The impedances
    of the EEG electrodes were maintained below 10 kΩ during the
    entire experiment.

    MI paradigm
    The MI paradigm was designed following a well-established system protocol.
    For all blocks, the first 3 s of each trial began
    with a black fixation cross that appeared at the center of the
    monitor to prepare subjects for the MI task. Afterwards, the subject
    performed the imagery task of grasping with the appropriate
    hand for 4 s when the right or left arrow appeared as a visual cue.
    After each task, the screen remained blank for 6 s (± 1.5 s). The
    experiment consisted of training and test phases; each phase
    had 100 trials with balanced right- and left-hand imagery tasks.
    During the online test phase, the fixation cross appeared at the
    center of the monitor and moved right or left, according to the
    real-time classifier output of the EEG signal.

    References
    ----------
    .. [1] Lee, M. H., Kwon, O. Y., Kim, Y. J., Kim, H. K., Lee, Y. E.,
           Williamson, J., … Lee, S. W. (2019). EEG dataset and OpenBMI
           toolbox for three BCI paradigms: An investigation into BCI
           illiteracy. GigaScience, 8(5), 1–16.
           https://doi.org/10.1093/gigascience/giz002
    """

    def __init__(self):
        super().__init__(
            subjects=list(range(1, 55)),
            sessions_per_subject=2,
            events=dict(left_hand=2, right_hand=1),
            code="Lee2019_MI",
            interval=[0, 4],
            paradigm="imagery",
            doi="10.5524/100542",
        )

    def _get_single_subject_data(self, subject):
        """Return data for a single subject."""

        sessions = {}
        file_path_list = self.data_path(subject)

        for session in range(1, 3):
            data = loadmat(file_path_list[session - 1])

            # Create channel info and montage
            # (field 8 of the EEG_MI_train struct holds the channel names,
            # field 3 the sampling frequency; a "stim" channel is appended
            # to carry the event markers)
            eeg_ch_names = data["EEG_MI_train"][0, 0][8][0]
            ch_names = [elem[0] for elem in eeg_ch_names] + ["stim"]
            ch_types = ["eeg"] * 62 + ["stim"]
            sfreq = data["EEG_MI_train"][0, 0][3][0, 0]
            info = create_info(ch_names=ch_names, ch_types=ch_types, sfreq=sfreq)
            montage = make_standard_montage("standard_1005")

            # Create raw_data of shape (n_trials, n_channels, n_times)
            raw_train_data = np.transpose(data["EEG_MI_train"][0, 0][0], (1, 2, 0))
            raw_test_data = np.transpose(data["EEG_MI_test"][0, 0][0], (1, 2, 0))
            raw_data = np.concatenate([raw_train_data, raw_test_data], axis=0)

            # Create raw_event (field 4 holds the trial labels), one marker
            # at the first sample of each trial
            train_event_id = data["EEG_MI_train"][0, 0][4].ravel()
            test_event_id = data["EEG_MI_test"][0, 0][4].ravel()
            event_id = np.concatenate([train_event_id, test_event_id], axis=0)
            raw_events = np.zeros((raw_data.shape[0], 1, raw_data.shape[2]))
            raw_events[:, 0, 0] = event_id

            # Zero pad the data with 50 samples before and after each trial
            data = np.concatenate([raw_data, raw_events], axis=1)
            zeroshape = (data.shape[0], data.shape[1], 50)
            data = np.concatenate(
                [np.zeros(zeroshape), data, np.zeros(zeroshape)], axis=2
            )

            # Create RawArray by concatenating the trials along the time axis
            raw = RawArray(
                data=np.concatenate(list(data), axis=1), info=info, verbose=False
            )
            raw.set_montage(montage)

            # add the data to sessions
            session_name = "session_{}".format(session)
            sessions[session_name] = {"run_1": raw}

        return sessions

    def data_path(
        self, subject, path=None, force_update=False, update_path=None, verbose=None
    ):
        if subject not in self.subject_list:
            raise ValueError("Invalid subject number")

        subject_paths = []
        for session in range(1, 3):
            url = "{0}session{1}/s{2}/sess{1:02d}_subj{2:02d}_EEG_MI.mat".format(
                Lee2019_URL, session, subject
            )

            data_path = dl.data_path(
                url, "Lee2019_MI", path, force_update, update_path, verbose
            )
            subject_paths.append(data_path)

        return subject_paths

moabb/datasets/__init__.py

Lines changed: 1 addition & 0 deletions

@@ -21,6 +21,7 @@
 
 # flake8: noqa
 from .gigadb import Cho2017
+from .Lee2019 import Lee2019_MI
 from .mpi_mi import MunichMI
 from .physionet_mi import PhysionetMI
 from .schirrmeister2017 import Schirrmeister2017
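
With the import registered in moabb.datasets, the dataset can be plugged into a MOABB paradigm. A sketch using LeftRightImagery is below; the subject list and printed shapes are illustrative.

# Sketch: extracting left/right-hand MI epochs through a MOABB paradigm.
from moabb.datasets import Lee2019_MI
from moabb.paradigms import LeftRightImagery

dataset = Lee2019_MI()
paradigm = LeftRightImagery()

# X: (n_trials, n_channels, n_times) array, y: string labels,
# metadata: DataFrame with subject/session/run columns.
X, y, metadata = paradigm.get_data(dataset=dataset, subjects=[1])
print(X.shape, set(y))  # e.g. (n_trials, 62, n_times), {'left_hand', 'right_hand'}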
