Skip to content

Commit 6e2aa70

Browse files
committed
Updated scripts
1 parent 1bbc61b commit 6e2aa70

File tree

10 files changed

+1617
-0
lines changed

10 files changed

+1617
-0
lines changed
Lines changed: 66 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,66 @@
1+
// MatLab Sphere coordinates [degrees] Cartesian coordinates
2+
// Label Theta Phi Radius X Y Z off sphere surface
3+
1 90.000 73.000 1.000 0.0000 0.2924 0.9563 0.00000000000000000
4+
2 120.000 56.000 1.000 -0.2796 0.4843 0.8290 0.00000000000000000
5+
3 180.000 56.000 1.000 -0.5592 0.0000 0.8290 0.00000000000000000
6+
4 146.000 39.000 1.000 -0.6443 0.4346 0.6293 -0.00000000000000022
7+
5 102.000 39.000 1.000 -0.1616 0.7602 0.6293 0.00000000000000000
8+
6 126.000 22.000 1.000 -0.5450 0.7501 0.3746 0.00000000000000000
9+
7 162.000 22.000 1.000 -0.8818 0.2865 0.3746 0.00000000000000000
10+
8 168.000 5.000 1.000 -0.9744 0.2071 0.0872 0.00000000000000000
11+
9 134.000 5.000 1.000 -0.6920 0.7166 0.0872 0.00000000000000000
12+
10 99.000 5.000 1.000 -0.1558 0.9839 0.0872 0.00000000000000000
13+
11 122.000 -10.000 1.000 -0.5219 0.8352 -0.1736 -0.00000000000000011
14+
12 154.000 -12.000 1.000 -0.8792 0.4288 -0.2079 0.00000000000000000
15+
13 170.000 -29.000 1.000 -0.8613 0.1519 -0.4848 -0.00000000000000022
16+
14 136.000 -29.000 1.000 -0.6291 0.6076 -0.4848 -0.00000000000000011
17+
15 144.000 -46.000 1.000 -0.5620 0.4083 -0.7193 0.00000000000000000
18+
16 110.000 -61.000 1.000 -0.1658 0.4556 -0.8746 0.00000000000000000
19+
17 -90.000 73.000 1.000 0.0000 -0.2924 0.9563 0.00000000000000000
20+
18 240.000 56.000 1.000 -0.2796 -0.4843 0.8290 0.00000000000000000
21+
19 258.000 39.000 1.000 -0.1616 -0.7602 0.6293 0.00000000000000000
22+
20 214.000 39.000 1.000 -0.6443 -0.4346 0.6293 0.00000000000000000
23+
21 198.000 22.000 1.000 -0.8818 -0.2865 0.3746 0.00000000000000022
24+
22 234.000 22.000 1.000 -0.5450 -0.7501 0.3746 0.00000000000000000
25+
23 -90.000 22.000 1.000 0.0000 -0.9272 0.3746 0.00000000000000000
26+
24 236.000 5.000 1.000 -0.5571 -0.8259 0.0872 0.00000000000000000
27+
25 202.000 5.000 1.000 -0.9237 -0.3732 0.0872 0.00000000000000000
28+
26 190.000 -12.000 1.000 -0.9633 -0.1699 -0.2079 0.00000000000000000
29+
27 222.000 -12.000 1.000 -0.7269 -0.6545 -0.2079 0.00000000000000022
30+
28 254.000 -12.000 1.000 -0.2696 -0.9403 -0.2079 0.00000000000000000
31+
29 238.000 -29.000 1.000 -0.4635 -0.7417 -0.4848 -0.00000000000000011
32+
30 206.000 -29.000 1.000 -0.7861 -0.3834 -0.4848 -0.00000000000000011
33+
31 191.000 -46.000 1.000 -0.6819 -0.1325 -0.7193 0.00000000000000000
34+
32 251.000 -46.000 1.000 -0.2262 -0.6568 -0.7193 0.00000000000000000
35+
33 0.000 90.000 1.000 0.0000 0.0000 1.0000 0.00000000000000000
36+
34 60.000 56.000 1.000 0.2796 0.4843 0.8290 0.00000000000000000
37+
35 0.000 56.000 1.000 0.5592 0.0000 0.8290 0.00000000000000000
38+
36 34.000 39.000 1.000 0.6443 0.4346 0.6293 -0.00000000000000022
39+
37 79.000 39.000 1.000 0.1483 0.7629 0.6293 0.00000000000000000
40+
38 90.000 22.000 1.000 0.0000 0.9272 0.3746 0.00000000000000000
41+
39 54.000 22.000 1.000 0.5450 0.7501 0.3746 0.00000000000000000
42+
40 18.000 22.000 1.000 0.8818 0.2865 0.3746 0.00000000000000000
43+
41 12.000 5.000 1.000 0.9744 0.2071 0.0872 0.00000000000000000
44+
42 46.000 5.000 1.000 0.6920 0.7166 0.0872 -0.00000000000000011
45+
43 81.000 5.000 1.000 0.1558 0.9839 0.0872 0.00000000000000000
46+
44 58.000 -10.000 1.000 0.5219 0.8352 -0.1736 0.00000000000000000
47+
45 26.000 -12.000 1.000 0.8792 0.4288 -0.2079 0.00000000000000022
48+
46 10.000 -29.000 1.000 0.8613 0.1519 -0.4848 -0.00000000000000022
49+
47 44.000 -29.000 1.000 0.6291 0.6076 -0.4848 -0.00000000000000011
50+
48 36.000 -46.000 1.000 0.5620 0.4083 -0.7193 0.00000000000000000
51+
49 -60.000 56.000 1.000 0.2796 -0.4843 0.8290 0.00000000000000000
52+
50 -78.000 39.000 1.000 0.1616 -0.7602 0.6293 0.00000000000000000
53+
51 -34.000 39.000 1.000 0.6443 -0.4346 0.6293 -0.00000000000000022
54+
52 -18.000 22.000 1.000 0.8818 -0.2865 0.3746 0.00000000000000000
55+
53 -54.000 22.000 1.000 0.5450 -0.7501 0.3746 0.00000000000000000
56+
54 -90.000 5.000 1.000 0.0000 -0.9962 0.0872 0.00000000000000000
57+
55 -56.000 5.000 1.000 0.5571 -0.8259 0.0872 0.00000000000000022
58+
56 -22.000 5.000 1.000 0.9237 -0.3732 0.0872 0.00000000000000000
59+
57 -10.000 -12.000 1.000 0.9633 -0.1699 -0.2079 -0.00000000000000011
60+
58 -42.000 -12.000 1.000 0.7269 -0.6545 -0.2079 0.00000000000000022
61+
59 -74.000 -12.000 1.000 0.2696 -0.9403 -0.2079 0.00000000000000022
62+
60 -90.000 -29.000 1.000 0.0000 -0.8746 -0.4848 -0.00000000000000011
63+
61 -58.000 -29.000 1.000 0.4635 -0.7417 -0.4848 -0.00000000000000011
64+
62 -26.000 -29.000 1.000 0.7861 -0.3834 -0.4848 -0.00000000000000011
65+
63 -11.000 -46.000 1.000 0.6819 -0.1325 -0.7193 0.00000000000000000
66+
64 -71.000 -46.000 1.000 0.2262 -0.6568 -0.7193 0.00000000000000000
Lines changed: 117 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,117 @@
1+
import mne
import io
import numpy as np
import scipy.io
import pandas as pd
from glob import glob
import datetime
import time
import h5py

# --- Configuration ----------------------------------------------------------
session = "s2_r1"
laplacian = False  # True -> use the CSD (surface-Laplacian) ERP .mat files

dataset_path = "/home/data/NDClab/datasets/thrive-dataset/"
analysis_path = "/home/data/NDClab/analyses/thrive-theta-ddm/"

# Output column order: subject id first, then the per-condition ERP means
# and the error-minus-correct difference scores.
outputHeader = [
    'id',
    'ERN_soc', 'CRN_soc', 'ERN_nonsoc', 'CRN_nonsoc',
    'ERN_min_CRN_diff_soc', 'ERN_min_CRN_diff_nonsoc',
    # 'PE_error_soc', 'PE_corr_soc', 'PE_error_nonsoc', 'PE_corr_nonsoc',
    # 'PE_err_min_corr_diff_soc', 'PE_err_min_corr_diff_nonsoc'
]

output_data = pd.DataFrame()

# Electrode clusters of interest (converted to 0-based channel indices).
clustCell = [
    [i-1 for i in [1, 2, 33, 34]],
    # [i-1 for i in [17, 49, 50, 19, 18]],
]

# Time window (ms) matching each cluster in clustCell.
timeCell = [
    [0, 100],  # ERN cluster
    # [300, 500],  # PE cluster
]

# Pick the ERP .mat export (CSD-transformed or plain).
if laplacian:
    path_to_mat = glob(f"{analysis_path}/derivatives/preprocessed/erp_check/{session}/thrive_Resp_erps_csd_min_6t_*2025*.mat")[0]
else:
    path_to_mat = glob(f"{analysis_path}/derivatives/preprocessed/erp_check/{session}/thrive_Resp_erps_min_6t_*2025*.mat")[0]

# Any preprocessed subject works here -- only the epoch time axis is used.
path_to_eeg = glob(f"{dataset_path}/derivatives/preprocessed/sub-3000001/{session}/eeg/sub-3000001_all_eeg_processed_data_{session}_e1.set")[0]

mat = scipy.io.loadmat(path_to_mat)
# Assumed layout: (subjects, conditions, channels, timepoints) -- TODO confirm
# against the MATLAB export.
allData = mat['erpDat_data']

# take IDs from EEG (all people > 6 trials)
sub_from_eeg = [int(mat["erpDat_subIds"][i].item()[0]) for i in range(len(mat["erpDat_subIds"]))]

EEG = mne.io.read_epochs_eeglab(path_to_eeg, verbose=False)

EEG_times = EEG.times * 1000  # seconds -> milliseconds
startTime = -400  # baseline window start (ms)
endTime = -200    # baseline window end (ms)

startIdx = np.argmin(np.abs(EEG_times-startTime))  # get start index for baseline
endIdx = np.argmin(np.abs(EEG_times-endTime))  # get end index for baseline

# Baseline correction: subtract each subject/condition/channel's mean over the
# baseline window from every timepoint.  (Fix: the original computed a squeezed
# allBase that was immediately overwritten -- dead code -- and then subtracted
# it in a Python loop over timepoints; broadcasting does the same in one step.)
allBase = np.mean(allData[:, :, :, startIdx:endIdx+1], 3)
newData = allData - allBase[:, :, :, np.newaxis]

output_data[outputHeader[0]] = sub_from_eeg

# initialize index var at 1 because i=0 is the column for subject ids
i = 1
for comp in range(len(clustCell)):

    cluster = clustCell[comp]
    times = timeCell[comp]

    compStartTime = times[0]  # in ms
    compEndTime = times[1]  # in ms

    compStartIdx = np.argmin(np.abs(EEG_times-compStartTime))
    compEndIdx = np.argmin(np.abs(EEG_times-compEndTime))

    # Average across the component time window for each condition.
    # Condition order assumed: soc-error, soc-correct, nonsoc-error,
    # nonsoc-correct -- TODO confirm against the MATLAB export.
    s_resp_incon_error_avgTime = np.mean(newData[:, 0:1, :, compStartIdx:compEndIdx+1], 3)
    s_resp_incon_corr_avgTime = np.mean(newData[:, 1:2, :, compStartIdx:compEndIdx+1], 3)
    ns_resp_incon_error_avgTime = np.mean(newData[:, 2:3, :, compStartIdx:compEndIdx+1], 3)
    ns_resp_incon_corr_avgTime = np.mean(newData[:, 3:4, :, compStartIdx:compEndIdx+1], 3)

    # average cluster of interest
    s_resp_incon_error_avgTimeClust = np.mean(s_resp_incon_error_avgTime[:, :, cluster], 2)
    s_resp_incon_corr_avgTimeClust = np.mean(s_resp_incon_corr_avgTime[:, :, cluster], 2)
    ns_resp_incon_error_avgTimeClust = np.mean(ns_resp_incon_error_avgTime[:, :, cluster], 2)
    ns_resp_incon_corr_avgTimeClust = np.mean(ns_resp_incon_corr_avgTime[:, :, cluster], 2)

    # compute difference scores
    s_resp_incon_error_avgTimeClust_diff = s_resp_incon_error_avgTimeClust - s_resp_incon_corr_avgTimeClust
    ns_resp_incon_error_avgTimeClust_diff = ns_resp_incon_error_avgTimeClust - ns_resp_incon_corr_avgTimeClust

    output_data[outputHeader[i]] = s_resp_incon_error_avgTimeClust
    output_data[outputHeader[i+1]] = s_resp_incon_corr_avgTimeClust
    output_data[outputHeader[i+2]] = ns_resp_incon_error_avgTimeClust
    output_data[outputHeader[i+3]] = ns_resp_incon_corr_avgTimeClust
    output_data[outputHeader[i+4]] = s_resp_incon_error_avgTimeClust_diff
    output_data[outputHeader[i+5]] = ns_resp_incon_error_avgTimeClust_diff
    i += 6

# Keep id plus the four condition means; the difference columns are dropped.
output_data = output_data.iloc[:, :5]
if laplacian:
    output_data.columns = [col + "_laplacian" if col != "id" else col for col in output_data.columns]
output_data = output_data.rename({"id": "sub"}, axis=1)

if laplacian:
    # Bug fix: this string was missing its f-prefix, so the CSV was written to
    # a literal "{analysis_path}/..." path instead of the formatted one.
    output_data.to_csv(f"{analysis_path}/derivatives/csv/{session}/thrive_erp_laplacian.csv", index=False)
else:
    output_data.to_csv(f"{analysis_path}/derivatives/csv/{session}/thrive_erp.csv", index=False)
Lines changed: 156 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,156 @@
1+
import io
import numpy as np
import matplotlib.pyplot as plt
import scipy.io
import pandas as pd
from glob import glob
import datetime
import time
import re
import h5py


session = "s2_r1"
dataset_path = "/home/data/NDClab/analyses/thrive-theta-ddm/"

arr_path = f"{dataset_path}/derivatives/preprocessed/TF_arrays/{session}/"

# The frequency/time axes are read from one subject's TF output file
# (presumably identical across subjects -- verify).  Fix: use a context
# manager so the HDF5 file is closed instead of staying open for the whole
# script; both datasets are read eagerly with [:].
with h5py.File(
    glob(f"{dataset_path}/derivatives/preprocessed/TF_outputs/{session}/resp/seed_1/TF/sub-*.mat")[0]
) as helper_data:
    freqs = helper_data['frequency'][:]
    times = helper_data['ds_time'][:]

ch_locs = [str(i) for i in range(1, 65)]  # channel labels "1".."64"


# NOT DIFFERENCE ICPS

thrive_data = pd.read_csv(f"{dataset_path}/derivatives/behavior/{session}/summary.csv")["sub"].to_frame()

measures = [
    "ICPS",
]
conditions = [
    "resp_s_i_0",
    "resp_s_i_1",
    # "resp_s_c_1",
    "resp_ns_i_0",
    "resp_ns_i_1",
    # "resp_ns_c_1",
]

# Lookup tables replace the original if/elif ladders.  (Fix: the original
# cluster chain used a bare `if` for OCC_L after the DLPFC branches -- harmless
# for the clusters iterated below, but fragile if the list changes.)
cluster_channels = {
    "DLPFC_L": ['6', '9'],
    "DLPFC_R": ['39', '42'],
    "OCC_L": ['22', '24'],
    "OCC_R": ['53', '55'],
    "MOTOR_L": ['3', '7'],
    "MOTOR_R": ['35', '40'],
    "CENTRAL": ['19', '50'],
}
band_limits = {"theta": (4, 7), "delta": (1, 3)}          # Hz
window_limits = {"early": (0, 250), "late": (256, 504)}   # ms

# Compile once; used for every condition file.
sub_pattern = re.compile(r'sub-(\d+)')

for m in measures:
    for c in conditions:
        for band in [
            "theta",
            # "delta"
        ]:
            for window in [
                "early",
                "late"
            ]:
                for cluster in [
                    "DLPFC_L",
                    "DLPFC_R",
                    "OCC_L",
                    "OCC_R",
                    "MOTOR_L",
                    "MOTOR_R",
                    # "CENTRAL"
                ]:
                    ch = cluster_channels[cluster]
                    fmin, fmax = band_limits[band]
                    tmin, tmax = window_limits[window]

                    fmin_idx = np.argmin(np.abs(freqs-fmin))
                    assert freqs[fmin_idx] == fmin, "Check your freqs!"
                    fmax_idx = np.argmin(np.abs(freqs-fmax))
                    assert freqs[fmax_idx] == fmax, "Check your freqs!"

                    # Times are downsampled; nearest sample is accepted
                    # (exact-match asserts were already disabled upstream).
                    tmin_idx = np.argmin(np.abs(times-tmin))
                    tmax_idx = np.argmin(np.abs(times-tmax))

                    ch_idx = [ch_locs.index(channel) for channel in ch if channel in ch_locs]

                    colname = f"{m}_{c}_{band}_{window}_{cluster}"
                    tf_df = pd.DataFrame(columns=["sub", colname])
                    tf_arr = scipy.io.loadmat(f"{arr_path}/{m}_{c}.mat")
                    sub_ids = [int(sub_pattern.search(i).group(1)) for i in tf_arr['subjects']]
                    tf_data = tf_arr[f"{m}_{c}"]
                    # Expected layout: (subjects, channels, times, freqs).
                    assert tf_data.shape[1:] == (64, 375, 59), f"Check your {m} data!"

                    for sub_id in range(tf_data.shape[0]):
                        sub_avg = tf_data[sub_id, :, :, :]
                        assert sub_avg.shape == (64, 375, 59), f"Check your {m} data!"

                        # Reduce channels -> time window -> frequency band.
                        ch_avg = np.mean(sub_avg[ch_idx, :, :], 0)
                        assert ch_avg.shape == (375, 59), f"Check your {m} data!"

                        time_avg = np.mean(ch_avg[tmin_idx:tmax_idx+1, :], 0)
                        assert len(time_avg) == 59 and time_avg.ndim == 1, f"Check your {m} data!"
                        freq_avg = np.mean(time_avg[fmin_idx:fmax_idx+1], 0)

                        tf_df.loc[sub_id, "sub"] = sub_ids[sub_id]
                        tf_df.loc[sub_id, colname] = freq_avg

                    thrive_data = thrive_data.merge(tf_df, on="sub", how="left")

# Drop any delta-band columns (always keep "sub").
thrive_data = thrive_data[
    [col for col in thrive_data.columns if ("delta" not in col or col == "sub")]
]

# Rename e.g. "ICPS_resp_s_i_0_theta_early_DLPFC_L" ->
# "ICPS_soc_err_early_DLPFC_L": expand the s/ns and 0/1 tokens, then drop the
# "resp", congruency ("i") and band tokens.
colnames = list(thrive_data.columns)
for idx, name in enumerate(colnames[1:], start=1):
    parts = name.split("_")
    if parts[2] == "s":
        parts[2] = "soc"
    elif parts[2] == "ns":
        parts[2] = "nonsoc"
    if parts[4] == "0":
        parts[4] = "err"
    elif parts[4] == "1":
        parts[4] = "corr"
    parts[1] = ""
    parts[5] = ""
    parts[3] = ""
    parts = [p for p in parts if p != ""]
    colnames[idx] = "_".join(parts)

thrive_data.columns = colnames

thrive_data.to_csv(f"{dataset_path}/derivatives/csv/{session}/thrive_icps.csv", index=False)

0 commit comments

Comments
 (0)