Skip to content

Commit da45444

Browse files
committed
Added the ability to specify a list of badly damaged sections, plus a method to automatically detect them
1 parent 5d815b2 commit da45444

File tree

3 files changed

+97
-40
lines changed

3 files changed

+97
-40
lines changed

.gitignore

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -34,4 +34,6 @@ var/
3434
# Installer logs
3535
pip-log.txt
3636
pip-delete-this-directory.txt
37-
DeepSlice.egg-info
37+
DeepSlice.egg-info
38+
test.py
39+
.vscode/settings.json

DeepSlice/coord_post_processing/spacing_and_indexing.py

Lines changed: 92 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
from typing import Union, List
1+
from typing import Union, List, Optional
22
import numpy as np
33
import pandas as pd
44
import re
@@ -23,7 +23,7 @@ def trim_mean(arr: np.array, percent: int) -> float:
2323

2424

2525
def calculate_average_section_thickness(
26-
section_numbers: List[Union[int, float]], section_depth: List[Union[int, float]], method="weighted",
26+
section_numbers: List[Union[int, float]], section_depth: List[Union[int, float]], bad_sections, method="weighted",
2727
species="mouse"
2828
) -> float:
2929
"""
@@ -37,33 +37,28 @@ def calculate_average_section_thickness(
3737
:rtype: float
3838
"""
3939
# inter section number differences
40+
if bad_sections is not None:
41+
section_numbers = section_numbers[bad_sections == False].reset_index(drop=True)
42+
section_depth = section_depth[bad_sections == False]
4043
number_spacing = section_numbers[:-1].values - section_numbers[1:].values
4144
# inter section depth differences
4245
depth_spacing = section_depth[:-1] - section_depth[1:]
4346
# dividing depth spacing by number spacing allows us to control for missing sections
44-
min = 0
45-
max = np.max(section_numbers)
46-
if species == "mouse":
47-
min, max = 0, 528
48-
elif species == "rat":
49-
min, max = 0, 1024
50-
if method == "weighted":
51-
weighted_accuracy = plane_alignment.make_gaussian_weights(max + 1)
52-
weighted_accuracy = [weighted_accuracy[int(y)] for y in section_numbers]
53-
elif method == None:
54-
weighted_accuracy = [1 for y in section_numbers]
55-
47+
weighted_accuracy = calculate_weighted_accuracy(section_numbers, section_depth, species, None, method)
5648
section_thicknesses = depth_spacing / number_spacing
57-
if len(section_numbers) <= 2:
58-
weighted_accuracy = [1, 1]
5949
average_thickness = np.average(section_thicknesses, weights = weighted_accuracy[1:])
6050
return average_thickness
6151

6252

53+
54+
55+
56+
6357
def ideal_spacing(
6458
section_numbers: List[Union[int, float]],
6559
section_depth: List[Union[int, float]],
6660
average_thickness: Union[int, float],
61+
bad_sections: List[bool] = None,
6762
method = "weighted",
6863
species = "mouse"
6964
) -> float:
@@ -82,17 +77,8 @@ def ideal_spacing(
8277
# unaligned voxel position of section numbers (evenly spaced depths)
8378
index_spaced_depth = section_numbers * average_thickness
8479
# average distance between the depths and the evenly spaced depths
85-
if species == "mouse":
86-
min, max = 0, 528
87-
elif species == "rat":
88-
min, max = 0, 1024
89-
if method == "weighted":
90-
weighted_accuracy = plane_alignment.make_gaussian_weights(max + 1)
91-
weighted_accuracy = [weighted_accuracy[int(y)] for y in section_numbers]
92-
elif method == None:
93-
weighted_accuracy = [1 for y in section_numbers]
94-
if len(section_numbers) <= 2:
95-
weighted_accuracy = [0.5, 0.5]
80+
81+
weighted_accuracy = calculate_weighted_accuracy(section_numbers, section_depth, species, bad_sections, method)
9682
distance_to_ideal = np.average(section_depth - index_spaced_depth, weights = weighted_accuracy)
9783
# adjust the evenly spaced depths to minimise their distance to the predicted depths
9884
ideal_index_spaced_depth = index_spaced_depth + distance_to_ideal
@@ -138,16 +124,26 @@ def enforce_section_ordering(predictions):
138124
depths = np.array(depths)
139125
direction = determine_direction_of_indexing(depths)
140126
predictions["depths"] = depths
141-
127+
142128
temp = predictions.copy()
143129
if direction == "caudal-rostro":
144130
ascending = False
145131
if direction == "rostro-caudal":
146132
ascending = True
147-
temp = temp.sort_values(by=["depths"], ascending=ascending).reset_index(
148-
drop=True
149-
)
150-
predictions["oy"] = temp["oy"]
133+
if "bad_section" in temp:
134+
temp_good = temp[temp["bad_section"] == False].copy().reset_index(drop=True)
135+
temp_good_copy = temp_good.copy()
136+
temp_good_copy = temp_good_copy.sort_values(by=["depths"], ascending=ascending).reset_index(
137+
drop=True
138+
)
139+
temp_good["oy"] = temp_good_copy["oy"]
140+
141+
predictions.loc[predictions["bad_section"] == False, "oy"] = temp_good["oy"].values
142+
else:
143+
temp = temp.sort_values(by=["depths"], ascending=ascending).reset_index(drop=True)
144+
145+
146+
predictions["oy"] = temp["oy"].values
151147
return predictions
152148

153149

@@ -164,7 +160,7 @@ def space_according_to_index(predictions, section_thickness = None, voxel_size =
164160
"""
165161
if voxel_size == None:
166162
raise ValueError("voxel_size must be specified")
167-
if section_thickness != None:
163+
if section_thickness is not None:
168164
section_thickness/=voxel_size
169165
predictions["oy"] = predictions["oy"].astype(float)
170166
if len(predictions) == 1:
@@ -174,20 +170,24 @@ def space_according_to_index(predictions, section_thickness = None, voxel_size =
174170
"No section indexes found, cannot space according to a missing index. You likely did not run predict() with section_numbers=True"
175171
)
176172
else:
173+
if 'bad_section' in predictions:
174+
bad_sections = predictions['bad_section'].values
175+
else:
176+
bad_sections = None
177177
predictions = enforce_section_ordering(predictions)
178178
depths = calculate_brain_center_depths(predictions)
179179
depths = np.array(depths)
180180
if not section_thickness:
181181
section_thickness = calculate_average_section_thickness(
182-
predictions["nr"], section_depth = depths, species=species
182+
predictions["nr"], section_depth = depths, bad_sections=bad_sections, species=species
183183
)
184184
if not suppress:
185185
print(f'predicted thickness is {section_thickness * voxel_size}µm')
186186
else:
187187
if not suppress:
188188
print(f'specified thickness is {section_thickness * voxel_size}µm')
189189

190-
calculated_spacing = ideal_spacing(predictions["nr"], depths, section_thickness, None, species)
190+
calculated_spacing = ideal_spacing(predictions["nr"], depths, section_thickness, bad_sections, species=species)
191191
distance_to_ideal = calculated_spacing - depths
192192
predictions["oy"] = predictions["oy"] + distance_to_ideal
193193
return predictions
@@ -223,21 +223,40 @@ def number_sections(filenames: List[str], legacy=False) -> List[int]:
223223
return section_numbers
224224

225225

226-
def set_bad_sections_util(df: pd.DataFrame, bad_sections: List[str]) -> pd.DataFrame:
226+
def set_bad_sections_util(df: pd.DataFrame, bad_sections: List[str], auto = False) -> pd.DataFrame:
227227
"""
228228
Sets the damaged sections and sections which deepslice may not perform well on for a series of predictions
229229
230230
:param bad_sections: List of bad sections
231231
:param df: dataframe of predictions
232+
:param auto: automatically set bad sections based on whether they are badly positioned relative to their section index
232233
:type bad_sections: List[int]
233234
:type df: pandas.DataFrame
235+
:type auto: bool
234236
:return: the input dataframe with bad sections labeled as such
235237
:rtype: pandas.DataFrame
236238
"""
239+
237240
bad_section_indexes = [
238-
df.Filenames.contains(bad_section) for bad_section in bad_sections
241+
df.Filenames.str.contains(bad_section) for bad_section in bad_sections
239242
]
243+
if np.any([np.sum(x)>1 for x in bad_section_indexes]):
244+
raise ValueError("Multiple sections match the same bad section string, make sure each bad section string is unique")
245+
bad_section_indexes = [np.where(x)[0] for x in bad_section_indexes]
246+
bad_section_indexes = np.concatenate(bad_section_indexes)
247+
df.loc[~df.index.isin(bad_section_indexes), "bad_section"] = False
248+
if auto:
249+
df['depths'] = calculate_brain_center_depths(df)
250+
x = df['nr'].values
251+
y = df['depths'].values
252+
m,b = np.polyfit(x,y,1)
253+
residuals = y - (m*x + b)
254+
outliers = np.abs(residuals) > 1.5*np.std(residuals)
255+
df.loc[outliers, 'bad_section'] = True
256+
240257
df.loc[bad_section_indexes, "bad_section"] = True
258+
# make sure the other sections are False
259+
241260
bad_sections_found = np.sum(bad_section_indexes)
242261
# Tell the user which sections were identified as bad
243262
if bad_sections_found > 0:
@@ -246,3 +265,39 @@ def set_bad_sections_util(df: pd.DataFrame, bad_sections: List[str]) -> pd.DataF
246265
They are:\n {df.Filenames[bad_section_indexes]}"
247266
)
248267
return df
268+
269+
270+
def calculate_weighted_accuracy(section_numbers: List[int], depths: List[float], species: str, bad_sections: List[Optional[bool]] = None, method: str = "weighted") -> List[float]:
    """
    Calculates the per-section weights used when averaging section statistics for a given species.

    :param section_numbers: List of section numbers
    :param depths: Predicted atlas depth of each section, in voxels
    :param species: Species to calculate accuracy for ("mouse" or "rat")
    :param bad_sections: List of bad-section flags; sections flagged True get zero weight
    :param method: Method to use for weighting, defaults to "weighted"; None gives uniform weights
    :type section_numbers: List[int]
    :type depths: List[float]
    :type species: str
    :type bad_sections: List[Optional[bool]]
    :type method: str
    :return: List of weighted accuracies
    :rtype: List[float]
    :raises ValueError: if species or method is not recognised
    """
    # Atlas extent in voxels differs per species. Named lower/upper so the
    # builtins ``min``/``max`` are not shadowed, and unknown species now fail
    # loudly instead of leaving the bounds unbound (NameError later).
    if species == "mouse":
        lower, upper = 0, 528
    elif species == "rat":
        lower, upper = 0, 1024
    else:
        raise ValueError(f"Unknown species: {species!r}, expected 'mouse' or 'rat'")
    if method == "weighted":
        gaussian = plane_alignment.make_gaussian_weights(upper + 1)
        # Clip depths into the atlas range so indexing cannot go out of bounds.
        clipped_depths = np.clip(np.array(depths), lower, upper)
        weighted_accuracy = [gaussian[int(y)] for y in clipped_depths]
    elif method is None:
        weighted_accuracy = [1 for _ in section_numbers]
    else:
        raise ValueError(f"Unknown weighting method: {method!r}, expected 'weighted' or None")
    # With two or fewer sections a Gaussian weighting is meaningless; fall
    # back to equal weights (matching the behaviour elsewhere in this module).
    if len(section_numbers) <= 2:
        weighted_accuracy = [0.5, 0.5]
    # Zero out sections flagged as bad so they do not influence averages.
    if bad_sections is not None:
        weighted_accuracy = [
            0 if is_bad else weight
            for weight, is_bad in zip(weighted_accuracy, bad_sections)
        ]
    return weighted_accuracy

DeepSlice/main.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -101,15 +101,15 @@ def predict(
101101
self.predictions = predictions
102102
self.image_directory = image_directory
103103

def set_bad_sections(self, bad_sections: list, auto=False):
    """
    sets the bad sections for a given brain. Must be run after predict()

    :param bad_sections: A list of bad sections to ignore when calculating angles and spacing
    :param auto: if True, additionally auto-flag sections whose predicted depth is an
        outlier relative to their section index
    :type bad_sections: list
    :type auto: bool
    """
    self.predictions = spacing_and_indexing.set_bad_sections_util(
        self.predictions, bad_sections, auto
    )

115115
def enforce_index_order(self):

0 commit comments

Comments
 (0)