Skip to content

Commit 11f7b29

Browse files
authored
Improvements: debug lcu and add new metrics (#38)
1 parent fedf645 commit 11f7b29

File tree

12 files changed

+1283
-599
lines changed

12 files changed

+1283
-599
lines changed

README.md

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,15 @@ It's a Python implementation of an image segmentation algorithm that
99
combines superpixel with complex networks dynamics. In this setup, we
1010
classify the algorithm as transductive as well.
1111

12+
# Showcase on grabcut dataset
13+
14+
![showcase](pics/egsis-showcase.png)
15+
16+
First segmentation mask is the result of EGSIS segmentation over lasso
17+
annotation from GrabCut dataset, second segmentation mask is the
18+
ground truth.
19+
20+
1221
# What is transductive segmentation?
1322

1423
Transductive segmentation is a concept in machine learning and

egsis/lcu.py

Lines changed: 16 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -6,35 +6,14 @@
66

77
from loguru import logger
88

9-
"""lerax - sex 09 dez 2022 13:55:34
10-
Alguma coisa não está certa... as subnetworks não são disjuntas, a
11-
função g que gera novas particulas quase sempre está zerada... a
12-
matriz delta só fica com elementos na diagonal... tem algo errado aqui
13-
14-
lerax - sáb 17 dez 2022 02:47:16
15-
16-
óh céus!!! A função de evolução não tá mudando em nada depois das
17-
primeiras iterações! Será que existe mais algum bug na implementação
18-
das equações!? Será que eu deveria fazer o que o verri falou e usar o
19-
paper mais novo?! Deixei em research/lcu_simplified_improved.pdf
20-
21-
esse documento deve ajudar
22-
23-
lerax - qua 21 dez 2022 10:10:58
24-
25-
Parece que a evolução de nc está incorreta, inverti a multiplicação de
26-
matrizes, pois geralmente é matrix x vetor, não vetor x matrix...
27-
Os resultados começaram a ter maior variação, mas parecem estar invertidos..
28-
"""
29-
309

3110
class LabeledComponentUnfolding:
3211

3312
"""
3413
Collective Dynamic Labeled Component Unfolding
3514
3615
37-
It can be used to solve Semi-Supervised problems.
16+
It can be used to solve Transductive Semi-Supervised problems.
3817
3918
4019
Parameters
@@ -184,11 +163,13 @@ def p(self, G: nx.graph, i: int, j: int, c: int) -> float:
184163
edge_weight = G.edges[i, j]["weight"]
185164
walk = edge_weight / G.degree[i]
186165
survival = 1 - self.competition_level * self.sigma(G, i, j, c)
187-
166+
# FIXME: weirdly, sigma function only outputs 1, 0.5 or 0
167+
# it doesn't make sense
168+
# logger.trace(f"survival factor = {survival}")
188169
return walk * survival
189170

190171
def probability(self, G: nx.Graph) -> np.ndarray:
191-
"""Matrix with probabilities of particle survival"""
172+
"""Matrix with probabilities of particle surviving"""
192173
P = np.zeros(shape=self.N.shape)
193174
C, nodes, _ = P.shape
194175
for c in range(C):
@@ -198,6 +179,8 @@ def probability(self, G: nx.Graph) -> np.ndarray:
198179
return P
199180

200181
def probability_of_new_particles(self, G: nx.Graph, c: int) -> np.ndarray:
182+
# FIXME: review this code based on the paper
183+
# equation on page 4
201184
node_degrees = [
202185
G.degree[node] for node in G.nodes
203186
]
@@ -222,7 +205,7 @@ def n0(self, G: nx.Graph) -> np.ndarray:
222205
if label != 0:
223206
labels[cls, idx] = label
224207
particles = labels * population * self.scale_particles
225-
logger.debug(f"n0: {particles}")
208+
logger.debug(f"n0: \n{particles}")
226209
return particles
227210

228211
def N0(self, G: nx.Graph):
@@ -241,17 +224,18 @@ def delta0(self, G: nx.Graph):
241224
return np.zeros(shape=(self.n_classes, nodes, nodes))
242225

243226
def sigma(self, G: nx.Graph, i: int, j: int, c: int) -> float:
244-
"""Matrix with current relative domination sigma_ij[c]
227+
"""Current relative subordination sigma_ij[c]
245228
246-
Number of particles with label=c which moved from v_i to
247-
v_j in the current time.
229+
The fraction of particles that do not belong to class c and
230+
have successfully passed through edge (i, j) in any direction at
231+
the current time.
248232
"""
249-
S = np.sum((self.N[:, i, j] + self.N[:, j, i].T).flatten())
233+
S = np.sum((self.N[:, i, j] + self.N[:, j, i]).flatten())
250234
result: float
251-
if S <= 0:
252-
result = 1 - (1 / self.n_classes)
253-
else:
235+
if S > 0:
254236
result = 1 - ((self.N[c][i][j] + self.N[c][j][i]) / S)
237+
else:
238+
result = 1 - (1 / self.n_classes)
255239

256240
return result
257241

egsis/metrics.py

Lines changed: 39 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,42 @@ def f1(y_true: np.ndarray, y_pred: np.ndarray) -> float:
3030

3131

3232
def err(y_true: np.ndarray, y_pred: np.ndarray) -> float:
33-
"""Error rate (mis-segmentation rate)"""
34-
roi_pixels = y_true
35-
mis_segmentation = (y_true | y_pred) - y_true
36-
return mis_segmentation.sum() / roi_pixels.sum()
33+
"""This function calculates the error rate, also known as the
34+
mis-segmentation rate, between the true and predicted values.
35+
36+
Parameters
37+
---------
38+
y_true : np.ndarray
39+
The ground truth binary labels. The binary label indicates
40+
whether each pixel is within the region of interest (ROI) or
41+
not.
42+
y_pred : np.ndarray:
43+
The predicted binary labels. The binary label indicates
44+
whether each pixel is predicted to be within the ROI or not.
45+
46+
Returns
47+
-------
48+
err : float
49+
50+
The mis-segmentation rate. This is calculated as the number of
51+
mis-segmented pixels (false-positives and false-negatives) divided
52+
by the total number of pixels in the ROI.
53+
"""
54+
union = (y_true | y_pred)
55+
intersection = (y_true & y_pred)
56+
mis_segmentation = (union - intersection).sum()
57+
w, h = y_true.shape
58+
roi = w * h
59+
return mis_segmentation / roi
60+
61+
62+
def recall(y_true: np.ndarray, y_pred: np.ndarray) -> float:
63+
tp = (y_true & y_pred).sum()
64+
fn = ((y_true | y_pred) - y_pred).sum()
65+
return tp / (tp + fn)
66+
67+
68+
def precision(y_true: np.ndarray, y_pred: np.ndarray) -> float:
69+
tp = (y_true & y_pred).sum()
70+
fp = ((y_true | y_pred) - y_true).sum()
71+
return tp / (tp + fp)

egsis/model.py

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -107,16 +107,19 @@ def build_complex_network(
107107
segments=segments,
108108
labels=y
109109
)
110+
logger.info("Complex networks: compute node labels finished.")
110111
complex_networks.compute_node_features(
111112
graph=G,
112113
img=X,
113114
segments=segments,
114115
feature_method=self.feature_extraction
115116
)
117+
logger.info("Complex networks: feature extraction finished.")
116118
complex_networks.compute_edge_weights(
117119
graph=G,
118120
similarity_function=self.feature_similarity
119121
)
122+
logger.info("Complex networks: compute node weights finished.")
120123
return G
121124

122125
def sub_networks_to_matrix(self, sub_networks: List[networkx.Graph]):
@@ -134,15 +137,17 @@ def fit_predict(self, X: numpy.ndarray, y: numpy.ndarray):
134137
luminosity of each color channel of RGB
135138
y : numpy.ndarray (shape=(n, m))
136139
it's the label matrix with partial annotation, to be full
137-
filled Every non-zero value it's a label, and zero it's
140+
filled every non-zero value it's a label, and zero it's
138141
an unlabeled pixel.
139142
Returns
140143
-------
141144
new y matrix with full filled labels.
142145
"""
143146
logger.info("Run!")
144147
self.segments = self.build_superpixels(X)
148+
logger.info("Superpixels: finished.")
145149
self.G = self.build_complex_network(X, y, self.segments)
150+
logger.info("Complex networks: finished.")
146151
n_classes = len(numpy.unique(y)) - 1
147152
collective_dynamic = lcu.LabeledComponentUnfolding(
148153
competition_level=self.lcu_competition_level,
@@ -152,6 +157,7 @@ def fit_predict(self, X: numpy.ndarray, y: numpy.ndarray):
152157

153158
self.sub_networks = collective_dynamic.fit_predict(self.G)
154159
self.G_pred = collective_dynamic.classify_vertexes(self.sub_networks)
160+
logger.info("Dynamic collective LCU: finished.")
155161

156162
# FIXME: should return a matrix y with new labels
157163
return self.G_pred

notebooks/annotator_egsis.ipynb

Lines changed: 57 additions & 23 deletions
Large diffs are not rendered by default.

notebooks/benchmark.ipynb

Lines changed: 571 additions & 443 deletions
Large diffs are not rendered by default.

notebooks/labeled_component_unfolding.ipynb

Lines changed: 364 additions & 35 deletions
Large diffs are not rendered by default.

notebooks/model_egsis.ipynb

Lines changed: 53 additions & 49 deletions
Large diffs are not rendered by default.

notebooks/showcase.ipynb

Lines changed: 155 additions & 0 deletions
Large diffs are not rendered by default.

notebooks/superpixel_complex_networks.ipynb

Lines changed: 11 additions & 11 deletions
Large diffs are not rendered by default.

0 commit comments

Comments
 (0)