Skip to content

Commit 96906a0

Browse files
committed
feat: add AlphaDropout layer with hot-pink node color
Backend:
- Add `customalphadropout` to `model_generation._build_layer()`
- Validates rate in (0, 1) exclusive — strict bounds for AlphaDropout
- Falls back to rate=0.5 on empty input
- 9 unit tests: valid rates, edge cases, invalid inputs, full model pipeline

Frontend:
- AlphaDropoutNode component (hot-pink: rgb(220,53,153) / header rgb(190,24,93))
- Color token: `node-alphadropout` in tailwind.config.js
- Registered in Canvas.jsx nodeTypes and defaultParams
- Added to Sidebar.jsx layer palette
- NodePropertiesPanel.jsx config with SELU usage hint
- Helpers.jsx validation — rate must be in (0, 1) exclusive
- 8 frontend tests covering render, configured, not-configured states

All 171 backend tests pass. All 50 frontend tests pass.
1 parent caedcf7 commit 96906a0

9 files changed

Lines changed: 238 additions & 6 deletions

File tree

tensormap-backend/app/services/model_generation.py

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -120,5 +120,16 @@ def _build_layer(node: dict, input_tensor):
120120
raise ValueError(f"Dropout rate must be in [0, 1), got {rate!r}.")
121121
return tf.keras.layers.Dropout(rate=rate, name=name)(input_tensor)
122122

123+
elif node_type == "customalphadropout":
124+
raw = params.get("rate", "") if params.get("rate") is not None else ""
125+
raw = str(raw).strip()
126+
try:
127+
rate = float(raw) if raw != "" else 0.5
128+
except (ValueError, TypeError) as err:
129+
raise ValueError(f"AlphaDropout rate must be a number in (0, 1), got {raw!r}.") from err
130+
if not 0.0 < rate < 1.0:
131+
raise ValueError(f"AlphaDropout rate must be strictly between 0 and 1, got {rate!r}.")
132+
return tf.keras.layers.AlphaDropout(rate=rate, name=name)(input_tensor)
133+
123134
else:
124135
raise ValueError(f"Unknown node type: {node_type}")
Lines changed: 91 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,91 @@
1+
"""Unit tests for AlphaDropout layer in model_generation._build_layer()."""
2+
3+
import pytest
4+
import tensorflow as tf
5+
6+
from app.services.model_generation import _build_layer
7+
8+
9+
def _make_node(rate: str) -> dict:
10+
"""Helper to create an AlphaDropout node dict."""
11+
return {
12+
"id": "alphadropout-test",
13+
"type": "customalphadropout",
14+
"data": {"params": {"rate": rate}},
15+
}
16+
17+
18+
def test_build_alphadropout_basic():
    """A valid mid-range rate builds and keeps the feature dimension."""
    input_tensor = tf.keras.Input(shape=(10,))
    output = _build_layer(_make_node("0.5"), input_tensor)
    assert output.shape[-1] == 10
24+
25+
26+
def test_build_alphadropout_rate_0_1():
    """A low rate (0.1) near the exclusive lower bound is accepted."""
    input_tensor = tf.keras.Input(shape=(20,))
    output = _build_layer(_make_node("0.1"), input_tensor)
    assert output.shape == (None, 20)
32+
33+
34+
def test_build_alphadropout_rate_0_9():
    """A high rate (0.9) near the exclusive upper bound is accepted."""
    input_tensor = tf.keras.Input(shape=(15,))
    output = _build_layer(_make_node("0.9"), input_tensor)
    assert output.shape == (None, 15)
40+
41+
42+
def test_build_alphadropout_default_rate():
    """An empty rate string falls back to the documented default of 0.5."""
    input_tensor = tf.keras.Input(shape=(8,))
    output = _build_layer(_make_node(""), input_tensor)
    assert output.shape[-1] == 8
48+
49+
50+
def test_build_alphadropout_invalid_rate_zero():
    """rate=0 is rejected — AlphaDropout bounds are strictly exclusive.

    Pins the exception to ValueError with its message, rather than the
    original ``(ValueError, Exception)`` tuple: ``Exception`` is a superset
    of ``ValueError``, so the old assertion passed on *any* failure and
    could not distinguish validation from an unrelated crash.
    """
    input_tensor = tf.keras.Input(shape=(10,))
    node = _make_node("0")
    with pytest.raises(ValueError, match="strictly between 0 and 1"):
        _build_layer(node, input_tensor)
56+
57+
58+
def test_build_alphadropout_invalid_rate_negative():
    """A negative rate is rejected with a ValueError.

    Asserts ValueError specifically (not the broad ``(ValueError,
    Exception)`` tuple) so the test fails if the layer errors for an
    unexpected reason instead of validating its input.
    """
    input_tensor = tf.keras.Input(shape=(10,))
    node = _make_node("-0.5")
    with pytest.raises(ValueError, match="strictly between 0 and 1"):
        _build_layer(node, input_tensor)
64+
65+
66+
def test_build_alphadropout_invalid_rate_over_one():
    """A rate >= 1 is rejected with a ValueError.

    Asserts ValueError specifically (not the broad ``(ValueError,
    Exception)`` tuple) so that only the intended validation error
    satisfies the test.
    """
    input_tensor = tf.keras.Input(shape=(10,))
    node = _make_node("1.5")
    with pytest.raises(ValueError, match="strictly between 0 and 1"):
        _build_layer(node, input_tensor)
72+
73+
74+
def test_build_alphadropout_invalid_rate_nan():
    """A non-numeric rate string is rejected with a ValueError.

    The backend wraps the float-conversion failure in a ValueError with a
    "must be a number" message; asserting that exact type and message
    (instead of the original catch-all ``(ValueError, Exception)``) keeps
    the test from passing on unrelated crashes.
    """
    input_tensor = tf.keras.Input(shape=(10,))
    node = _make_node("abc")
    with pytest.raises(ValueError, match="must be a number"):
        _build_layer(node, input_tensor)
80+
81+
82+
def test_build_alphadropout_in_model():
    """The built layer composes into a compilable end-to-end Keras model."""
    inputs = tf.keras.Input(shape=(16,))
    dropped = _build_layer(_make_node("0.3"), inputs)
    outputs = tf.keras.layers.Dense(1)(dropped)
    model = tf.keras.Model(inputs=inputs, outputs=outputs)
    model.compile(optimizer="adam", loss="mse")
    # layers[0] is the InputLayer, so index 1 is the AlphaDropout layer.
    alpha_layer = model.layers[1]
    assert alpha_layer.__class__.__name__ == "AlphaDropout"
    assert alpha_layer.rate == pytest.approx(0.3)

tensormap-frontend/src/components/DragAndDropCanvas/Canvas.jsx

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -30,6 +30,7 @@ import DenseNode from "./CustomNodes/DenseNode/DenseNode";
3030
import FlattenNode from "./CustomNodes/FlattenNode/FlattenNode";
3131
import ConvNode from "./CustomNodes/ConvNode/ConvNode";
3232
import DropoutNode from "./CustomNodes/DropoutNode/DropoutNode";
33+
import AlphaDropoutNode from "./CustomNodes/AlphaDropoutNode/AlphaDropoutNode";
3334
import MaxPoolingNode from "./CustomNodes/MaxPoolingNode/MaxPoolingNode";
3435
import Sidebar from "./Sidebar";
3536
import NodePropertiesPanel from "./NodePropertiesPanel";
@@ -54,6 +55,7 @@ const nodeTypes = {
5455
customflatten: FlattenNode,
5556
customconv: ConvNode,
5657
customdropout: DropoutNode,
58+
customalphadropout: AlphaDropoutNode,
5759
custommaxpool: MaxPoolingNode,
5860
customglobalavgpool: GlobalAvgPoolNode,
5961
};
@@ -537,6 +539,7 @@ function Canvas() {
537539
kernelY: "",
538540
},
539541
customdropout: { rate: "" },
542+
customalphadropout: { rate: "" },
540543
custommaxpool: { pool_size: "", stride: "", padding: "valid" },
541544
customglobalavgpool: {},
542545
};
Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,32 @@
1+
import PropTypes from "prop-types";
2+
import { Handle, Position } from "reactflow";
3+
4+
function AlphaDropoutNode({ data, id }) {
  // A node counts as "configured" once its rate parses to a number strictly
  // inside (0, 1) — the same exclusive bounds the backend enforces.
  const rate = data.params?.rate ?? "";
  const numericRate = Number(rate);
  const configured =
    String(rate).trim() !== "" &&
    !isNaN(numericRate) &&
    numericRate > 0 &&
    numericRate < 1;

  return (
    <div className="w-56 rounded-lg border bg-white shadow-sm">
      {/* Incoming connection point */}
      <Handle type="target" position={Position.Left} isConnectable id={`${id}_in`} />
      <div className="rounded-t-lg bg-node-alphadropout px-3 py-1.5 text-xs font-bold text-white">
        AlphaDropout
      </div>
      <div className="px-3 py-2 text-xs text-muted-foreground">
        {configured ? `Rate: ${rate}` : "Not configured"}
      </div>
      {/* Outgoing connection point */}
      <Handle type="source" position={Position.Right} isConnectable id={`${id}_out`} />
    </div>
  );
}
22+
23+
// Runtime prop validation: `rate` may arrive as a string (typed into the
// properties panel input) or as a number (programmatic defaults).
AlphaDropoutNode.propTypes = {
  data: PropTypes.shape({
    params: PropTypes.shape({
      rate: PropTypes.oneOfType([PropTypes.number, PropTypes.string]),
    }).isRequired,
  }).isRequired,
  id: PropTypes.string.isRequired,
};

export default AlphaDropoutNode;
Lines changed: 55 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,55 @@
1+
import { describe, it, expect } from "vitest";
2+
import { render, screen } from "@testing-library/react";
3+
import { ReactFlowProvider } from "reactflow";
4+
import AlphaDropoutNode from "./AlphaDropoutNode";
5+
6+
// Mounts the node inside a ReactFlowProvider (required by <Handle>).
// Caller-supplied `params` override the default rate of "0.5".
const renderNode = (params = {}) => {
  const data = { params: { rate: "0.5", ...params } };
  return render(
    <ReactFlowProvider>
      <AlphaDropoutNode id="ad-test" data={data} />
    </ReactFlowProvider>,
  );
};
12+
13+
describe("AlphaDropoutNode", () => {
  // Shared assertion for every case where the node should fall back to
  // its unconfigured placeholder text.
  const expectNotConfigured = () =>
    expect(screen.getByText("Not configured")).toBeDefined();

  it("renders the AlphaDropout label", () => {
    renderNode();
    expect(screen.getByText("AlphaDropout")).toBeDefined();
  });

  it("displays rate when set", () => {
    renderNode({ rate: "0.3" });
    expect(screen.getByText("Rate: 0.3")).toBeDefined();
  });

  it("shows Not configured when rate is empty", () => {
    renderNode({ rate: "" });
    expectNotConfigured();
  });

  it("shows Not configured when rate is invalid", () => {
    renderNode({ rate: "abc" });
    expectNotConfigured();
  });

  it("shows Not configured when rate is 0", () => {
    renderNode({ rate: "0" });
    expectNotConfigured();
  });

  it("shows Not configured when rate is > 1", () => {
    renderNode({ rate: "1.5" });
    expectNotConfigured();
  });

  it("renders a target handle on the left side", () => {
    const { container } = renderNode();
    expect(container.querySelector("[data-handlepos='left']")).not.toBeNull();
  });

  it("renders a source handle on the right side", () => {
    const { container } = renderNode();
    expect(container.querySelector("[data-handlepos='right']")).not.toBeNull();
  });
});

tensormap-frontend/src/components/DragAndDropCanvas/Helpers.jsx

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -40,8 +40,13 @@ export const canSaveModel = (modelName, modelData) => {
4040
if (node.data.params.rate === "" || isNaN(rate) || rate < 0 || rate >= 1) {
4141
return false;
4242
}
43+
} else if (node.type === "customalphadropout") {
44+
const rate = parseFloat(node.data.params.rate);
45+
if (node.data.params.rate === "" || isNaN(rate) || rate <= 0 || rate >= 1) {
46+
return false;
47+
}
4348
}
44-
// customflatten and customdropout have no required params to validate
49+
// customflatten has no required params to validate; customdropout rate is validated above
4550
}
4651
return isGraphConnected(modelData);
4752
};

tensormap-frontend/src/components/DragAndDropCanvas/NodePropertiesPanel.jsx

Lines changed: 32 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -282,6 +282,33 @@ function NodePropertiesPanel({
282282
);
283283
}
284284

285+
if (type === "customalphadropout") {
286+
return (
287+
<Card className="h-fit">
288+
<CardHeader>
289+
<CardTitle className="text-sm">AlphaDropout</CardTitle>
290+
</CardHeader>
291+
<CardContent className="space-y-3">
292+
<div className="space-y-1">
293+
<Label>Rate (exclusive: 0 &lt; rate &lt; 1)</Label>
294+
<Input
295+
type="number"
296+
min="0.001"
297+
max="0.999"
298+
step="0.01"
299+
value={params.rate ?? ""}
300+
onChange={(e) => doUpdate("rate", e.target.value)}
301+
placeholder="0.5"
302+
/>
303+
<p className="text-xs text-muted-foreground">
304+
Keeps mean and variance of inputs. Best used with SELU activations.
305+
</p>
306+
</div>
307+
</CardContent>
308+
</Card>
309+
);
310+
}
311+
285312
if (type === "customdropout") {
286313
return (
287314
<Card className="h-fit">
@@ -290,14 +317,14 @@ function NodePropertiesPanel({
290317
</CardHeader>
291318
<CardContent className="space-y-3">
292319
<div className="space-y-1">
293-
<Label>Rate (01)</Label>
320+
<Label>Rate (0 ≤ rate &lt; 1)</Label>
294321
<Input
295322
type="number"
296323
min="0"
297-
max="1"
298-
step="0.1"
299-
value={params.rate}
300-
onChange={(e) => updateParam("rate", e.target.value)}
324+
max="0.999"
325+
step="0.01"
326+
value={params.rate ?? ""}
327+
onChange={(e) => doUpdate("rate", e.target.value)}
301328
/>
302329
</div>
303330
</CardContent>

tensormap-frontend/src/components/DragAndDropCanvas/Sidebar.jsx

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -45,6 +45,13 @@ function Sidebar() {
4545
>
4646
Dropout
4747
</div>
48+
<div
49+
className="cursor-grab rounded-md border border-l-4 border-l-node-alphadropout bg-white px-3 py-2 text-xs font-medium"
50+
onDragStart={(e) => onDragStart(e, "customalphadropout")}
51+
draggable
52+
>
53+
AlphaDropout
54+
</div>
4855
<div
4956
className="cursor-grab rounded-md border border-l-4 border-l-node-conv bg-white px-3 py-2 text-xs font-medium"
5057
onDragStart={(e) => onDragStart(e, "custommaxpool")}

tensormap-frontend/tailwind.config.js

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,7 @@ export default {
4444
"node-flatten": { DEFAULT: "rgb(247, 173, 20)", header: "rgb(170, 121, 24)" },
4545
"node-conv": { DEFAULT: "rgb(255, 128, 43)", header: "rgb(255, 128, 43)" },
4646
"node-dropout": { DEFAULT: "rgb(220, 80, 80)", header: "rgb(180, 50, 50)" },
47+
"node-alphadropout": { DEFAULT: "rgb(220, 53, 153)", header: "rgb(190, 24, 93)" },
4748
"node-maxpool": { DEFAULT: "rgb(34, 182, 176)", header: "rgb(20, 140, 135)" },
4849
},
4950
borderRadius: {

0 commit comments

Comments
 (0)