
Commit 7d7b14e

changes to neural
1 parent 483bf96 commit 7d7b14e

File tree: 5 files changed, +140 −17 lines

src/FEA/direct_displacement_solver.jl

Lines changed: 10 additions & 10 deletions
@@ -2,17 +2,17 @@ abstract type AbstractFEASolver end
 
 abstract type AbstractDisplacementSolver <: AbstractFEASolver end
 
-@params mutable struct DirectDisplacementSolver{T,dim,TP<:AbstractPenalty{T}} <:
+mutable struct DirectDisplacementSolver{T,dim,TP1<:AbstractPenalty{T},TP2<:StiffnessTopOptProblem{dim,T}, TG <: GlobalFEAInfo{T}, TE <: ElementFEAInfo{dim, T}, Tu <: AbstractVector{T}} <:
     AbstractDisplacementSolver
-    problem::StiffnessTopOptProblem{dim,T}
-    globalinfo::GlobalFEAInfo{T}
-    elementinfo::ElementFEAInfo{dim,T}
-    u::AbstractVector{T}
-    lhs::AbstractVector{T}
-    rhs::AbstractVector{T}
-    vars::AbstractVector{T}
-    penalty::TP
-    prev_penalty::TP
+    problem::TP2
+    globalinfo::TG
+    elementinfo::TE
+    u::Tu
+    lhs::Tu
+    rhs::Tu
+    vars::Tu
+    penalty::TP1
+    prev_penalty::TP1
     xmin::T
     qr::Bool
 end
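The refactor above drops the @params macro and replaces the abstract field annotations (StiffnessTopOptProblem, GlobalFEAInfo, AbstractVector, ...) with explicit type parameters, so a constructed solver carries concretely typed fields that Julia can specialize on. The sketch below is not part of the commit; it assumes, as in the tests further down, that FEASolver(Direct, problem; xmin, penalty) builds a DirectDisplacementSolver, and it uses a deliberately small, made-up mesh.

using TopOpt

# hypothetical coarse cantilever, mirroring the test setup below
problem = PointLoadCantilever(Val{:Linear}, (10, 4), (1.0, 1.0), 1.0, 0.3, 1.0)
solver = FEASolver(Direct, problem; xmin = 1e-6, penalty = TopOpt.PowerPenalty(1.0))

# with the new parameters Tu, TG, TE, TP1, TP2 the declared field types are concrete;
# previously the u field, for example, was only declared as AbstractVector{T}
isconcretetype(fieldtype(typeof(solver), :u))                            # now true
fieldtype(typeof(solver), :penalty) === fieldtype(typeof(solver), :prev_penalty)  # both TP1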

src/Functions/Functions.jl

Lines changed: 3 additions & 1 deletion
@@ -34,8 +34,10 @@ export Volume,
     apply_boundary_with_zerodiag!,
     apply_boundary_with_meandiag!,
     NeuralNetwork,
-    PredictFunction,
     TrainFunction,
+    PredictFunction,
+    NNParams,
+    Coordinates,
     StressTensor,
     ElementStressTensor,
     MaterialInterpolation,

src/Functions/neural.jl

Lines changed: 27 additions & 5 deletions
@@ -1,3 +1,10 @@
+struct Coordinates{C}
+    coords::C
+end
+struct NNParams{W}
+    p::W
+end
+
 function getcentroids(problem::AbstractTopOptProblem)
     dh = problem.ch.dh
     return map(CellIterator(dh)) do cell
@@ -15,20 +22,29 @@ abstract type AbstractMLModel end
     init_params::Any
     params_to_out::Any
     in_to_out::Any
+    centroids::Any
 end
 function NeuralNetwork(nn_model, input_coords::AbstractVector)
     f = x -> nn_model(x)[1]
     @assert all(0 .<= f.(input_coords) .<= 1)
     p, re = Flux.destructure(nn_model)
     return NeuralNetwork(
-        nn_model, Float64.(p), p -> getindex.(re(p).(input_coords), 1), nn_model
+        nn_model,
+        Float64.(p),
+        p -> getindex.(re(p).(input_coords), 1),
+        nn_model,
+        input_coords,
     )
 end
-function NeuralNetwork(nn_model, problem::AbstractTopOptProblem)
+function NeuralNetwork(nn_model, problem::AbstractTopOptProblem; scale = true)
     centroids = getcentroids(problem)
-    m, s = mean(centroids), std(centroids)
-    scentroids = map(centroids) do c
-        (c .- m) ./ s
+    if scale
+        m, s = mean(centroids), std(centroids)
+        scentroids = map(centroids) do c
+            (c .- m) ./ s
+        end
+    else
+        scentroids = centroids
     end
     return NeuralNetwork(nn_model, scentroids)
 end
@@ -46,3 +62,9 @@ end
 function (tf::TrainFunction)(p)
     return PseudoDensities(tf.model.params_to_out(p))
 end
+
+function (ml::NeuralNetwork)(x::AbstractVector{<:Coordinates})
+    return PredictFunction(ml).(x)
+end
+(ml::NeuralNetwork)(x::Coordinates) = PredictFunction(ml)(x.coords)
+(ml::NeuralNetwork)(x::NNParams) = TrainFunction(ml)(x.p)
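The new Coordinates and NNParams wrappers make a NeuralNetwork callable in two ways: wrapping a flat weight vector routes the call through TrainFunction (densities at all stored centroids), while wrapping a spatial point routes it through PredictFunction (a single prediction). A rough usage sketch, not from the commit, reusing the Chain from test/examples/neural.jl and assuming PredictFunction simply evaluates the wrapped network at the given coordinates:

using TopOpt, Flux

problem = PointLoadCantilever(Val{:Linear}, (160, 40), (1.0, 1.0), 1.0, 0.3, 1.0)
m = 20
act = leakyrelu
nn = Chain(Dense(2, m, act), Dense(m, m, act), Dense(m, m, act), Dense(m, 1, sigmoid))
nn_model = NeuralNetwork(nn, problem)   # element centroids are now stored on the model

w0 = nn_model.init_params               # flat Float64 weight vector
x = nn_model(NNParams(w0))              # TrainFunction path: PseudoDensities at all centroids
rho = nn_model(Coordinates([0.5, 0.5])) # PredictFunction path: prediction at one 2D point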

test/examples/neural.jl

Lines changed: 1 addition & 1 deletion
@@ -24,7 +24,7 @@ comp = Compliance(solver)
 volfrac = Volume(solver)
 
 m = 20
-act = relu
+act = leakyrelu
 nn = Chain(Dense(2, m, act), Dense(m, m, act), Dense(m, m, act), Dense(m, 1, sigmoid))
 nn_model = NeuralNetwork(nn, problem)
 tf = TrainFunction(nn_model)

test/examples/neural2.jl

Lines changed: 99 additions & 0 deletions
@@ -0,0 +1,99 @@
+using TopOpt, Zygote, Flux
+
+E = 1.0 # Young’s modulus
+v = 0.3 # Poisson’s ratio
+f = 1.0 # downward force
+els = (160, 40)
+
+problem = PointLoadCantilever(Val{:Linear}, els, (1.0, 1.0), E, v, f)
+
+# problem settings
+V = 0.5
+xmin = 1e-6
+rmin = 3.0
+
+# SIMP penalty
+p = 1.0
+delta_p = 0.01
+p_max = 5.0
+
+penalty = TopOpt.PowerPenalty(p)
+solver = FEASolver(Direct, problem; xmin, penalty)
+cheqfilter = DensityFilter(solver; rmin)
+comp = Compliance(solver)
+volfrac = Volume(solver)
+
+# constraint aggregation penalty
+alpha = 0.1
+delta_alpha = 0.05
+alpha_max = 100
+
+# neural network
+m = 20
+act = leakyrelu
+nn = NeuralNetwork(
+    Chain(
+        Dense(2, m, act, init = Flux.glorot_normal),
+        Dense(m, m, act, init = Flux.glorot_normal),
+        Dense(m, m, act, init = Flux.glorot_normal),
+        Dense(m, m, act, init = Flux.glorot_normal),
+        Dense(m, m, act, init = Flux.glorot_normal),
+        Dense(m, m, act, init = Flux.glorot_normal),
+        softmax,
+        x -> [x[1]],
+    ),
+    problem,
+    scale = true,
+)
+w0 = nn.init_params
+# w0 ./= 10
+
+C0 = comp(cheqfilter(PseudoDensities(fill(V, TopOpt.getncells(problem)))))
+
+# optimization
+alg = Flux.Optimise.Adam(0.1)
+clip_alg = Flux.Optimise.ClipValue(1.0)
+w = copy(w0)
+Δ = copy(w)
+proj = HeavisideProjection(0.0)
+
+# termination criteria
+eps = Inf
+eps_star = 0.05
+maxiter = 100
+epoch = 1
+constr_tol = 0.01
+violation = Inf
+todensities(w; filter = true) = filter ? PseudoDensities(proj.(cheqfilter(nn(NNParams(w))).x)) : PseudoDensities(proj.(nn(NNParams(w)).x))
+while true
+    epoch > maxiter && break
+    eps < eps_star && violation < constr_tol && break
+    global penalty = TopOpt.PowerPenalty(p)
+    global solver = FEASolver(Direct, problem; xmin, penalty)
+    global cheqfilter = DensityFilter(solver; rmin)
+    global comp = Compliance(solver)
+    global volfrac = Volume(solver)
+
+    global obj = w -> comp(todensities(w, filter = true)) / C0
+    global constr = w -> volfrac(todensities(w, filter = false)) / V - 1
+    global combined_obj = w -> obj(w) + alpha * constr(w)^2
+
+    global Δ = Zygote.gradient(combined_obj, w)[1]
+    @info "grad norm: $(norm(Δ))"
+    Flux.Optimise.apply!(clip_alg, w, Δ)
+    Flux.Optimise.apply!(alg, w, Δ)
+    global w = w - Δ
+    violation = constr(w)
+    global alpha = min(alpha_max, alpha + delta_alpha)
+    global p = min(p_max, p + delta_p)
+    global epoch += 1
+    global x = todensities(w; filter = false)
+    global eps = sum(0.05 .< x.x .< 0.95) / length(x.x)
+    @info "eps = $eps"
+    @info "obj = $(comp(todensities(w; filter = true)))"
+    @info "constr = $(volfrac(x) - V)"
+    @show alpha
+end
+
+using Images, ImageInTerminal
+reshape(Gray.(1 .- x), els...)'
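test/examples/neural2.jl trains the network weights against a quadratic penalty on the volume constraint, obj(w) + alpha * constr(w)^2, while ramping both the SIMP power p and the penalty weight alpha each epoch. The toy sketch below is not part of the commit and uses a made-up loss; it only isolates the Flux.Optimise update pattern from the loop, in which apply! mutates the gradient in place (ClipValue clamps it, Adam turns it into a scaled step) and the step is then applied manually.

using Flux, Zygote

loss(w) = sum(abs2, w .- 1)                # stand-in for combined_obj
w = randn(5)
alg = Flux.Optimise.Adam(0.1)
clip_alg = Flux.Optimise.ClipValue(1.0)
for epoch in 1:100
    Δ = Zygote.gradient(loss, w)[1]
    Flux.Optimise.apply!(clip_alg, w, Δ)   # clamp each gradient entry to [-1, 1]
    Flux.Optimise.apply!(alg, w, Δ)        # rescale Δ into an Adam step, in place
    global w = w - Δ                       # take the step, as neural2.jl does
end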
