"I'm a beginner, and while working on 'Inverse problem for the Lorenz system' and 'Inverse problem for the diffusion-reaction system: Problem setup,' I noticed that these two code segments are quite similar but have significantly different execution speeds. Despite searching extensively, I'm still unclear about the specific reason behind this difference. I'm hoping to receive guidance and help from the community. Your assistance would be greatly appreciated."
Here is the code for "Inverse problem for the Lorenz system":
"""Backend supported: tensorflow.compat.v1, tensorflow, pytorch, jax"""
import deepxde as dde
import numpy as np
def gen_traindata():
    data = np.load("../dataset/Lorenz.npz")
    return data["t"], data["y"]
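# Unknown Lorenz parameters, initialized to 1.0; they are identified during
# training via external_trainable_variables below.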
C1 = dde.Variable(1.0)
C2 = dde.Variable(1.0)
C3 = dde.Variable(1.0)
# Most backends
def Lorenz_system(x, y):
"""Lorenz system.
dy1/dx = 10 * (y2 - y1)
dy2/dx = y1 * (15 - y3) - y2
dy3/dx = y1 * y2 - 8/3 * y3
"""
y1, y2, y3 = y[:, 0:1], y[:, 1:2], y[:, 2:]
dy1_x = dde.grad.jacobian(y, x, i=0)
dy2_x = dde.grad.jacobian(y, x, i=1)
dy3_x = dde.grad.jacobian(y, x, i=2)
return [
dy1_x - C1 * (y2 - y1),
dy2_x - y1 * (C2 - y3) + y2,
dy3_x - y1 * y2 + C3 * y3,
]
# Backend JAX
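# With the JAX backend, y is a (values, function) pair and the external
# trainable variables are passed in through the extra `unknowns` argument.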
def Lorenz_system(x, y, unknowns=[C1, C2, C3]):
    C1, C2, C3 = unknowns
    y_val, y_fn = y
    y1, y2, y3 = y_val[:, 0:1], y_val[:, 1:2], y_val[:, 2:3]
    dy1_x, _ = dde.grad.jacobian(y, x, i=0)
    dy2_x, _ = dde.grad.jacobian(y, x, i=1)
    dy3_x, _ = dde.grad.jacobian(y, x, i=2)
    return [
        dy1_x - C1 * (y2 - y1),
        dy2_x - y1 * (C2 - y3) + y2,
        dy3_x - y1 * y2 + C3 * y3,
    ]
def boundary(_, on_initial):
    return on_initial
geom = dde.geometry.TimeDomain(0, 3)
# Initial conditions
ic1 = dde.icbc.IC(geom, lambda X: -8, boundary, component=0)
ic2 = dde.icbc.IC(geom, lambda X: 7, boundary, component=1)
ic3 = dde.icbc.IC(geom, lambda X: 27, boundary, component=2)
# Get the train data
observe_t, ob_y = gen_traindata()
observe_y0 = dde.icbc.PointSetBC(observe_t, ob_y[:, 0:1], component=0)
observe_y1 = dde.icbc.PointSetBC(observe_t, ob_y[:, 1:2], component=1)
observe_y2 = dde.icbc.PointSetBC(observe_t, ob_y[:, 2:3], component=2)
data = dde.data.PDE(
    geom,
    Lorenz_system,
    [ic1, ic2, ic3, observe_y0, observe_y1, observe_y2],
    num_domain=400,
    num_boundary=2,
    anchors=observe_t,
)
net = dde.nn.FNN([1] + [40] * 3 + [3], "tanh", "Glorot uniform")
model = dde.Model(data, net)
external_trainable_variables = [C1, C2, C3]
variable = dde.callbacks.VariableValue(
    external_trainable_variables, period=600, filename="variables.dat"
)
# train adam
model.compile(
"adam", lr=0.001, external_trainable_variables=external_trainable_variables
)
losshistory, train_state = model.train(iterations=20000, callbacks=[variable])
# train lbfgs
model.compile("L-BFGS", external_trainable_variables=external_trainable_variables)
losshistory, train_state = model.train(callbacks=[variable])
dde.saveplot(losshistory, train_state, issave=True, isplot=True)
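If ../dataset/Lorenz.npz is not available, comparable observation data can be regenerated with a short script like the one below. This is only a sketch: the true parameter values 10, 15, and 8/3 come from the docstring of Lorenz_system, the initial state (-8, 7, 27) from the initial conditions above, and the number of observation times (25) is arbitrary.
import numpy as np
from scipy.integrate import solve_ivp
def lorenz_rhs(t, y):
    # True Lorenz system used to generate the observations
    y1, y2, y3 = y
    return [10 * (y2 - y1), y1 * (15 - y3) - y2, y1 * y2 - 8 / 3 * y3]
t = np.linspace(0, 3, 25)
sol = solve_ivp(lorenz_rhs, (0, 3), [-8, 7, 27], t_eval=t, rtol=1e-8, atol=1e-8)
np.savez("../dataset/Lorenz.npz", t=t.reshape(-1, 1), y=sol.y.T)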
And here is the code for "Inverse problem for the diffusion-reaction system":
"""Backend supported: tensorflow.compat.v1, tensorflow, pytorch, paddle"""
import deepxde as dde
import numpy as np
def gen_traindata():
    data = np.load("../dataset/reaction.npz")
    t, x, ca, cb = data["t"], data["x"], data["Ca"], data["Cb"]
    X, T = np.meshgrid(x, t)
    X = np.reshape(X, (-1, 1))
    T = np.reshape(T, (-1, 1))
    Ca = np.reshape(ca, (-1, 1))
    Cb = np.reshape(cb, (-1, 1))
    return np.hstack((X, T)), Ca, Cb
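# Unknown reaction rate (kf) and diffusion scale (D), identified during
# training via external_trainable_variables in model.compile below.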
kf = dde.Variable(0.05)
D = dde.Variable(1.0)
def pde(x, y):
    ca, cb = y[:, 0:1], y[:, 1:2]
    dca_t = dde.grad.jacobian(y, x, i=0, j=1)
    dca_xx = dde.grad.hessian(y, x, component=0, i=0, j=0)
    dcb_t = dde.grad.jacobian(y, x, i=1, j=1)
    dcb_xx = dde.grad.hessian(y, x, component=1, i=0, j=0)
    eq_a = dca_t - 1e-3 * D * dca_xx + kf * ca * cb ** 2
    eq_b = dcb_t - 1e-3 * D * dcb_xx + 2 * kf * ca * cb ** 2
    return [eq_a, eq_b]
def fun_bc(x):
    return 1 - x[:, 0:1]
def fun_init(x):
    return np.exp(-20 * x[:, 0:1])
geom = dde.geometry.Interval(0, 1)
timedomain = dde.geometry.TimeDomain(0, 10)
geomtime = dde.geometry.GeometryXTime(geom, timedomain)
bc_a = dde.icbc.DirichletBC(
    geomtime, fun_bc, lambda _, on_boundary: on_boundary, component=0
)
bc_b = dde.icbc.DirichletBC(
    geomtime, fun_bc, lambda _, on_boundary: on_boundary, component=1
)
ic1 = dde.icbc.IC(geomtime, fun_init, lambda _, on_initial: on_initial, component=0)
ic2 = dde.icbc.IC(geomtime, fun_init, lambda _, on_initial: on_initial, component=1)
observe_x, Ca, Cb = gen_traindata()
observe_y1 = dde.icbc.PointSetBC(observe_x, Ca, component=0)
observe_y2 = dde.icbc.PointSetBC(observe_x, Cb, component=1)
data = dde.data.TimePDE(
    geomtime,
    pde,
    [bc_a, bc_b, ic1, ic2, observe_y1, observe_y2],
    num_domain=2000,
    num_boundary=100,
    num_initial=100,
    anchors=observe_x,
    num_test=50000,
)
net = dde.nn.FNN([2] + [20] * 3 + [2], "tanh", "Glorot uniform")
model = dde.Model(data, net)
model.compile("adam", lr=0.001, external_trainable_variables=[kf, D])
variable = dde.callbacks.VariableValue([kf, D], period=1000, filename="variables.dat")
losshistory, train_state = model.train(iterations=80000, callbacks=[variable])
dde.saveplot(losshistory, train_state, issave=True, isplot=True)
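One difference I can already see is that the two scripts do very different amounts of work: the Lorenz script trains on an ODE in a single variable with 400 domain points, only first-order derivatives, and 20000 Adam iterations, while the diffusion-reaction script trains on a PDE in (x, t) with 2000 domain points plus the whole observation meshgrid as anchors, second-order (Hessian) terms in the residual, 50000 test points, and 80000 iterations. I'm not sure whether that alone explains the gap. A quick check is to print the sizes of the point sets each model evaluates; this is a sketch that assumes the train_state attributes below exist in your DeepXDE version (run it after model.train in each script).
print("train points:", train_state.X_train.shape)
print("test points:", train_state.X_test.shape)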