1 parent 0e35757 commit dc6918c
deepxde/utils/pytorch.py
@@ -7,6 +7,10 @@ class LLAAF(torch.nn.Module):
     """Pytorch implementation of layer-wise locally adaptive
     activation functions (L-LAAF).
 
+    Args:
+        activation: The activation function to use.
+        n: The scaling factor.
+
     Examples:
 
     To define a L-LAAF ReLU with the scaling factor ``n = 10``:
@@ -24,13 +28,6 @@ class LLAAF(torch.nn.Module):
         """
 
     def __init__(self, activation, n):
-        """
-        Initialize the L-LAAF module.
-
-        Args:
-            activation: The activation function to use.
-            n: The scaling factor.
-        """
         super().__init__()
         self.activation = activation
         self.n = n
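For context, the class touched by this commit wraps a fixed activation with a per-layer trainable slope. Below is a minimal, self-contained sketch of how such an L-LAAF module can be assembled and used, following the Args documented in the moved docstring. The trainable parameter a and its 1/n initialization are assumptions taken from the L-LAAF formulation (activation applied to n * a * x), not from this diff; the real DeepXDE class may differ in detail.

import torch


class LLAAF(torch.nn.Module):
    """Sketch of a layer-wise locally adaptive activation function (L-LAAF).

    Assumption: the layer computes activation(n * a * x) with one trainable
    slope `a` per layer, initialized to 1/n so that n * a = 1 at the start
    and the module initially behaves like the plain activation.
    """

    def __init__(self, activation, n):
        super().__init__()
        self.activation = activation  # e.g. torch.relu
        self.n = n                    # fixed scaling factor
        # Trainable per-layer slope (assumed detail, initialized to 1/n).
        self.a = torch.nn.Parameter(torch.tensor(1.0 / n))

    def forward(self, x):
        return self.activation(self.n * self.a * x)


# Usage, mirroring the docstring example: an L-LAAF ReLU with n = 10.
llaaf = LLAAF(torch.relu, 10)
y = llaaf(torch.randn(4, 3))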