Skip to content

Commit 8bcc9a3

Browse files
sn.get_bibliography() is added
1 parent 19a47e3 commit 8bcc9a3

File tree

7 files changed

+131
-12
lines changed

7 files changed

+131
-12
lines changed

examples/example-fitting-fourier.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,7 @@
3131
# Each network is defined by Functional.
3232
y1 = sn.Field('y1', 10)
3333
y2 = sn.Field('y2', 10)
34-
y1, y2 = sn.Functional([y1,y2], xf, [10, 10, 10], 'tanh', output_activation='tanh')
34+
y1, y2 = sn.Functional([y1,y2], xf, [10, 10, 10], 'l-tanh', output_activation='tanh')
3535

3636
y = sn.Functional('y', [xf*y1, xf*y2])
3737

@@ -59,3 +59,4 @@
5959
y_pred = y.eval(model, x_true)
6060
d_pred = d.eval(model, x_true)
6161

62+
sn.get_bibliography(format="bibtex") #bibtexml

sciann/__init__.py

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@
2525
from .utils.utilities import set_default_log_path, get_default_log_path
2626
from .utils.utilities import set_random_seed
2727
from .utils.utilities import set_floatx
28+
from .utils.utilities import get_bibliography
2829

2930
# SciANN.
3031
__author__ = "Ehsan Haghighat"
@@ -33,17 +34,15 @@
3334
__credits__ = []
3435
__url__ = "http://github.com/sciann/sciann"
3536
__license__ = "MIT"
36-
__version__ = "0.6.0.0"
37+
__version__ = "0.6.0.1"
3738
__cite__ = \
38-
'@misc{haghighat2019sciann, \n' +\
39-
' title={SciANN: A Keras/Tensorflow wrapper for scientific computations and physics-informed deep learning using artificial neural networks}, \n' +\
40-
' author={Ehsan Haghighat and Ruben Juanes}, \n' +\
41-
' year={2020}, \n' +\
42-
' eprint={2005.08803}, \n' +\
43-
' archivePrefix={arXiv}, \n' +\
44-
' primaryClass={cs.OH}, \n' +\
45-
' url = {https://arxiv.org/abs/2005.08803}' +\
46-
' howpublished={https://github.com/sciann/sciann.git}' +\
39+
'@article{haghighat2021sciann, \n' +\
40+
' title={SciANN: A Keras/TensorFlow wrapper for scientific computations and physics-informed deep learning using artificial neural networks}, \n' +\
41+
' author={Haghighat, Ehsan and Juanes, Ruben}, \n' +\
42+
' journal={Computer Methods in Applied Mechanics and Engineering}, \n' +\
43+
' year={2021}, \n' +\
44+
' url = {https://doi.org/10.1016/j.cma.2020.113552}, \n' +\
45+
' howpublished={https://github.com/sciann/sciann.git}, \n' +\
4746
'}'
4847

4948
# Import message.
@@ -67,3 +66,4 @@
6766

6867
# set default logging directory.
6968
set_default_log_path(os.path.join(os.getcwd(), "logs"))
69+
initialize_bib(os.path.join(os.path.dirname(__file__), 'references', 'bibliography'))

sciann/references/bibliography

Lines changed: 66 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,66 @@
1+
@article{haghighat2021sciann,
2+
title={SciANN: A Keras/TensorFlow wrapper for scientific computations and physics-informed deep learning using artificial neural networks},
3+
author={Haghighat, Ehsan and Juanes, Ruben},
4+
journal={Computer Methods in Applied Mechanics and Engineering},
5+
volume={373},
6+
pages={113552},
7+
year={2021},
8+
publisher={Elsevier},
9+
url={https://www.sciencedirect.com/science/article/pii/S0045782520307374}
10+
}
11+
12+
@article{raissi2019physics,
13+
title={Physics-informed neural networks: A deep learning framework for solving forward and inverse problems involving nonlinear partial differential equations},
14+
author={Raissi, Maziar and Perdikaris, Paris and Karniadakis, George E},
15+
journal={Journal of Computational Physics},
16+
volume={378},
17+
pages={686--707},
18+
year={2019},
19+
publisher={Elsevier},
20+
url={https://www.sciencedirect.com/science/article/pii/S0021999118307125}
21+
}
22+
23+
@article{wang2020gp,
24+
title={Understanding and mitigating gradient pathologies in physics-informed neural networks},
25+
author={Wang, Sifan and Teng, Yujun and Perdikaris, Paris},
26+
journal={arXiv preprint arXiv:2001.04536},
27+
year={2020},
28+
url={https://arxiv.org/abs/2001.04536}
29+
}
30+
31+
@article{wang2020ntk,
32+
title={When and why PINNs fail to train: A neural tangent kernel perspective},
33+
author={Wang, Sifan and Yu, Xinling and Perdikaris, Paris},
34+
journal={arXiv preprint arXiv:2007.14527},
35+
year={2020},
36+
url={https://arxiv.org/abs/2007.14527}
37+
}
38+
39+
@article{li2017visualizing,
40+
title={Visualizing the loss landscape of neural nets},
41+
author={Li, Hao and Xu, Zheng and Taylor, Gavin and Studer, Christoph and Goldstein, Tom},
42+
journal={arXiv preprint arXiv:1712.09913},
43+
year={2017},
44+
url={https://arxiv.org/abs/1712.09913}
45+
}
46+
47+
@article{wang2020eigenvector,
48+
title={On the eigenvector bias of Fourier feature networks: From regression to solving multi-scale PDEs with physics-informed neural networks},
49+
author={Wang, Sifan and Wang, Hanwen and Perdikaris, Paris},
50+
journal={arXiv preprint arXiv:2012.10047},
51+
year={2020},
52+
url={https://arxiv.org/abs/2012.10047}
53+
}
54+
55+
@article{jagtap2020locally,
56+
title={Locally adaptive activation functions with slope recovery for deep and physics-informed neural networks},
57+
author={Jagtap, Ameya D and Kawaguchi, Kenji and Em Karniadakis, George},
58+
journal={Proceedings of the Royal Society A},
59+
volume={476},
60+
number={2239},
61+
pages={20200334},
62+
year={2020},
63+
publisher={The Royal Society},
64+
url={https://royalsocietypublishing.org/doi/abs/10.1098/rspa.2020.0334}
65+
}
66+

sciann/utils/callbacks.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@
1414
import numpy as np
1515

1616
from .utilities import unpack_singleton, to_list, get_log_path
17+
from .utilities import append_to_bib
1718
from .math import tf_gradients
1819

1920

@@ -60,6 +61,7 @@ def __init__(self, model, inputs, targets, weights,
6061
beta=0.1, freq=100, log_freq=None,
6162
hessian=False, types=None, **kwargs):
6263
super(GradientPathologyLossWeight, self).__init__()
64+
append_to_bib("wang2020gp")
6365
# limit number of samples for performance concerns.
6466
if inputs[0].shape[0] > 20000:
6567
sample_ids = np.random.choice(inputs[0].shape[0], 20000, replace=False)
@@ -358,6 +360,7 @@ def __init__(self, model, inputs, targets, weights,
358360
norm=2, resolution=11, layer_wise=True,
359361
path=None, trials=1):
360362
super(LossLandscapeHistory, self).__init__()
363+
append_to_bib("li2017visualizing")
361364
self._model = model
362365
self._inputs = inputs
363366
self._layers = [layer for layer in model._layers if layer.weights]
@@ -491,6 +494,7 @@ def __init__(self, model, inputs, targets, weights=None,
491494
beta=0.1, freq=100, log_freq=None,
492495
hessian=False, types=None, **kwargs):
493496
super(NTKLossWeight, self).__init__()
497+
append_to_bib("wang2020ntk")
494498
if weights is None:
495499
weights = [np.ones(inputs[0].shape[0]) for i in range(len(model.outputs))]
496500
# limit number of samples for performance concerns.

sciann/utils/math.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@ def fourier(f, w=10):
3535
A Functional.
3636
"""
3737
validate_variable(f)
38+
append_to_bib("wang2020eigenvector")
3839
layers = []
3940
outputs = []
4041
for fi in f.outputs:

sciann/utils/utilities.py

Lines changed: 46 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,13 @@
2929
from .initializers import SciBiasInitializer as BInitializer
3030
from .activations import get_activation, SciActivation, SciActivationLayer
3131

32+
from pybtex.database.input import bibtex
33+
34+
3235
_DEFAULT_LOG_PATH = ""
36+
_BIBLIOGRAPHY = None
37+
_BIBLIOGRAPHY_TO_OUTPUT = []
38+
3339

3440
def _is_tf_1():
3541
return tf.__version__.startswith('1.')
@@ -70,6 +76,45 @@ def get_default_log_path():
7076
return _DEFAULT_LOG_PATH
7177

7278

79+
def initialize_bib(bib_file):
    """Load the package bibliography and seed the default citations.

    # Arguments
        bib_file: path to a BibTeX file parsable by `pybtex`.

    Side effects: rebinds the module-level `_BIBLIOGRAPHY` database and
    appends the two baseline entries (the SciANN paper and the PINN
    paper) to `_BIBLIOGRAPHY_TO_OUTPUT`.
    """
    global _BIBLIOGRAPHY
    global _BIBLIOGRAPHY_TO_OUTPUT
    _BIBLIOGRAPHY = bibtex.Parser().parse_file(bib_file)
    # These two works underpin every SciANN model, so cite them always.
    for key in ('haghighat2021sciann', 'raissi2019physics'):
        _BIBLIOGRAPHY_TO_OUTPUT.append(_BIBLIOGRAPHY.entries[key])
85+
86+
87+
def append_to_bib(bib_entery):
    """Mark a bibliography entry for inclusion in the printed citations.

    # Arguments
        bib_entery: citation key of an entry in the package bibliography.
            NOTE(review): the name is a typo for `bib_entry`; kept so any
            keyword-argument callers remain compatible.

    Does nothing when the bibliography has not been initialized yet
    (`initialize_bib` not called), so citation tracking can never break
    model construction. Raises `KeyError` for an unknown citation key.
    """
    # No `global` statements needed: `_BIBLIOGRAPHY` is only read and
    # `_BIBLIOGRAPHY_TO_OUTPUT` is only mutated in place, never rebound.
    if _BIBLIOGRAPHY is None:
        # Bibliography not loaded; skip tracking rather than crash with
        # an AttributeError on `None.entries`.
        return
    bib = _BIBLIOGRAPHY.entries[bib_entery]
    if bib not in _BIBLIOGRAPHY_TO_OUTPUT:
        _BIBLIOGRAPHY_TO_OUTPUT.append(bib)
93+
94+
95+
def get_bibliography(format='bibtex', file_name=None):
    """Output the bibliography for the features used in your model.

    # Arguments
        format: 'bibtex', 'bibtexml', 'yaml', ...
            check `pybtex` documentation for other options.
            default: 'bibtex'
            (name shadows the builtin, but is kept for API compatibility)
        file_name: path to a file.
            default: None. This results in printing the bib entries
            to standard output.
    """
    global _BIBLIOGRAPHY_TO_OUTPUT
    # Build the text in one pass; each entry is followed by a newline.
    # ''.join avoids the quadratic cost of repeated string concatenation.
    bib = ''.join(str(b.to_string(format)) + '\n'
                  for b in _BIBLIOGRAPHY_TO_OUTPUT)

    if file_name is None:
        print(bib)
    else:
        # `with` closes the file on exit; no explicit close() needed.
        with open(file_name, 'w') as f:
            f.write(bib)
116+
117+
73118
def get_log_path(path=None, prefix=None):
74119
file_path = _DEFAULT_LOG_PATH if path is None else path
75120
if not os.path.isdir(file_path):
@@ -169,6 +214,7 @@ def prepare_default_activations_and_initializers(actfs, seed=None):
169214
if len(lay_actf) == 1:
170215
activations.append(SciActivation(w, f))
171216
else:
217+
append_to_bib("jagtap2020locally")
172218
activations.append(SciActivationLayer(w, f))
173219

174220
return activations, bias_initializer, kernel_initializer

setup.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515

1616
setup(
1717
name='SciANN',
18-
version='0.6.0.0',
18+
version='0.6.0.1',
1919
description='A Keras/Tensorflow wrapper for scientific computations and physics-informed deep learning using artificial neural networks.',
2020
long_description=long_description,
2121
author='Ehsan Haghighat',
@@ -28,6 +28,7 @@
2828
'pyyaml',
2929
'h5py',
3030
'sklearn',
31+
'pybtex',
3132
'tensorflow>=2.1.0',
3233
],
3334
extras_require={

0 commit comments

Comments
 (0)