Skip to content
Open
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 13 additions & 10 deletions examples/keras_recipes/bayesian_neural_networks.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
Title: Probabilistic Bayesian Neural Networks
Author: [Khalid Salama](https://www.linkedin.com/in/khalid-salama-24403144/)
Date created: 2021/01/15
Last modified: 2021/01/15
Last modified: 2026/01/11
Description: Building probabilistic Bayesian neural network models with TensorFlow Probability.
Accelerator: GPU
"""
Expand Down Expand Up @@ -53,8 +53,8 @@

import numpy as np
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
import tf_keras as keras
from tf_keras import layers
import tensorflow_datasets as tfds
import tensorflow_probability as tfp

Expand Down Expand Up @@ -225,7 +225,8 @@ def prior(kernel_size, bias_size, dtype=None):
[
tfp.layers.DistributionLambda(
lambda t: tfp.distributions.MultivariateNormalDiag(
loc=tf.zeros(n), scale_diag=tf.ones(n)
loc=tf.zeros(n, dtype=tf.float32),
scale_diag=tf.ones(n, dtype=tf.float32),
)
)
]
Expand All @@ -241,9 +242,9 @@ def posterior(kernel_size, bias_size, dtype=None):
posterior_model = keras.Sequential(
[
tfp.layers.VariableLayer(
tfp.layers.MultivariateNormalTriL.params_size(n), dtype=dtype
tfp.layers.MultivariateNormalTriL.params_size(n), dtype=tf.float32
),
tfp.layers.MultivariateNormalTriL(n),
tfp.layers.MultivariateNormalTriL(n, dtype=tf.float32),
]
)
return posterior_model
Expand All @@ -268,6 +269,7 @@ def create_bnn_model(train_size):
make_posterior_fn=posterior,
kl_weight=1 / train_size,
activation="sigmoid",
dtype=tf.float32,
)(features)

# The output is deterministic: a single point estimate.
Expand Down Expand Up @@ -373,13 +375,14 @@ def create_probablistic_bnn_model(train_size):
make_posterior_fn=posterior,
kl_weight=1 / train_size,
activation="sigmoid",
dtype=tf.float32,
)(features)

# Create a probabilistic output (Normal distribution), and use the `Dense` layer
# Create a probabilistic output (Normal distribution), and use the `Dense` layer
# to produce the parameters of the distribution.
# We set units=2 to learn both the mean and the variance of the Normal distribution.
distribution_params = layers.Dense(units=2)(features)
outputs = tfp.layers.IndependentNormal(1)(distribution_params)
distribution_params = layers.Dense(units=2, dtype=tf.float32)(features)
outputs = tfp.layers.IndependentNormal(1, dtype=tf.float32)(distribution_params)

model = keras.Model(inputs=inputs, outputs=outputs)
return model
Expand All @@ -394,7 +397,7 @@ def create_probablistic_bnn_model(train_size):


def negative_loglikelihood(targets, estimated_distribution):
    """Negative log-likelihood loss for a probabilistic model output.

    The diff artifact left both the pre- and post-change return statements in
    place; this keeps only the new behavior, which casts the targets to
    float32 so their dtype matches the float32 output distribution.

    Args:
        targets: Ground-truth values; cast to ``tf.float32`` before scoring.
        estimated_distribution: A TFP distribution-like object exposing
            ``log_prob`` (the model's probabilistic output).

    Returns:
        The elementwise negative log-probability of ``targets`` under
        ``estimated_distribution``.
    """
    return -estimated_distribution.log_prob(tf.cast(targets, tf.float32))


num_epochs = 1000
Expand Down