 import pandas as pd
 import matplotlib.pyplot as plt
 import keras
-from tqdm import tqdm

 from intro_example.inverse_kinematics import InverseKinematicsModel

@@ -171,14 +170,14 @@ def _():
     mo.md(r"""
     ## SBI in [BayesFlow](https://bayesflow.org)

-    BayesFlow is a flexible python library for simulation-based inference with neural networks, including diffusion models.
+    BayesFlow is a flexible Python library for simulation-based inference with neural networks, including diffusion models.
     """)
     return


 @app.cell(hide_code=True)
 def _():
-    mo.image(src="static/images/bayesflow_landing_light.jpg", caption="BayesFlow: a Python library for simulation-based Amortized Bayesian Inference with neural networks.")
+    mo.image(src="static/images/bf_landing_light.png", caption="BayesFlow: a Python library for simulation-based amortized Bayesian inference with neural networks.")
     return


@@ -234,10 +233,10 @@ def _():


 @app.function
-def prior():
+def prior() -> dict[str, np.ndarray]:
     """
     Generates a random draw from a 4-dimensional Gaussian prior distribution with a
-    spherical convariance matrix. The parameters represent a robot's arm
+    spherical covariance matrix. The parameters represent a robot's arm
     configuration, with the first parameter indicating the arm's height and the
     remaining three are angles.

@@ -255,7 +254,7 @@ def _():
     # Inverse Kinematics
     def observation_model(
         parameters
-    ) -> np.ndarray:
+    ) -> dict[str, np.ndarray]:
         """
         Returns the 2D coordinates of a robot arm given parameter vector.
         The first parameter represents the arm's height and the remaining three
@@ -300,7 +299,6 @@ def _(bf, observation_model):
     # now we create the simulator and generate training data
     n_simulations = 10000
     training_data = simulator.sample(n_simulations)
-    prior_samples = training_data['parameters']

     print(f"Generated {n_simulations} simulations")
     print(f"Data shape: {training_data['observables'].shape}")
@@ -373,7 +371,7 @@ def plot_params_kinematic(params, params2=None):
         _ax[2].set_title('Random Simulation 3')
         plt.show()

-    def plot_predicitve_distribution(posterior_samples, obs):
+    def plot_arm_posterior(posterior_samples, obs):
         _, _ax = plt.subplots(figsize=(3,3))

         _m = InverseKinematicsModel(
@@ -384,9 +382,9 @@ def plot_predicitve_distribution(posterior_samples, obs):
             obs['observables'][0, ::-1],
             exemplar_color="#E7298A"
         )
-        _ax.set_title('Posterior Predictive')
+        _ax.set_title('Arm Configurations')
         plt.show()
-    return plot_params_kinematic, plot_predicitve_distribution
+    return plot_arm_posterior, plot_params_kinematic


 @app.cell
@@ -497,8 +495,8 @@ def _():
 @app.cell
 def _(
     obs,
+    plot_arm_posterior,
     plot_params_kinematic,
-    plot_predicitve_distribution,
     training_data,
     workflow_kinematics_diffusion,
 ):
@@ -514,7 +512,7 @@ def _(
     )

     # simulate the posterior samples and plot them
-    plot_predicitve_distribution(posterior_samples_single, obs)
+    plot_arm_posterior(posterior_samples_single, obs)
     return


@@ -557,7 +555,7 @@ def _():


 @app.cell(hide_code=True)
-def _(workflow_kinematics_diffusion):
+def _():
     t_slider_backward = mo.ui.slider(
         start=0.0,
         stop=1.0,
@@ -568,11 +566,6 @@ def _(workflow_kinematics_diffusion):
         debounce=True
     )

-    priors = workflow_kinematics_diffusion.approximator.inference_network.base_distribution.sample(100)
-    if 'inference_variables' in workflow_kinematics_diffusion.approximator.standardize_layers:
-        priors = workflow_kinematics_diffusion.approximator.standardize_layers["inference_variables"](priors, forward=False)  # to original space
-    priors = keras.ops.convert_to_numpy(priors)
-
     t_slider_backward
     return (t_slider_backward,)

@@ -718,16 +711,16 @@ def _():

 @app.cell(hide_code=True)
 def _():
-    mo.image(src="static/images/fm_cm_visual.pdf", caption="Flow Matching and Consistency Models as alternative parameterizations of diffusion models.")
+    mo.image(src="static/images/fm_cm_visual.pdf", caption="Flow matching and consistency models as alternative parameterizations of diffusion models.")
     return


 @app.cell(hide_code=True)
 def _():
     mo.md(r"""
     Related generative models can be considered a different *parameterization* of a **diffusion model**:
-    - **flow matching**: we directly predict the vector field of the deterministic reverse path
-    - **consistency models**: designed for very fast sampling by learning to jump directly to clean samples
+    - **Flow matching**: we directly predict the vector field of the deterministic reverse path
+    - **Consistency models**: designed for very fast sampling by learning to jump directly to clean samples

     All of these models are available in BayesFlow. So let's load some trained models and compare their performance on our inverse kinematics problem!
     """)
@@ -810,9 +803,9 @@ def _(obs, workflows):
 @app.cell(hide_code=True)
 def _():
     mo.md(r"""
-    There are lots of choices to make when designing a diffusion model
-    - which noise schedule?
-    - diffusion model vs. other generative models?
+    There are lots of choices to make when designing a diffusion model, e.g.:
+    - Which noise schedule?
+    - Diffusion model vs. other generative models?

     We explained and benchmarked them extensively in [Arruda et al. (2025)](https://arxiv.org/abs/2512.20685):
     """)
@@ -921,17 +914,12 @@ def _():


 @app.cell(hide_code=True)
-def _(alpha, mode, obs, ops, strength, workflow_kinematics_diffusion):
+def _(mode, obs, strength, workflow_kinematics_diffusion):
     # Draw samples with and without guidance for side-by-side comparison
     constraints = [elbow_up_down_constraint(
         workflow_kinematics_diffusion, target=str(mode.value)
     )]

-    def scaling_function(t):
-        log_snr = workflow_kinematics_diffusion.approximator.inference_network.noise_schedule.get_log_snr(t, training=False)
-        _, sigma_t = workflow_kinematics_diffusion.approximator.inference_network.noise_schedule.get_alpha_sigma(log_snr)
-        return ops.square(alpha) / ops.square(sigma_t)
-
     theta_unguided = workflow_kinematics_diffusion.sample(
         conditions=obs,
         num_samples=300,
@@ -970,15 +958,6 @@ def scaling_function(t):
     ax[0].set_title("Posterior samples")
     ax[1].set_title(f"Guided posterior samples ({mode.value}, λ={strength.value})")
     plt.show()
-
-    # Also show parameter-pair plots for guided vs unguided
-    #bf.diagnostics.pairs_posterior(
-    #    estimates=theta_guided_np,
-    #    priors=theta_unguided_np,
-    #    variable_names=variable_names_nice,
-    #    height=1.75,
-    #    label='Guided Posterior'
-    #)
     return


@@ -992,27 +971,27 @@ def _():
     **Main takeaways:**

     1. **SBI is likelihood-free Bayesian inference**:
-        we avoid evaluating $p(\mathbf{x}\mid\boldsymbol\theta)$ and train purely from simulations.
+        We avoid evaluating $p(\mathbf{x}\mid\boldsymbol\theta)$ and train purely from simulations.

     2. **Amortization makes inference cheap at test time**:
-        after training, we can sample approximate posteriors for new observations instantly.
+        After training, we can sample approximate posteriors for new observations instantly.

     3. **Diffusion models provide strong posterior expressiveness**:
-        iterative denoising can represent complex and multimodal posteriors more reliably than many single-pass density models.
+        Iterative denoising can represent complex and multimodal posteriors more reliably than many single-pass density models.

     4. **Diffusion models allow post-hoc intervention**:
-        we can compose information from multiple sources or apply constraints at inference time.
+        We can compose information from multiple sources or apply constraints at inference time.


     **Recommended next steps:**

-    Check out the [BayesFlow Documentation](https://bayesflow.org) for
+    Check out [BayesFlow](https://bayesflow.org) for
     - exploring learned summary networks for higher-dimensional observations,
     - evaluating calibration and coverage using diagnostic tools, like simulation-based calibration.

     **Further reading:**

-    Arruda et al. (2025): [Diffusion Models In Simulation-Based Inference: A Tutorial Review](https://bayesflow-org.github.io/diffusion-experiments/)
+    [Diffusion Models In Simulation-Based Inference: A Tutorial Review](https://bayesflow-org.github.io/diffusion-experiments/)
     """)
     return
