Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 18 additions & 2 deletions examples/cfd/flow_reconstruction_diffusion/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -51,14 +51,31 @@ In directory ``physicsnemo/examples/cfd/flow_reconstruction_diffusion/``, run:
python train.py --config-name=config_dfsr_train
``

or
or

(with physics-informed conditioning)

``
python train.py --config-name=config_dfsr_cond_train
``

You can also use the helper script:

```bash
bash setup_and_train.sh
```

Optional environment variables:

- `CONFIG_NAME` (default: `config_dfsr_train`)
- `TRAIN_EXTRA_ARGS` (default: empty; appended to `train.py`)

Example for conditional training:

```bash
CONFIG_NAME=config_dfsr_cond_train bash setup_and_train.sh
```

<b>Step 2 - Super-resolution</b>

In directory ``physicsnemo/examples/cfd/flow_reconstruction_diffusion/``, run:
Expand All @@ -81,4 +98,3 @@ This implementation is based on / inspired by:

- [https://github.com/ermongroup/SDEdit](https://github.com/ermongroup/SDEdit) (SDEdit: Guided Image Synthesis and Editing with Stochastic Differential Equations)
- [https://github.com/ermongroup/ddim](https://github.com/ermongroup/ddim) (Denoising Diffusion Implicit Models)

30 changes: 30 additions & 0 deletions examples/cfd/flow_reconstruction_diffusion/setup_and_train.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
#!/usr/bin/env bash
set -euo pipefail

# Reproducible setup + train helper for flow reconstruction diffusion.
# Run this inside a PhysicsNeMo environment.
#
# Optional environment variables:
#   CONFIG_NAME       (default: config_dfsr_train)
#   TRAIN_EXTRA_ARGS  (default: empty; appended to train.py)

# Resolve the directory this script lives in, so it can be invoked from anywhere.
script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

# Fill in defaults for the user-facing knobs if they were not exported.
: "${CONFIG_NAME:=config_dfsr_train}"
: "${TRAIN_EXTRA_ARGS:=}"

echo ">>> [0/3] Entering ${script_dir}"
cd "${script_dir}"

echo ">>> [1/3] Installing dependencies"
python -m pip install --upgrade pip
python -m pip install -r requirements.txt

echo ">>> [2/3] Starting training with --config-name ${CONFIG_NAME}"
train_cmd=(python train.py --config-name "${CONFIG_NAME}")
if [[ -n "${TRAIN_EXTRA_ARGS}" ]]; then
    # Deliberate word-splitting: TRAIN_EXTRA_ARGS holds whitespace-separated args.
    # shellcheck disable=SC2206
    train_cmd+=(${TRAIN_EXTRA_ARGS})
fi
"${train_cmd[@]}"

echo ">>> [3/3] Done. Check configured output directory for logs and snapshots."
22 changes: 19 additions & 3 deletions examples/cfd/flow_reconstruction_diffusion/train.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@

import hydra
import torch
from omegaconf import DictConfig
from omegaconf import DictConfig, ListConfig
from training_loop import training_loop
from misc import EasyDict

Expand All @@ -46,6 +46,21 @@
import argparse


def _to_json_serializable(obj):
    """Recursively convert OmegaConf/EasyDict-style objects to JSON-safe types.

    Args:
        obj: Arbitrary config value — a ``DictConfig``/``ListConfig``, plain
            ``dict``/``list``/``tuple``, a JSON scalar, or any other object.

    Returns:
        A structure built only of ``dict``, ``list``, ``str``, ``int``,
        ``float``, ``bool`` and ``None``, suitable for ``json.dumps``.
    """
    if isinstance(obj, (DictConfig, ListConfig)):
        # Bug fix: the module-level import only brings in DictConfig and
        # ListConfig, so a bare ``OmegaConf`` reference would raise NameError
        # at call time. Import it locally (harmless if also imported above).
        from omegaconf import OmegaConf

        obj = OmegaConf.to_container(obj, resolve=True)

    if isinstance(obj, dict):
        # JSON object keys must be strings; coerce non-string keys explicitly.
        return {str(k): _to_json_serializable(v) for k, v in obj.items()}
    if isinstance(obj, (list, tuple)):
        # Tuples are flattened to lists — JSON has no tuple type.
        return [_to_json_serializable(v) for v in obj]
    if isinstance(obj, (str, int, float, bool)) or obj is None:
        return obj

    # Fallback: stringify anything unknown (paths, tensors, enums, ...) so
    # serialization never fails on an exotic config value.
    return str(obj)


@hydra.main(version_base="1.2", config_path="conf", config_name="config")
def main(cfg: DictConfig) -> None:
"""Train diffusion-based generative model using the techniques described in the
Expand Down Expand Up @@ -285,8 +300,9 @@ def main(cfg: DictConfig) -> None:
# c.task = cfg.task

# Print options. # TODO replace prints with PhysicsNeMo logger
serialized_config = _to_json_serializable(c)
logger0.info("Training options:")
logger0.info(json.dumps(c, indent=2))
logger0.info(json.dumps(serialized_config, indent=2))
logger0.info(f"Output directory: {c.run_dir}")
logger0.info(f"Dataset path: {c.dataset_kwargs.path}")
logger0.info(f"Class-conditional: {c.dataset_kwargs.use_labels}")
Expand All @@ -306,7 +322,7 @@ def main(cfg: DictConfig) -> None:
if dist.rank == 0:
os.makedirs(c.run_dir, exist_ok=True)
with open(os.path.join(c.run_dir, "training_options.json"), "wt") as f:
json.dump(c, f, indent=2)
json.dump(serialized_config, f, indent=2)
# utils.Logger(file_name=os.path.join(c.run_dir, 'log.txt'), file_mode='a', should_flush=True)

# Train.
Expand Down