@@ -26,7 +26,9 @@ When building AI, you change many things at once: code, data, prompts, models. A
2626
2727LitLogger runs locally (coming soon), in the cloud, or on-prem. It is free for developers and integrates with [Lightning AI](https://lightning.ai/), but works without logging in.
2828
29- <img width =" 3024 " height =" 1716 " alt =" image " src =" https://github.com/user-attachments/assets/27f9d8f1-2a13-4080-a64f-374d957712fa " />
29+ <div align =' center ' >
30+ <img width =" 800px " style =" max-width : 100% ;" alt =" image " src =" https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/litlogger/experiment_comparison_charts.png " />
31+ </div >
3032
3133# Quick start
3234
@@ -66,7 +68,7 @@ Add LitLogger to any training framework, PyTorch, Jax, TensorFlow, Numpy, SKLear
6668
6769<div align =' center ' >
6870
69- <img alt =" LitServe " src =" https://github.com/user-attachments/assets/50d9a2f7-17d0-4448-ad21-6be600ab53fc " width =" 800px " style =" max-width : 100% ;" >
71+ <img alt =" Comparing Experiment Metrics " src =" https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/litlogger/experiment_comparison_table.png " width =" 800px " style =" max-width : 100% ;" >
7072
7173&nbsp;
7274</div >
@@ -75,7 +77,8 @@ Add LitLogger to any training framework, PyTorch, Jax, TensorFlow, Numpy, SKLear
7577import torch
7678import torch.nn as nn
7779import torch.optim as optim
78- from lightning.pytorch.loggers import LitLogger
80+ import litlogger
81+ from litlogger import Model, File
7982import os
8083
8184# define a simple neural network
@@ -89,7 +92,9 @@ class SimpleModel(nn.Module):
8992
9093def train ():
9194 # initialize LitLogger
92- logger = LitLogger(metadata = {" task" : " model_training" , " model_name" : " SimpleModel" })
95+ experiment = litlogger.init()
96+ experiment[" task" ] = " model_training"
97+ experiment[" model_name" ] = " SimpleModel"
9398
9499 # hyperparameters
95100 num_epochs = 10
@@ -113,25 +118,25 @@ def train():
113118 optimizer.step()
114119
115120 # log training loss
116- logger.log_metrics({ " train_loss " : loss.item()} , step = epoch)
121+ experiment[ " train/loss " ].append( loss.item(), step = epoch)
117122 print (f " Epoch [ { epoch+ 1 } / { num_epochs} ], Loss: { loss.item():.4f } " )
118123
119124 # log the trained model
120- logger.log_model (model)
125+ experiment[ " model " ] = Model (model)
121126 print (" model logged." )
122127
123128 # create a dummy artifact file and log it
124129 with open (" model_config.txt" , " w" ) as f:
125130 f.write(f " learning_rate: { learning_rate} \n " )
126131 f.write(f " num_epochs: { num_epochs} \n " )
127- logger.log_model_artifact (" model_config.txt" )
132+ experiment[ " model_config " ] = File (" model_config.txt" )
128133 print (" model config artifact logged." )
129134
130135 # Clean up the dummy artifact file after logging
131136 os.remove(" model_config.txt" )
132137
133138 # finalize the logger when training is done
134- logger .finalize()
139+ experiment .finalize()
135140 print (" training complete and logger finalized." )
136141
137142if __name__ == " __main__" :
@@ -146,7 +151,7 @@ Add LitLogger to any inference engine, LitServe, vLLM, FastAPI, etc...
146151
147152<div align =' center ' >
148153
149- <img alt =" LitServe " src =" https://github.com/user-attachments/assets/ac454da2-0825-4fcf-b422-c6d3a1526cf0 " width =" 800px " style =" max-width : 100% ;" >
154+ <img alt =" Comparing Experiments Side By Side " src =" https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/litlogger/experiment_comparison_side_by_side.png " width =" 800px " style =" max-width : 100% ;" >
150155
151156&nbsp;
152157</div >
@@ -206,7 +211,7 @@ PyTorch Lightning now comes with LitLogger natively built in. It's also built by
206211
207212<div align =' center ' >
208213
209- <img alt =" LitServe " src =" https://github.com/user-attachments/assets/43071433-c319-4fc1-ac5a-03a5c5598a88 " width =" 800px " style =" max-width : 100% ;" >
214+ <img alt =" Comparing Experiment Media " src =" https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/litlogger/experiment_comparison_media.png " width =" 800px " style =" max-width : 100% ;" >
210215
211216&nbsp;
212217</div >
@@ -242,7 +247,7 @@ This is a fun example that simulates a long model training run.
242247
243248<div align =' center ' >
244249
245- <img alt =" LitServe " src =" https://github.com/user-attachments/assets/fd15aa32-2b56-4324-81b6-c87c86db8a3b " width =" 800px " style =" max-width : 100% ;" >
250+ <img alt =" Comparing Experiment Metrics " src =" https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/litlogger/experiment_comparison_charts.png " width =" 800px " style =" max-width : 100% ;" >
246251
247252&nbsp;
248253</div >
0 commit comments