
Commit 16eed83

remove double initialize
Signed-off-by: Kyle Sayers <[email protected]>
1 parent 2a59554 commit 16eed83

2 files changed: +9 −4 lines
src/llmcompressor/core/lifecycle.py

Lines changed: 6 additions & 3 deletions
```diff
@@ -84,10 +84,13 @@ def initialize(
         :return: List of data returned from initialization of modifiers
         :rtype: List[Any]
         """
-        self.state.update(**kwargs)
-        if self.initialized_:  # TODO: do not initialize twice
-            return
+        if self.initialized_:
+            raise ValueError(
+                "Initialize was called twice. To update state values after "
+                "initialization, please use `active_session().state.update()`"
+            )

+        self.state.update(**kwargs)
         logger.debug("Initializing compression lifecycle")
         self.recipe_container.append(recipe, recipe_stage, recipe_args)
         self.modifiers = self.recipe_container.get_modifiers()
```
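
Net effect: a second call to `initialize` now fails loudly instead of returning early, and the state update happens only after the guard, so a rejected call no longer mutates state. Below is a minimal sketch of the resulting control flow, using a simplified stand-in rather than the real `CompressionLifecycle` (recipes, modifiers, and the real `State` object are omitted):

```python
# Illustrative sketch only; names beyond `initialized_` and `state` are
# simplified stand-ins, not the actual llmcompressor implementation.
class LifecycleSketch:
    def __init__(self):
        self.initialized_ = False
        self.state = {}  # stand-in for the session State object

    def initialize(self, **kwargs):
        # Guard first: refuse a second initialization before touching state.
        if self.initialized_:
            raise ValueError(
                "Initialize was called twice. To update state values after "
                "initialization, please use `active_session().state.update()`"
            )
        self.state.update(**kwargs)
        self.initialized_ = True
```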

src/llmcompressor/transformers/finetune/session_mixin.py

Lines changed: 3 additions & 1 deletion
```diff
@@ -222,7 +222,9 @@ def create_optimizer(self):
             len(self.train_dataset) / total_batch_size
         )

-        initialize(optimizer=self.optimizer, steps_per_epoch=self.total_steps_per_epoch)
+        active_session().state.update(
+            optimizer=self.optimizer, steps_per_epoch=self.total_steps_per_epoch
+        )

         return self.optimizer
```
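
`create_optimizer` runs after the session has already been initialized, so re-invoking `initialize` here would now raise; pushing the values through the active session's state is the replacement pattern. A hedged usage sketch follows (assuming `active_session` is importable from `llmcompressor.core`; the optimizer and step count below are placeholders, not values from this commit):

```python
from llmcompressor.core import active_session

# Placeholders standing in for the trainer's real optimizer and schedule.
optimizer = ...
steps_per_epoch = 100

# Update the already-initialized session state instead of calling
# initialize() a second time, which would now raise ValueError.
active_session().state.update(
    optimizer=optimizer, steps_per_epoch=steps_per_epoch
)
```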
