Skip to content
5 changes: 1 addition & 4 deletions src/llmcompressor/core/lifecycle.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,11 +91,8 @@ def initialize(
:return: List of data returned from initialization of modifiers
:rtype: List[Any]
"""
self.state.update(**kwargs)
if self.initialized_: # TODO: do not initialize twice
return

logger.debug("Initializing compression lifecycle")
self.state.update(**kwargs)
self.recipe_container.append(recipe, recipe_stage, recipe_args)
self.modifiers = self.recipe_container.get_modifiers()
self._set_model_layer_prefix()
Expand Down
12 changes: 0 additions & 12 deletions src/llmcompressor/core/state.py
Original file line number Diff line number Diff line change
Expand Up @@ -112,18 +112,6 @@ class State:
model_log_cadence: Optional[float] = None
_last_log_step: Union[float, int, None] = None

@property
def compression_ready(self) -> bool:
    """
    Report whether this state holds everything needed to run compression.

    :return: True if both the model and the optimizer are set,
        False otherwise
    :rtype: bool
    """
    # De Morgan form of: model is not None and optimizer is not None
    is_ready = not (self.model is None or self.optimizer is None)
    logger.debug("Compression ready: {}", is_ready)
    return is_ready

def update(
self,
model: Any = None,
Expand Down
10 changes: 0 additions & 10 deletions tests/unit/core/test_state.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,16 +63,6 @@ def test_state_update():
assert state.model_log_cadence == 2


@pytest.mark.regression
def test_state_sparsification_ready():
    """A State reports compression-ready only after model and optimizer are set."""
    fresh_state = State()
    # Nothing attached yet, so the state must not report ready.
    assert not fresh_state.compression_ready

    fresh_state.model, fresh_state.optimizer = "model", "optimizer"
    # Both pieces present now, so readiness flips on.
    assert fresh_state.compression_ready


@pytest.mark.regression
def test_state_update_loggers():
state = State()
Expand Down