Skip to content

Commit e6fdd06

Browse files
authored
Remove dead code (#2435)
## Purpose ## * Remove dead code ## Changes ## * Remove `save_checkpoint` (this is now done by [post_process](https://github.com/vllm-project/llm-compressor/blob/main/src/llmcompressor/entrypoints/utils.py#L95)) * Remove `get_completed_stages`, `save_completed_stages` (stages no longer exist) * Remove `load_safetensors_state_dict` (we now either load with the transformers model definition or `model_free_ptq`) * Remove `set_deterministic_seeds` (not used) * Remove `is_package_available` Signed-off-by: Kyle Sayers <kylesayrs@gmail.com>
1 parent 7b7d1a5 commit e6fdd06

File tree

3 files changed

+2
-153
lines changed

3 files changed

+2
-153
lines changed

src/llmcompressor/pytorch/model_load/helpers.py

Lines changed: 1 addition & 99 deletions
Original file line numberDiff line numberDiff line change
@@ -1,75 +1,18 @@
1-
import json
2-
import os
3-
from typing import Any, Dict, List, Optional, Union
1+
from typing import Optional, Union
42

53
import torch
64
from loguru import logger
7-
from safetensors import safe_open
85
from torch.nn import Module
9-
from transformers import PreTrainedModel
106

117
from llmcompressor.core import active_session
12-
from llmcompressor.typing import Processor
13-
14-
COMPLETED_STAGES_FILENAME = "completed_stages.json"
158

169
__all__ = [
1710
"copy_python_files_from_model_cache",
1811
"parse_dtype",
1912
"get_session_model",
20-
"get_completed_stages",
21-
"save_completed_stages",
22-
"save_checkpoint",
2313
]
2414

2515

26-
def save_checkpoint(
27-
save_path: str,
28-
model: PreTrainedModel,
29-
processor: Optional[Processor] = None,
30-
save_safetensors: bool = True,
31-
save_compressed: bool = True,
32-
skip_sparsity_compression_stats: bool = False,
33-
):
34-
"""
35-
Save a model, processor, and recipe
36-
37-
:param save_path: Path used to save model and processor
38-
:param model: model to save
39-
:param processor: processor to save
40-
:param save_safetensors: save model checkpoint using safetensors file type
41-
:param save_compressed: save model checkpoint using compressed-tensors format
42-
"""
43-
from llmcompressor.transformers.compression.compressed_tensors_utils import (
44-
get_model_compressor, # avoid circular import
45-
)
46-
47-
# used for decompression
48-
# unfortunately, if skip_sparsity_compression_stats==True, sparsity stats
49-
# are computed twice. In the future, track sparsity from recipe or
50-
# share recipe between compression and decompression
51-
compressor = get_model_compressor(
52-
model=model,
53-
save_compressed=save_compressed,
54-
skip_sparsity_compression_stats=skip_sparsity_compression_stats,
55-
)
56-
57-
# saving the model also saves the recipe
58-
model.save_pretrained(
59-
save_path,
60-
save_safetensors=save_safetensors,
61-
save_compressed=save_compressed,
62-
skip_sparsity_compression_stats=skip_sparsity_compression_stats,
63-
)
64-
if processor is not None:
65-
processor.save_pretrained(save_path)
66-
67-
# decompression: saving the model modifies the model structure
68-
# as this is only a checkpoint, decompress model to enable future training/oneshot
69-
if compressor is not None:
70-
compressor.decompress_model(model)
71-
72-
7316
def parse_dtype(dtype_arg: Union[str, torch.dtype]) -> torch.dtype:
7417
"""
7518
:param dtype_arg: dtype or string to parse
@@ -100,47 +43,6 @@ def get_session_model() -> Optional[Module]:
10043
return active_model
10144

10245

103-
def get_completed_stages(checkpoint_dir: Any) -> List[str]:
104-
"""
105-
Given a checkpoint directory for a staged run, get the list of stages that
106-
have completed in a prior run if the checkpoint_dir is a string
107-
108-
:param checkpoint_dir: path to staged checkpoint
109-
:return: list of completed stage names
110-
"""
111-
if isinstance(checkpoint_dir, str):
112-
stage_path = os.path.join(checkpoint_dir, COMPLETED_STAGES_FILENAME)
113-
if os.path.exists(stage_path):
114-
with open(stage_path) as stage_file:
115-
stage_data = json.load(stage_file)
116-
return stage_data["completed"]
117-
118-
return []
119-
120-
121-
def save_completed_stages(checkpoint_dir: str, completed_stages: List[str]):
122-
"""
123-
Save a list of completed stages to a checkpoint directory
124-
125-
:param checkpoint_dir: model checkpoint directory to save stages to
126-
:param completed_stages: list of stage names that have been run
127-
"""
128-
stage_path = os.path.join(checkpoint_dir, COMPLETED_STAGES_FILENAME)
129-
with open(stage_path, "w") as out_file:
130-
json.dump({"completed": completed_stages}, out_file)
131-
132-
133-
def load_safetensors_state_dict(file_path: str) -> Dict[str, torch.Tensor]:
134-
"""
135-
Load a safetensors file from disk
136-
137-
:param file_path: path to the safetensors file
138-
:return: dictionary of safetensors data
139-
"""
140-
with safe_open(file_path, framework="pt", device="cpu") as f:
141-
return {key: f.get_tensor(key) for key in f.keys()}
142-
143-
14446
def copy_python_files_from_model_cache(model, save_path: str):
14547
config = model.config
14648
cache_path = None

src/llmcompressor/pytorch/utils/helpers.py

Lines changed: 0 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -2,13 +2,11 @@
22
Utility / helper functions
33
"""
44

5-
import random
65
from collections import OrderedDict
76
from collections.abc import Iterable, Mapping
87
from typing import Any
98

109
import numpy
11-
import torch
1210
from torch import Tensor
1311
from torch.nn import Module
1412

@@ -27,7 +25,6 @@
2725
"tensors_module_forward",
2826
"tensor_sparsity",
2927
"get_quantized_layers",
30-
"set_deterministic_seeds",
3128
]
3229

3330

@@ -238,15 +235,3 @@ def get_quantized_layers(module: Module) -> list[tuple[str, Module]]:
238235
quantized_layers.append((name, mod))
239236

240237
return quantized_layers
241-
242-
243-
def set_deterministic_seeds(seed: int = 0):
244-
"""
245-
Manually seeds the numpy, random, and torch packages.
246-
Also sets torch.backends.cudnn.deterministic to True
247-
:param seed: the manual seed to use. Default is 0
248-
"""
249-
numpy.random.seed(seed)
250-
random.seed(seed)
251-
torch.manual_seed(seed)
252-
torch.backends.cudnn.deterministic = True

src/llmcompressor/utils/helpers.py

Lines changed: 1 addition & 39 deletions
Original file line numberDiff line numberDiff line change
@@ -4,10 +4,8 @@
44
"""
55

66
import contextlib
7-
import importlib.metadata
8-
import importlib.util
7+
import importlib
98
import re
10-
from typing import Tuple, Union
119

1210
import torch
1311
from compressed_tensors.quantization import disable_quantization, enable_quantization
@@ -18,7 +16,6 @@
1816
from llmcompressor.utils import get_embeddings
1917

2018
__all__ = [
21-
"is_package_available",
2219
"import_from_path",
2320
"disable_cache",
2421
"DisableQuantization",
@@ -30,41 +27,6 @@
3027
]
3128

3229

33-
def is_package_available(
34-
package_name: str,
35-
return_version: bool = False,
36-
) -> Union[Tuple[bool, str], bool]:
37-
"""
38-
A helper function to check if a package is available
39-
and optionally return its version. This function enforces
40-
a check that the package is available and is not
41-
just a directory/file with the same name as the package.
42-
43-
inspired from:
44-
https://github.com/huggingface/transformers/blob/965cf677695dd363285831afca8cf479cf0c600c/src/transformers/utils/import_utils.py#L41
45-
46-
:param package_name: The package name to check for
47-
:param return_version: True to return the version of
48-
the package if available
49-
:return: True if the package is available, False otherwise or a tuple of
50-
(bool, version) if return_version is True
51-
"""
52-
53-
package_exists = importlib.util.find_spec(package_name) is not None
54-
package_version = "N/A"
55-
if package_exists:
56-
try:
57-
package_version = importlib.metadata.version(package_name)
58-
package_exists = True
59-
except importlib.metadata.PackageNotFoundError:
60-
package_exists = False
61-
logger.debug(f"Detected {package_name} version {package_version}")
62-
if return_version:
63-
return package_exists, package_version
64-
else:
65-
return package_exists
66-
67-
6830
def import_from_path(path: str) -> str:
6931
"""
7032
Import the module and the name of the function/class separated by :

0 commit comments

Comments
 (0)