Commit 47424a2

【Hackathon 9th Sprint No.8】Add RangeDecomposerBackend (#372)
* add test
* add backend
* split
* fix
* Delete graph_net/test/split_points.py
* Update help text for configuration argument
1 parent a762155 commit 47424a2

File tree: 6 files changed (+447, -2 lines)
Lines changed: 46 additions & 0 deletions
@@ -0,0 +1,46 @@
#!/bin/bash

GRAPH_NET_ROOT=$(python3 -c "import graph_net; import os; print(os.path.dirname(os.path.dirname(graph_net.__file__)))")

# Build a temporary model list with two torchvision samples.
MODEL1="$GRAPH_NET_ROOT/samples/torchvision/resnet18"
MODEL2="$GRAPH_NET_ROOT/samples/torchvision/resnet34"
MODEL_LIST_FILE=$(mktemp)
echo "$MODEL1" > "$MODEL_LIST_FILE"
echo "$MODEL2" >> "$MODEL_LIST_FILE"

# Compute typical-sequence split points for the listed models and write them to split_results.json.
python3 -m graph_net.torch.typical_sequence_split_points \
    --model-list "$MODEL_LIST_FILE" \
    --device "cuda" \
    --window-size 10 \
    --output-json "$GRAPH_NET_ROOT/split_results.json"

rm -f "$MODEL_LIST_FILE"


MODEL_PATH_IN_SAMPLES=/torchvision/resnet18
MODEL_NAME=$(basename "$MODEL_PATH_IN_SAMPLES")

# Base64-encode the JSON config consumed by the range_decomposer backend.
decomposer_config_json_str=$(cat <<EOF
{
  "split_results_path": "$GRAPH_NET_ROOT/split_results.json",
  "workspace_path": "$GRAPH_NET_ROOT/decompose_workspace",
  "chain_style": "True"
}
EOF
)
DECOMPOSER_CONFIG=$(echo $decomposer_config_json_str | base64 -w 0)

# Decompose the model into subgraphs at the precomputed split points.
python3 -m graph_net.torch.test_compiler --model-path $GRAPH_NET_ROOT/samples/$MODEL_PATH_IN_SAMPLES --compiler range_decomposer --device cuda --config=$DECOMPOSER_CONFIG


# Copy the original sample next to the decomposed subgraphs, then run the
# range_decomposer_validator backend on it and capture its output.
DECOMPOSE_PATH=$GRAPH_NET_ROOT/decompose_workspace
cp -r "$GRAPH_NET_ROOT/samples/$MODEL_PATH_IN_SAMPLES" "$DECOMPOSE_PATH/"

python3 -m graph_net.torch.test_compiler \
    --model-path $DECOMPOSE_PATH/$MODEL_NAME \
    --compiler range_decomposer_validator \
    --device cuda > "$DECOMPOSE_PATH/log.log" 2>&1

# Plot ESt from the benchmark log.
python3 -m graph_net.plot_ESt \
    --benchmark-path $DECOMPOSE_PATH/log.log \
    --output-dir $DECOMPOSE_PATH
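
For reference, split_results.json (written by typical_sequence_split_points and read by the backend below) is keyed by model name, and each entry carries the two fields the backend consumes: "path" and "split_points". A minimal illustrative entry, with hypothetical values, sketched in Python:

# Hypothetical shape of split_results.json as consumed by RangeDecomposerBackend.
# The keys ("path", "split_points") are the ones the backend reads; the concrete
# values below are made up for illustration.
import json

example = {
    "resnet18": {
        "path": "/path/to/samples/torchvision/resnet18",  # model dir forwarded to graph_net.torch.run_model
        "split_points": [4, 9, 14],                        # forwarded to the decomposer as split_positions
    }
}
print(json.dumps(example, indent=2))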
graph_net/torch/backend/range_decomposer_backend.py

Lines changed: 94 additions & 0 deletions
@@ -0,0 +1,94 @@
import base64
import json
import subprocess
import sys
from pathlib import Path
from typing import Any, Dict

import torch
import graph_net


def convert_to_dict(config_str):
    """Decode a base64-encoded JSON object into a dict; None yields {}."""
    if config_str is None:
        return {}
    config_str = base64.b64decode(config_str).decode("utf-8")
    config = json.loads(config_str)
    assert isinstance(config, dict), f"config should be a dict. {config_str=}"
    return config


def encode_config(config: Dict[str, Any]) -> str:
    """Encode a config dict as base64-encoded JSON."""
    json_str = json.dumps(config)
    return base64.b64encode(json_str.encode("utf-8")).decode("utf-8")


def load_json(file_path):
    with open(file_path, "r", encoding="utf-8") as file:
        data_dict = json.load(file)
    return data_dict


class RangeDecomposerBackend:
    def __init__(self):
        self.graph_net_root = Path(graph_net.__file__).parent

    def __call__(self, model: torch.nn.Module) -> torch.nn.Module:
        # self.config is the raw base64 --config string attached by
        # test_compiler.get_compiler_backend.
        config = convert_to_dict(self.config)
        workspace_path = Path(config["workspace_path"])
        chain_style = config["chain_style"]

        model_file_path = Path(model.__class__.__graph_net_file_path__)
        model_name = model_file_path.parent.name

        # Look up the precomputed split points for this model.
        model_info = load_json(config["split_results_path"])[model_name]
        model_path = model_info["path"]
        split_points = model_info["split_points"]

        model_output_dir = workspace_path / f"{model_name}_decomposed"
        model_output_dir.mkdir(parents=True, exist_ok=True)

        # Configure the extractor decorator to run the naive graph decomposer
        # and write the resulting subgraphs into the workspace.
        config_dict = {
            "decorator_path": str(self.graph_net_root / "torch/extractor.py"),
            "decorator_config": {
                "name": model_name,
                "custom_extractor_path": str(
                    self.graph_net_root / "torch/naive_graph_decomposer.py"
                ),
                "custom_extractor_config": {
                    "output_dir": str(model_output_dir),
                    "split_positions": split_points,
                    "group_head_and_tail": True,
                    "filter_path": str(
                        self.graph_net_root / "torch/naive_subgraph_filter.py"
                    ),
                    "filter_config": {},
                    "chain_style": chain_style,
                },
            },
        }

        encoded_config = encode_config(config_dict)

        cmd = [
            sys.executable,
            "-m",
            "graph_net.torch.run_model",
            "--model-path",
            model_path,
            "--decorator-config",
            encoded_config,
        ]

        try:
            subprocess.run(cmd, check=True)
            print(f"[Success] Saved to {model_output_dir}")
        except subprocess.CalledProcessError as e:
            print(f"[Error] Process failed: {e}")
        except Exception as e:
            print(f"[Error] Unexpected: {e}")
        return model

    def synchronize(self):
        if torch.cuda.is_available():
            torch.cuda.synchronize()
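
As a usage note, the --config value that test_compiler attaches to this backend is just a base64-encoded JSON object, so the two helpers above round-trip cleanly. A minimal sketch; the paths are placeholders, not real files:

# Building a --config value for the range_decomposer backend.
# Keys mirror what RangeDecomposerBackend.__call__ reads; paths are placeholders.
cfg = {
    "split_results_path": "/tmp/split_results.json",
    "workspace_path": "/tmp/decompose_workspace",
    "chain_style": "True",
}
encoded = encode_config(cfg)            # pass this string via --config
assert convert_to_dict(encoded) == cfg  # the backend recovers the original dict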

graph_net/torch/rp_expr/longest_rp_expr_parser.py

Lines changed: 2 additions & 0 deletions
@@ -30,6 +30,8 @@ def __call__(self, primitive_id_lists: t.List[t.List[PrimitiveId]]):
                     token_id2primitive_id
                 )
             ]
+            if not cur_primitive_id_lists:
+                continue
             cur_lets_list_rp_expr, cur_token_id2primitive_id = rp_expr_parser(
                 cur_primitive_id_lists
             )

graph_net/torch/rp_expr/rp_expr.py

Lines changed: 1 addition & 1 deletion
@@ -369,7 +369,7 @@ def get_range(size):
         segments = [
             token_tensor[start:end]
             for consecutive_tensor in consecutive_tensors
-            for start, end in [get_range(int(consecutive_tensor.size(0)))]
+            for start, end in [get_range(len(consecutive_tensor))]
         ]
 
         return segments

graph_net/torch/test_compiler.py

Lines changed: 13 additions & 1 deletion
@@ -23,6 +23,7 @@
 from graph_net.torch.backend.blade_disc_backend import BladeDISCBackend
 from graph_net.torch.backend.nope_backend import NopeBackend
 from graph_net.torch.backend.unstable_to_stable_backend import UnstableToStableBackend
+from graph_net.torch.backend.range_decomposer_backend import RangeDecomposerBackend
 from graph_net.torch.backend.range_decomposer_validator_backend import (
     RangeDecomposerValidatorBackend,
 )
@@ -39,6 +40,7 @@
     "bladedisc": BladeDISCBackend(),
     "nope": NopeBackend(),
     "unstable_to_stable": UnstableToStableBackend(),
+    "range_decomposer": RangeDecomposerBackend(),
     "range_decomposer_validator": RangeDecomposerValidatorBackend(),
 }

@@ -94,7 +96,10 @@ def load_class_from_file(
 
 def get_compiler_backend(args) -> GraphCompilerBackend:
     assert args.compiler in registry_backend, f"Unknown compiler: {args.compiler}"
-    return registry_backend[args.compiler]
+    backend = registry_backend[args.compiler]
+    if args.config is not None:
+        backend.config = args.config
+    return backend
 
 
 def get_model(args):
@@ -447,5 +452,12 @@ def main(args):
         default=None,
         help="Path to samples list, each line contains a sample path",
     )
+    parser.add_argument(
+        "--config",
+        type=str,
+        required=False,
+        default=None,
+        help="Base64-encoded configuration JSON.",
+    )
     args = parser.parse_args()
     main(args=args)
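
For context, get_compiler_backend only attaches the raw --config string to the chosen backend; decoding happens inside the backend itself (see convert_to_dict in range_decomposer_backend.py). A minimal sketch of that pattern with a hypothetical backend, not part of this commit:

# Hypothetical backend illustrating the --config plumbing added above.
# test_compiler sets `backend.config` to the raw base64 string; the backend
# decodes it lazily when it is called.
from graph_net.torch.backend.range_decomposer_backend import convert_to_dict

class EchoConfigBackend:
    config = None  # populated by get_compiler_backend when --config is given

    def __call__(self, model):
        cfg = convert_to_dict(self.config)  # {} when --config was omitted
        print(f"[EchoConfigBackend] config={cfg}")
        return model

    def synchronize(self):
        pass

To be usable it would also need an entry in registry_backend, as range_decomposer does above.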
