Commit bc3b249: Update.
1 parent c82ec9e

1 file changed: +30 -16 lines

graph_net/tools/generate_subgraph_dataset.sh (30 additions, 16 deletions)
@@ -1,15 +1,17 @@
 #!/bin/bash
 set -x
 
-export CUDA_VISIBLE_DEVICES=4
+OP_NUM=${1:-64}
+GPU_ID=${2:-4}
+
+export CUDA_VISIBLE_DEVICES="${GPU_ID}"
 export PYTHONPATH=/work/GraphNet:/work/abstract_pass/Athena:$PYTHONPATH
 
 GRAPH_NET_ROOT=$(python3 -c "import graph_net; import os; print(os.path.dirname(os.path.dirname(graph_net.__file__)))")
 
-OP_NUM=64
-DECOMPOSE_WORKSPACE=/work/graphnet_test_workspace/subgraph_dataset_20251218
+DECOMPOSE_WORKSPACE=/work/graphnet_test_workspace/subgraph_dataset_20251221
 LEVEL_DECOMPOSE_WORKSPACE=$DECOMPOSE_WORKSPACE/decomposed_${OP_NUM}ops
-OP_NAMES_OUTPUT_DIR=${LEVEL_DECOMPOSE_WORKSPACE}/sample_op_names
+OP_NAMES_OUTPUT_DIR=${DECOMPOSE_WORKSPACE}/sample_op_names
 RANGE_DECOMPOSE_OUTPUT_DIR="${LEVEL_DECOMPOSE_WORKSPACE}/range_decompose"
 GRAPH_VAR_RENAME_OUTPUT_DIR=$LEVEL_DECOMPOSE_WORKSPACE/graph_var_renamed
 DEDUPLICATED_OUTPUT_DIR=$LEVEL_DECOMPOSE_WORKSPACE/deduplicated
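With the new positional parameters, the op budget and GPU are chosen per invocation instead of being hard-coded. A minimal invocation sketch (the calls below are illustrative; the script falls back to the defaults OP_NUM=64 and GPU_ID=4 when arguments are omitted):

    # Argument 1 is OP_NUM, argument 2 is GPU_ID.
    bash graph_net/tools/generate_subgraph_dataset.sh          # defaults: 64 ops on GPU 4
    bash graph_net/tools/generate_subgraph_dataset.sh 128 0    # 128-op budget on GPU 0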
@@ -54,18 +56,27 @@ EOF
 }
 
 function generate_split_point() {
+    # MIN_SEQ_OPS, MAX_SEQ_OPS
+    # level 1: 2, 4
+    # level 2: 4, 8
+    # level 3: 8, 16
+    # level 4: 16, 32
+    # level 5: 32, 64
+    MIN_SEQ_OPS=$((${OP_NUM} / 2))
+    MAX_SEQ_OPS=${OP_NUM}
     echo ">>> [2] Generate split points for samples in ${model_list}."
+    echo ">>> OP_NUM: ${OP_NUM}, MIN_SEQ_OPS: ${MIN_SEQ_OPS}, MAX_SEQ_OPS: ${MAX_SEQ_OPS}"
     echo ">>>"
     python3 -m graph_net.torch.typical_sequence_split_points \
-        --enable-resume \
         --model-list "$model_list" \
         --op-names-path-prefix "${OP_NAMES_OUTPUT_DIR}" \
         --device "cuda" \
         --window-size ${OP_NUM} \
         --fold-policy default \
         --fold-times 16 \
-        --min-seq-ops $((${OP_NUM} / 2)) \
-        --max-seq-ops $((${OP_NUM} * 2)) \
+        --min-seq-ops ${MIN_SEQ_OPS} \
+        --max-seq-ops ${MAX_SEQ_OPS} \
+        --subgraph-ranges-json "$LEVEL_DECOMPOSE_WORKSPACE/subgraph_ranges_${OP_NUM}.json" \
         --output-json "$LEVEL_DECOMPOSE_WORKSPACE/split_results_${OP_NUM}.json"
 }
 
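For the default OP_NUM=64, the new bounds resolve to MIN_SEQ_OPS=32 and MAX_SEQ_OPS=64, whereas the removed expression capped the window at $((OP_NUM * 2)) = 128. A small, self-contained sketch of the arithmetic:

    # Shell arithmetic as used in generate_split_point, shown for the default OP_NUM=64.
    OP_NUM=64
    MIN_SEQ_OPS=$((${OP_NUM} / 2))   # 32
    MAX_SEQ_OPS=${OP_NUM}            # 64; previously $((${OP_NUM} * 2)) = 128
    echo "min=${MIN_SEQ_OPS} max=${MAX_SEQ_OPS}"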

@@ -79,10 +90,11 @@ function range_decompose() {
     "handler_path": "$GRAPH_NET_ROOT/graph_net/torch/graph_decomposer.py",
     "handler_class_name": "RangeDecomposerExtractor",
     "handler_config": {
-        "resume": true,
+        "resume": false,
         "model_path_prefix": "$GRAPH_NET_ROOT",
         "output_dir": "${RANGE_DECOMPOSE_OUTPUT_DIR}",
         "split_results_path": "$LEVEL_DECOMPOSE_WORKSPACE/split_results_${OP_NUM}.json",
+        "subgraph_ranges_path": "$LEVEL_DECOMPOSE_WORKSPACE/subgraph_ranges_${OP_NUM}.json",
         "group_head_and_tail": true,
         "chain_style": false
     }
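The handler configuration is passed to the decomposer as base64-encoded JSON (the base64 -w 0 <<EOF pattern in the script). To inspect what RangeDecomposerExtractor actually receives, a decoding one-liner like the following can be used; CONFIG_B64 is a placeholder for the encoded string, not a variable defined by the script:

    # Decode and pretty-print a base64-encoded handler config for inspection.
    echo "${CONFIG_B64}" | base64 -d | python3 -m json.tool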
@@ -130,13 +142,16 @@ function generate_unittests() {
         --model-path-list ${deduplicated_subgraph_list} \
         --handler-config=$(base64 -w 0 <<EOF
 {
-    "handler_path": "$GRAPH_NET_ROOT/graph_net/torch/sample_passes/agent_unittest_generator.py",
+    "handler_path": "$GRAPH_NET_ROOT/graph_net/sample_pass/agent_unittest_generator.py",
     "handler_class_name": "AgentUnittestGeneratorPass",
     "handler_config": {
+        "framework": "torch",
         "model_path_prefix": "${DEDUPLICATED_OUTPUT_DIR}",
        "output_dir": "$UNITTESTS_OUTPUT_DIR",
         "device": "cuda",
-        "generate_main": false,
+        "generate_main": true,
+        "try_run": true,
+        "resume": true,
         "data_input_predicator_filepath": "$GRAPH_NET_ROOT/graph_net/torch/constraint_util.py",
         "data_input_predicator_class_name": "RenamedDataInputPredicator"
     }
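Since the commit moves the handler from graph_net/torch/sample_passes/ to graph_net/sample_pass/, a quick existence check before launching the pass can catch a stale checkout. This is an optional sanity check, not part of the script:

    # Verify the relocated unittest-generator handler is present in this checkout.
    test -f "${GRAPH_NET_ROOT}/graph_net/sample_pass/agent_unittest_generator.py" \
        && echo "handler found" || echo "handler missing"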
@@ -146,18 +161,17 @@ EOF
 }
 
 main() {
-    suffix="subgraph_${OP_NUM}ops_20251219"
-    generate_op_names 2>&1 | tee ${LEVEL_DECOMPOSE_WORKSPACE}/log_generate_op_names_${suffix}.txt
-    generate_split_point 2>&1 | tee ${LEVEL_DECOMPOSE_WORKSPACE}/log_generate_split_point_${suffix}.txt
+    timestamp=`date +%Y%m%d_%H%M`
+    suffix="${OP_NUM}ops_${timestamp}"
+    #generate_op_names 2>&1 | tee ${LEVEL_DECOMPOSE_WORKSPACE}/log_op_names_${suffix}.txt
+    generate_split_point 2>&1 | tee ${LEVEL_DECOMPOSE_WORKSPACE}/log_split_point_${suffix}.txt
     range_decompose 2>&1 | tee ${LEVEL_DECOMPOSE_WORKSPACE}/log_range_decompose_${suffix}.txt
-
-    generate_subgraph_list ${RANGE_DECOMPOSE_OUTPUT_DIR} ${range_decomposed_subgraph_list}
 
+    generate_subgraph_list ${RANGE_DECOMPOSE_OUTPUT_DIR} ${range_decomposed_subgraph_list}
     rename_subgraph 2>&1 | tee ${LEVEL_DECOMPOSE_WORKSPACE}/log_rename_subgraph_${suffix}.txt
     remove_duplicates 2>&1 | tee ${LEVEL_DECOMPOSE_WORKSPACE}/log_remove_duplicates_${suffix}.txt
 
     generate_subgraph_list ${DEDUPLICATED_OUTPUT_DIR} ${deduplicated_subgraph_list}
-
     generate_unittests 2>&1 | tee ${LEVEL_DECOMPOSE_WORKSPACE}/log_generate_unittests_${suffix}.txt
 }
 
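The new log suffix embeds a run timestamp, so repeated runs no longer overwrite each other's logs. A sketch of what the suffix expands to; the timestamp value is illustrative, and $(...) is used here as the equivalent of the backtick form in the script:

    # For OP_NUM=64 run at 2025-12-21 09:30, suffix becomes "64ops_20251221_0930".
    OP_NUM=64
    timestamp=$(date +%Y%m%d_%H%M)
    suffix="${OP_NUM}ops_${timestamp}"
    echo "${suffix}"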