@@ -120,7 +120,9 @@ def fold_and_annotate_arg(
         if input_qparams is not None:
             node.meta["input_qparams"][i] = input_qparams
     for n in nodes_to_remove:
-        assert n.target == dq_op
+        if n.target != dq_op:
+            raise RuntimeError(f"Expected {dq_op} dq_op, got {n.target}")
+
         n.replace_all_uses_with(n.args[0])  # type: ignore[arg-type]
         graph_module.graph.erase_node(n)
 
@@ -136,14 +138,16 @@ def call(self, graph_module: GraphModule) -> PassResult:
             continue
 
             # Make sure we haven't already set qparams meta information on the node
-            assert "input_qparams" not in n.meta, (
-                f'Unexpected key "input_qparams" found in meta for node {n}. '
-                "input_qparams should not have been set at this point"
-            )
-            assert "output_qparams" not in n.meta, (
-                f'Unexpected key "output_qparams" found in meta for node {n}. '
-                "output_qparams should not have been set at this point"
-            )
+            if "input_qparams" in n.meta:
+                raise RuntimeError(
+                    f'Unexpected key "input_qparams" found in meta for node {n}. '
+                    "input_qparams should not have been set at this point"
+                )
+            if "output_qparams" in n.meta:
+                raise RuntimeError(
+                    f'Unexpected key "output_qparams" found in meta for node {n}. '
+                    "output_qparams should not have been set at this point"
+                )
 
             # for the inputs and outputs search the graph for quantization info and
             # store the information in a dict with order of the _tensor_ inputs as key,
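Editor's note, a minimal sketch rather than part of the patch: the likely motivation for replacing `assert` with an explicit `raise RuntimeError` is that assertions are compiled out when Python runs with `-O`, so assert-based graph-invariant checks silently disappear in optimized builds. The function names below are hypothetical, chosen only to contrast the two styles:

```python
# Hypothetical sketch: contrasts an assert-based check with the explicit
# raise used in the patch. Under `python -O`, __debug__ is False and all
# assert statements are compiled out, so only the raise-based check still
# enforces the invariant.


def check_with_assert(target: str, dq_op: str) -> None:
    assert target == dq_op  # removed entirely under `python -O`


def check_with_raise(target: str, dq_op: str) -> None:
    # Survives -O: the invariant is always enforced at runtime.
    if target != dq_op:
        raise RuntimeError(f"Expected {dq_op} dq_op, got {target}")


if __name__ == "__main__":
    try:
        check_with_raise("aten.add.Tensor", "dequantize_per_tensor")
    except RuntimeError as e:
        print(e)  # Expected dequantize_per_tensor dq_op, got aten.add.Tensor
```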