Skip to content

Commit 2c65ad5

Browse files
RuntimeError: Can't call main_program when full_graph=False. Use paddle.jit.to_static(full_graph=True) instead. (#397)
  File "/git/RT-DETR/rtdetr_paddle/ppdet/engine/trainer.py", line 937, in _get_infer_cfg_and_input_spec
    input_spec, static_model.forward.main_program,
  File "/miniconda3/envs/rtdetrpaddle/lib/python3.10/site-packages/paddle/jit/dy2static/program_translator.py", line 757, in main_program
    raise_error_template("main_program")()
  File "/miniconda3/envs/rtdetrpaddle/lib/python3.10/site-packages/paddle/jit/dy2static/program_translator.py", line 691, in _raise_error
    raise RuntimeError(error_template.format(func=func_str))
RuntimeError: Can't call main_program when full_graph=False. Use paddle.jit.to_static(full_graph=True) instead.
1 parent 6b36128 commit 2c65ad5

File tree

1 file changed: +10 additions, −10 deletions

rtdetr_paddle/ppdet/engine/trainer.py

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -883,24 +883,24 @@ def _get_infer_cfg_and_input_spec(self,
883883
# set image_shape=[None, 3, -1, -1] as default
884884
if image_shape is None:
885885
image_shape = [None, 3, -1, -1]
886-
886+
887887
if len(image_shape) == 3:
888888
image_shape = [None] + image_shape
889889
else:
890890
im_shape = [image_shape[0], 2]
891891
scale_factor = [image_shape[0], 2]
892-
892+
893893
if hasattr(self.model, 'deploy'):
894894
self.model.deploy = True
895-
895+
896896
for layer in self.model.sublayers():
897897
if hasattr(layer, 'convert_to_deploy'):
898898
layer.convert_to_deploy()
899-
899+
900900
if hasattr(self.cfg, 'export') and 'fuse_conv_bn' in self.cfg[
901901
'export'] and self.cfg['export']['fuse_conv_bn']:
902902
self.model = fuse_conv_bn(self.model)
903-
903+
904904
export_post_process = self.cfg['export'].get(
905905
'post_process', False) if hasattr(self.cfg, 'export') else True
906906
export_nms = self.cfg['export'].get('nms', False) if hasattr(
@@ -913,12 +913,12 @@ def _get_infer_cfg_and_input_spec(self,
913913
self.model.export_nms = export_nms if not export_benchmark else False
914914
if export_post_process and not export_benchmark:
915915
image_shape = [None] + image_shape[1:]
916-
916+
917917
# Save infer cfg
918918
_dump_infer_config(self.cfg,
919919
os.path.join(save_dir, 'infer_cfg.yml'), image_shape,
920920
self.model)
921-
921+
922922
input_spec = [{
923923
"image": InputSpec(
924924
shape=image_shape, name='image'),
@@ -927,10 +927,10 @@ def _get_infer_cfg_and_input_spec(self,
927927
"scale_factor": InputSpec(
928928
shape=scale_factor, name='scale_factor')
929929
}]
930-
930+
931931
if prune_input:
932932
static_model = paddle.jit.to_static(
933-
self.model, input_spec=input_spec)
933+
self.model, input_spec=input_spec, full_graph=True)
934934
# NOTE: dy2st do not pruned program, but jit.save will prune program
935935
# input spec, prune input spec here and save with pruned input spec
936936
pruned_input_spec = _prune_input_spec(
@@ -939,7 +939,7 @@ def _get_infer_cfg_and_input_spec(self,
939939
else:
940940
static_model = None
941941
pruned_input_spec = input_spec
942-
942+
943943
return static_model, pruned_input_spec
944944

945945
def export(self, output_dir='output_inference'):

0 commit comments

Comments
 (0)