
pnnx.param content appears in the generated forward #6369

@1311523821

Description


error log

pnnxparam = ./pnnx/depth_anything_v2_vits.pnnx.param
pnnxbin = ./pnnx/depth_anything_v2_vits.pnnx.bin
pnnxpy = ./pnnx/depth_anything_v2_vits_pnnx.py
pnnxonnx = ./pnnx/depth_anything_v2_vits.pnnx.onnx
ncnnparam = ./pnnx/depth_anything_v2_vits.ncnn.param
ncnnbin = ./pnnx/depth_anything_v2_vits.ncnn.bin
ncnnpy = ./pnnx/depth_anything_v2_vits_ncnn.py
fp16 = 0
optlevel = 0
device = cpu
inputshape = [1,3,532,532]f32
inputshape2 =
customop =
moduleop =
get inputshape from traced inputs
inputshape = [1,3,532,532]f32
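
For reference, a minimal export call that reproduces the options recorded above might look like the sketch below. The model construction, checkpoint path, and output base path are assumptions (the vits preset follows the Depth-Anything-V2 README); only the pnnx options (input shape [1,3,532,532], fp16 = 0, optlevel = 0, device = cpu) mirror the log.

```python
# Minimal sketch, not the exact export_ncnn.py from the report.
# Model setup and paths are assumptions; pnnx options mirror the log above.
import torch
import pnnx
from depth_anything_v2.dpt import DepthAnythingV2  # assumed import path

# 'vits' preset as documented in the Depth-Anything-V2 repo (assumption)
model = DepthAnythingV2(encoder='vits', features=64,
                        out_channels=[48, 96, 192, 384])
model.load_state_dict(torch.load('checkpoints/depth_anything_v2_vits.pth',
                                 map_location='cpu'))
model.eval()

x = torch.rand(1, 3, 532, 532)  # inputshape = [1,3,532,532]f32
pnnx.export(model, './pnnx/depth_anything_v2_vits.pt', x,
            fp16=0, optlevel=0, device='cpu')
```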
############# pass_level0
inline module = depth_anything_v2.dinov2_layers.attention.MemEffAttention
inline module = depth_anything_v2.dinov2_layers.block.NestedTensorBlock
inline module = depth_anything_v2.dinov2_layers.layer_scale.LayerScale
inline module = depth_anything_v2.dinov2_layers.mlp.Mlp
inline module = depth_anything_v2.dinov2_layers.patch_embed.PatchEmbed
inline module = depth_anything_v2.dpt.DPTHead
inline module = depth_anything_v2.util.blocks.FeatureFusionBlock
inline module = depth_anything_v2.util.blocks.ResidualConvUnit
inline module = torch.nn.modules.linear.Identity
inline module = depth_anything_v2.dinov2_layers.attention.MemEffAttention
inline module = depth_anything_v2.dinov2_layers.block.NestedTensorBlock
inline module = depth_anything_v2.dinov2_layers.layer_scale.LayerScale
inline module = depth_anything_v2.dinov2_layers.mlp.Mlp
inline module = depth_anything_v2.dinov2_layers.patch_embed.PatchEmbed
inline module = depth_anything_v2.dpt.DPTHead
inline module = depth_anything_v2.util.blocks.FeatureFusionBlock
inline module = depth_anything_v2.util.blocks.ResidualConvUnit
inline module = torch.nn.modules.linear.Identity


############# pass_level1
############# pass_level2
Traceback (most recent call last):
  File "e:\AI\Code\WorkCode\Depth-Anything-V2\export_ncnn.py", line 124, in <module>
    export_depth_anything_v2(
  File "e:\AI\Code\WorkCode\Depth-Anything-V2\export_ncnn.py", line 52, in export_depth_anything_v2
    pnnx.export(model, export_path_base,
  File "D:\ProgramData\anaconda3\envs\bridgedepth\lib\site-packages\pnnx\utils\export.py", line 22, in export
    return convert(ptpath, inputs, inputs2, input_shapes, input_types, input_shapes2, input_types2, device, customop, moduleop, optlevel, pnnxparam, pnnxbin, pnnxpy, pnnxonnx, ncnnparam, ncnnbin, ncnnpy, fp16)
  File "D:\ProgramData\anaconda3\envs\bridgedepth\lib\site-packages\pnnx\utils\convert.py", line 160, in convert
    spec.loader.exec_module(foo)
  File "<frozen importlib._bootstrap_external>", line 879, in exec_module
  File "<frozen importlib._bootstrap_external>", line 1017, in get_code
  File "<frozen importlib._bootstrap_external>", line 947, in source_to_code
  File "<frozen importlib._bootstrap>", line 241, in _call_with_frames_removed
  File "E:\AI\Code\WorkCode\Depth-Anything-V2./pnnx/depth_anything_v2_vits_pnnx.py", line 458
    v_97 = prim::Constant(value=14)
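
The frames above show that pnnx's convert.py loads the generated depth_anything_v2_vits_pnnx.py as a regular Python module (spec.loader.exec_module(foo)) in order to re-trace it for the ncnn export, so any TorchScript IR text left in that file fails to compile at this point, presumably as a SyntaxError. A rough sketch of that loading step, assuming the standard importlib pattern around the frame shown in the traceback:

```python
# Sketch of the failing load step; only spec.loader.exec_module(foo) is taken
# from the traceback, the surrounding importlib calls are an assumption.
import importlib.util

path = './pnnx/depth_anything_v2_vits_pnnx.py'
spec = importlib.util.spec_from_file_location('depth_anything_v2_vits_pnnx', path)
foo = importlib.util.module_from_spec(spec)
spec.loader.exec_module(foo)  # compiling the file fails here: the line
                              # "v_97 = prim::Constant(value=14)" is not valid Python
```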

Excerpt from the generated depth_anything_v2_vits_pnnx.py:
def forward(self, v_x_1):
    v_97 = prim::Constant(value=14)
    v_147 = prim::Constant(value=-1)
    v_154 = prim::Constant(value=1)
    v_157 = prim::Constant(value=2147483647)
    v_190 = prim::Constant(value=37)
    v_pos_embed_1 = self.pretrained_data
    v_cls_token_1 = self.pretrained_data
    v_94 = v_x_1.size(dim=2)
    v_95 = prim::NumToTensor(v_94)
    v_patch_h_1 = aten::floor_divide(v_95, v_97)
    v_101 = aten::Int(v_patch_h_1)
    v_104 = aten::Int(v_patch_h_1)
    v_107 = aten::Int(v_patch_h_1)
    v_110 = aten::Int(v_patch_h_1)
    v_113 = v_x_1.size(dim=3)
    v_114 = prim::NumToTensor(v_113)
    v_1468 = prim::Constant(value=14)
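
The v_* lines above are raw TorchScript IR (prim:: and aten:: calls) that leaked into the generated Python file instead of being translated, which is what makes the file unparseable. For illustration only, the plain Python/torch equivalents of those ops would look roughly like the sketch below; this is a semantic paraphrase, not a claim about the exact code pnnx normally emits.

```python
# Illustration of what the IR calls above mean in plain Python/torch.
# v_x_1 is stood in by a dummy input of the traced shape (assumption).
import torch

v_x_1 = torch.rand(1, 3, 532, 532)
v_97 = 14                                      # prim::Constant(value=14)
v_94 = v_x_1.size(2)                           # aten::size(dim=2) -> 532
v_95 = torch.tensor(v_94)                      # prim::NumToTensor
v_patch_h_1 = torch.floor_divide(v_95, v_97)   # aten::floor_divide -> tensor(38)
v_101 = int(v_patch_h_1)                       # aten::Int
```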

context (build/runtime environment)

torch 2.7.0+cu118
pnnx 20251016
ncnn 1.0.20250916
Python 3.10.18
