
Commit acb271c

Merge of 2 parents: 4ab926e + c61c4e7

File tree

4 files changed (+20 / -18 lines)


.gitignore

Lines changed: 1 addition & 0 deletions
@@ -5,6 +5,7 @@ ttest
 *.engine
 *.pyc
 *.infer
+*.npy
 
 z_demo_*
 
README.md

Lines changed: 2 additions & 4 deletions
@@ -119,7 +119,7 @@ you can use darknet2pytorch to convert it yourself, or download my converted mod
 
 - **This script is to convert the official pretrained darknet model into ONNX**
 
-- **Pytorch version Recommended: 1.4.0**
+- **Pytorch version Recommended: 1.4.0 for TensorRT 7.0, 1.5.0 or higher for TensorRT 7.1**
 
 - **Install onnxruntime**
 

@@ -142,7 +142,7 @@ you can use darknet2pytorch to convert it yourself, or download my converted mod
 
 - **You can convert your trained pytorch model into ONNX using this script**
 
-- **Pytorch version Recommended: 1.4.0**
+- **Pytorch version Recommended: 1.4.0 for TensorRT 7.0, 1.5.0 or higher for TensorRT 7.1**
 
 - **Install onnxruntime**
 
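The PyTorch-to-TensorRT version pairing is the whole point of these two hunks. A minimal sketch of a pre-export sanity check built on that pairing follows; the version mapping is read off the README text above, not an official compatibility matrix, and it assumes the TensorRT Python bindings are importable:

    import torch
    import tensorrt  # assumed: TensorRT python bindings installed

    # torch.__version__ can carry a local suffix like '1.5.0+cu101'
    torch_ver = tuple(int(p) for p in torch.__version__.split('+')[0].split('.')[:2])

    if tensorrt.__version__.startswith('7.0') and torch_ver != (1, 4):
        print('README recommends PyTorch 1.4.0 for TensorRT 7.0, found', torch.__version__)
    elif tensorrt.__version__.startswith('7.1') and torch_ver < (1, 5):
        print('README recommends PyTorch 1.5.0+ for TensorRT 7.1, found', torch.__version__)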
@@ -191,8 +191,6 @@ you can use darknet2pytorch to convert it yourself, or download my converted mod
 
 - Note2: extra NMS operations are needed for the tensorRT output. This demo uses python NMS code from `tool/utils.py`.
 
-- Inference is verified to be okay for TensorRT 7.0, but TensorRT 7.1 may be problematic in handling y coordinates of bounding boxes. Problem investigation is underway.
-
 
 # 6. ONNX2Tensorflow
 
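The Note2 line kept above points at the post-processing this demo relies on. For orientation, greedy IoU suppression, which is what such Python NMS code generally does, fits in a few lines of NumPy. This is a generic sketch, not the actual implementation in tool/utils.py; the (x1, y1, x2, y2) box layout and the 0.4 threshold are assumptions:

    import numpy as np

    def nms_sketch(boxes, confs, iou_thresh=0.4):
        # boxes: [N, 4] as (x1, y1, x2, y2); confs: [N] detection scores
        x1, y1, x2, y2 = boxes[:, 0], boxes[:, 1], boxes[:, 2], boxes[:, 3]
        areas = (x2 - x1) * (y2 - y1)
        order = confs.argsort()[::-1]  # highest score first
        keep = []
        while order.size > 0:
            i = order[0]
            keep.append(i)
            # IoU of the best remaining box against all others
            xx1 = np.maximum(x1[i], x1[order[1:]])
            yy1 = np.maximum(y1[i], y1[order[1:]])
            xx2 = np.minimum(x2[i], x2[order[1:]])
            yy2 = np.minimum(y2[i], y2[order[1:]])
            inter = np.maximum(0.0, xx2 - xx1) * np.maximum(0.0, yy2 - yy1)
            iou = inter / (areas[i] + areas[order[1:]] - inter)
            order = order[1:][iou <= iou_thresh]  # drop heavy overlaps
        return keep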

demo_trt.py

Lines changed: 2 additions & 2 deletions
@@ -119,7 +119,7 @@ def main(engine_path, image_path, image_size):
 
     for i in range(2):  # This 'for' loop is for speed check
                         # Because the first iteration is usually longer
-        boxes = detect(engine, context, buffers, image_src, image_size, num_classes)
+        boxes = detect(context, buffers, image_src, image_size, num_classes)
 
     if num_classes == 20:
         namesfile = 'data/voc.names'

@@ -140,7 +140,7 @@ def get_engine(engine_path):
 
 
 
-def detect(engine, context, buffers, image_src, image_size, num_classes):
+def detect(context, buffers, image_src, image_size, num_classes):
     IN_IMAGE_H, IN_IMAGE_W = image_size
 
     ta = time.time()
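Both hunks make the same change: detect() no longer takes the unused engine handle, since the execution context and pre-allocated buffers carry everything inference needs. A hypothetical call under the new signature; the engine path, image path, and input size are placeholders, and allocate_buffers is assumed to be the buffer helper defined elsewhere in demo_trt.py:

    import cv2

    engine = get_engine('yolov4.engine')         # placeholder path
    context = engine.create_execution_context()  # standard TensorRT API
    buffers = allocate_buffers(engine)           # helper assumed from demo_trt.py
    image_src = cv2.imread('data/dog.jpg')       # placeholder image
    boxes = detect(context, buffers, image_src, (416, 416), 80)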

tool/yolo_layer.py

Lines changed: 15 additions & 12 deletions
@@ -189,13 +189,13 @@ def yolo_forward(output, conf_thresh, num_classes, anchors, num_anchors, scale_x
     for i in range(num_anchors):
         ii = i * 2
         # Shape: [batch, 1, H, W]
-        bx = bxy[:, ii] + torch.tensor(grid_x, device=device, dtype=torch.float32)  # grid_x.to(device=device, dtype=torch.float32)
+        bx = bxy[:, ii : ii + 1] + torch.tensor(grid_x, device=device, dtype=torch.float32)  # grid_x.to(device=device, dtype=torch.float32)
         # Shape: [batch, 1, H, W]
-        by = bxy[:, ii + 1] + torch.tensor(grid_y, device=device, dtype=torch.float32)  # grid_y.to(device=device, dtype=torch.float32)
+        by = bxy[:, ii + 1 : ii + 2] + torch.tensor(grid_y, device=device, dtype=torch.float32)  # grid_y.to(device=device, dtype=torch.float32)
         # Shape: [batch, 1, H, W]
-        bw = bwh[:, ii] * anchor_w[i]
+        bw = bwh[:, ii : ii + 1] * anchor_w[i]
         # Shape: [batch, 1, H, W]
-        bh = bwh[:, ii + 1] * anchor_h[i]
+        bh = bwh[:, ii + 1 : ii + 2] * anchor_h[i]
 
         bx_list.append(bx)
         by_list.append(by)
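The slicing change is a shape fix: integer indexing such as bxy[:, ii] drops the channel axis and returns [batch, H, W], while the length-1 slice bxy[:, ii : ii + 1] keeps it, matching the [batch, 1, H, W] shape the comments document. A standalone illustration with arbitrary sizes:

    import torch

    x = torch.zeros(2, 6, 19, 19)  # e.g. [batch, 2 * num_anchors, H, W]
    print(x[:, 0].shape)           # torch.Size([2, 19, 19])    -- axis dropped
    print(x[:, 0:1].shape)         # torch.Size([2, 1, 19, 19]) -- axis kept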
@@ -216,17 +216,20 @@ def yolo_forward(output, conf_thresh, num_classes, anchors, num_anchors, scale_x
     # Shape: [batch, num_anchors, H, W]
     bh = torch.cat(bh_list, dim=1)
 
+    # Shape: [batch, 2 * num_anchors, H, W]
+    bx_bw = torch.cat((bx, bw), dim=1)
+    # Shape: [batch, 2 * num_anchors, H, W]
+    by_bh = torch.cat((by, bh), dim=1)
+
     # normalize coordinates to [0, 1]
-    bx = bx / W
-    by = by / H
-    bw = bw / W
-    bh = bh / H
+    bx_bw /= W
+    by_bh /= H
 
     # Shape: [batch, num_anchors * H * W, 1]
-    bx = bx.view(batch, num_anchors * H * W, 1)
-    by = by.view(batch, num_anchors * H * W, 1)
-    bw = bw.view(batch, num_anchors * H * W, 1)
-    bh = bh.view(batch, num_anchors * H * W, 1)
+    bx = bx_bw[:, :num_anchors].view(batch, num_anchors * H * W, 1)
+    by = by_bh[:, :num_anchors].view(batch, num_anchors * H * W, 1)
+    bw = bx_bw[:, num_anchors:].view(batch, num_anchors * H * W, 1)
+    bh = by_bh[:, num_anchors:].view(batch, num_anchors * H * W, 1)
 
     # Shape: [batch, num_anchors * h * w, 4]
     boxes = torch.cat((bx, by, bw, bh), dim=2).view(batch, num_anchors * H * W, 4)
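Concatenating bx with bw and by with bh before normalizing turns four elementwise divisions into two, presumably trimming the node count of the exported graph; the values are unchanged. A quick self-contained check with arbitrary shapes:

    import torch

    batch, num_anchors, H, W = 1, 3, 19, 19
    bx = torch.rand(batch, num_anchors, H, W)
    bw = torch.rand(batch, num_anchors, H, W)

    bx_bw = torch.cat((bx, bw), dim=1)  # [batch, 2 * num_anchors, H, W]
    bx_bw /= W                          # one division instead of two

    assert torch.allclose(bx_bw[:, :num_anchors], bx / W)
    assert torch.allclose(bx_bw[:, num_anchors:], bw / W)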
