2 parents 36656b6 + 57385fc commit bf84e1a
torch2trt/torch2trt.py
@@ -372,7 +372,7 @@ def _configure_layer(self, layer):
         device_type = self._ctx.current_device_type()
         self._ctx.builder_config.set_device_type(layer, device_type)
         orig_device_type = device_type
-        if not self._ctx.builder_config.can_run_on_DLA(layer) and device_type == trt.DeviceType.DLA:
+        if device_type == trt.DeviceType.DLA and not self._ctx.builder_config.can_run_on_DLA(layer):
             if self._ctx.torch2trt_kwargs['gpu_fallback']:
                 device_type = trt.DeviceType.GPU  # layer will fall back to GPU
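The reordering matters because of Python's short-circuit `and`: with the equality check first, the `can_run_on_DLA(layer)` query on the builder config is only evaluated for layers that are actually targeted at DLA, and is skipped entirely for GPU layers. A minimal sketch of that evaluation order, using a hypothetical FakeConfig stand-in rather than the real trt.IBuilderConfig, and not the torch2trt code itself:

class FakeConfig:
    """Hypothetical stand-in for the builder config, used only to show call order."""
    def can_run_on_DLA(self, layer):
        print("can_run_on_DLA queried for", layer)
        return False

config = FakeConfig()
device_type = "GPU"  # pretend this layer is not targeted at DLA

# With the DLA check first, can_run_on_DLA() is never called for GPU layers.
if device_type == "DLA" and not config.can_run_on_DLA("conv1"):
    print("layer targeted at DLA but unsupported; fall back or raise")
else:
    print("no DLA capability query was made")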