We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 47ec22f · commit 897096a · Copy full SHA for 897096a
examples/example.py
@@ -99,13 +99,7 @@ def __len__(self):
99
model.eval()
100
# enable fused and locality-aware memory access optimization
101
torchsparse.backends.benchmark = True # type: ignore
102
- # enable adaptive grouping optimization
103
- torchsparse.tune(
104
- model=model,
105
- data_loader=dataflow,
106
- n_samples=10,
107
- collect_fn=lambda data: data['input'],
108
- )
+
109
with torch.no_grad():
110
for k, feed_dict in enumerate(dataflow):
111
inputs = feed_dict['input'].to(device=args.device).half()
0 commit comments