File tree Expand file tree Collapse file tree 1 file changed +14
-1
lines changed
Expand file tree Collapse file tree 1 file changed +14
-1
lines changed Original file line number Diff line number Diff line change 33import warnings
44
55import torch
6+ from warp ._src import utils as wp_utils
67
78from ._models import get_upet , save_upet
89
1415 message = "The .grad attribute of a Tensor that is not a leaf Tensor" ,
1516)
1617
# We also want to suppress a further warning coming from nvalchemi's use of warp.
# warp routes its warnings through an internal warn() helper, so we wrap it.
20+ _orig_warn = wp_utils .warn
21+
22+
23+ def _warn_filtered (message , category = None , stacklevel = 1 ):
24+ if category is DeprecationWarning and "warp.vec" in str (message ):
25+ return
26+ return _orig_warn (message , category = category , stacklevel = stacklevel )
27+
28+
29+ wp_utils .warn = _warn_filtered
30+
# Disable static kernel fusion for TorchScript. Atomistic batches vary in
# size, and statically fused CUDA kernels cannot allocate new tensors at
# runtime, which triggers "Global alloc not supported yet" errors
# (cuda 13+) at the time of writing.
torch.jit.set_fusion_strategy([("DYNAMIC", 10)])
2135
22-
2336__all__ = ["get_upet" , "save_upet" ]
You can’t perform that action at this time.
0 commit comments