We read every piece of feedback and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 69e2387 commit 045df8eCopy full SHA for 045df8e
bayesflow/__init__.py
@@ -40,9 +40,9 @@ def setup():
40
torch.autograd.set_grad_enabled(False)
41
42
logging.warning(
43
- "When using torch backend, we need to disable autograd by default to avoid excessive memory usage. Use\n"
44
- "with torch.enable_grad():\n"
45
- "in contexts where you need gradients (e.g. custom training loops)."
+ "Autograd is disabled by default to avoid excessive memory usage. "
+ "If you need gradients (e.g., custom training loops), use\n"
+ "with torch.enable_grad():"
46
)
47
48
0 commit comments