diff --git a/bayesflow/__init__.py b/bayesflow/__init__.py
index 6f098ea86..e0ce82073 100644
--- a/bayesflow/__init__.py
+++ b/bayesflow/__init__.py
@@ -40,8 +40,12 @@ def setup():
         torch.autograd.set_grad_enabled(False)

         logging.warning(
+            "\n"
             "When using torch backend, we need to disable autograd by default to avoid excessive memory usage. Use\n"
+            "\n"
             "with torch.enable_grad():\n"
+            "    ...\n"
+            "\n"
             "in contexts where you need gradients (e.g. custom training loops)."
         )

diff --git a/bayesflow/utils/tensor_utils.py b/bayesflow/utils/tensor_utils.py
index 9f161aaca..cd6f6d4ca 100644
--- a/bayesflow/utils/tensor_utils.py
+++ b/bayesflow/utils/tensor_utils.py
@@ -202,7 +202,9 @@ def searchsorted(sorted_sequence: Tensor, values: Tensor, side: str = "left") -> Tensor:

             out_int32 = len(sorted_sequence) <= np.iinfo(np.int32).max

-            indices = torch.searchsorted(sorted_sequence, values, side=side, out_int32=out_int32)
+            indices = torch.searchsorted(
+                sorted_sequence.contiguous(), values.contiguous(), side=side, out_int32=out_int32
+            )

             return indices
         case _:
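
For context beyond the patch itself, here is a minimal, self-contained sketch (not part of the diff) of what the two changes address, assuming only that `torch` and `numpy` are installed; the shapes and values are made up for illustration. The first part mirrors the re-enable-gradients pattern quoted in the warning message; the second shows why calling `.contiguous()` before `torch.searchsorted` is a cheap safeguard, since non-contiguous inputs (e.g. transposed views) can trip up `searchsorted` on some torch versions.

```python
# Minimal sketch, not part of the diff; tensor shapes/values are made up.
import numpy as np
import torch

# --- Hunk 1: autograd is disabled globally, re-enable it locally when needed ---
torch.autograd.set_grad_enabled(False)  # what bayesflow's setup() does on the torch backend

x = torch.randn(4, requires_grad=True)
with torch.enable_grad():
    loss = (x ** 2).sum()  # operations inside the context are tracked again
    loss.backward()
print(x.grad is not None)  # True

# --- Hunk 2: pass contiguous tensors to torch.searchsorted ---
sorted_sequence = torch.linspace(0.0, 1.0, steps=10)
values = torch.rand(3, 5).T  # .T produces a non-contiguous view

out_int32 = len(sorted_sequence) <= np.iinfo(np.int32).max
# .contiguous() returns the tensor itself when its memory is already contiguous
# and copies otherwise, so the extra calls only cost anything in the
# non-contiguous case while satisfying searchsorted's layout expectations.
indices = torch.searchsorted(
    sorted_sequence.contiguous(), values.contiguous(), side="left", out_int32=out_int32
)
print(indices.shape)  # torch.Size([5, 3])
```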