
Commit 051ee24

make it more obvious how to turn on fp16
1 parent 22f24e1 commit 051ee24

File tree

2 files changed (+10 -1 lines changed)


alphafold3_pytorch/trainer.py

Lines changed: 9 additions & 0 deletions
@@ -275,6 +275,7 @@ def __init__(
         checkpoint_folder: str = './checkpoints',
         overwrite_checkpoints: bool = False,
         fabric_kwargs: dict = dict(),
+        fp16: bool = False,
         use_ema: bool = True,
         ema_kwargs: dict = dict(
             use_foreach = True
@@ -284,6 +285,14 @@ def __init__(
     ):
         super().__init__()

+        # fp16 precision is a root level kwarg
+
+        if fp16:
+            assert 'precision' not in fabric_kwargs
+            fabric_kwargs.update(precision = '16-mixed')
+
+        # instantiate fabric
+
         if not exists(fabric):
             fabric = Fabric(accelerator = accelerator, **fabric_kwargs)
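To make the new behavior concrete, below is a small self-contained sketch of the logic this hunk adds. The helper name build_fabric is hypothetical and exists only for illustration; in the actual Trainer one simply passes fp16 = True, and the same precision = '16-mixed' setting is forwarded to Lightning Fabric.

# hypothetical standalone sketch of the fp16 shortcut added above
# assumes lightning is installed (pip install lightning)

from lightning.fabric import Fabric

def build_fabric(
    fp16: bool = False,
    accelerator: str = 'auto',
    fabric_kwargs: dict = dict()
):
    # fp16 = True is shorthand for mixed 16-bit precision on Fabric
    if fp16:
        assert 'precision' not in fabric_kwargs, 'pass either fp16 or precision, not both'
        fabric_kwargs = {**fabric_kwargs, 'precision': '16-mixed'}

    return Fabric(accelerator = accelerator, **fabric_kwargs)

# both calls construct a mixed precision Fabric
fabric = build_fabric(fp16 = True)
fabric = build_fabric(fabric_kwargs = dict(precision = '16-mixed'))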

pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 [project]
 name = "alphafold3-pytorch"
-version = "0.2.61"
+version = "0.2.62"
 description = "Alphafold 3 - Pytorch"
 authors = [
     { name = "Phil Wang", email = "[email protected]" }

0 commit comments
