configs/config.py: 16 additions & 0 deletions

@@ -129,6 +129,16 @@ def has_xpu() -> bool:
        else:
            return False

    @staticmethod
    def use_insecure_load():
        try:
            from fairseq.data.dictionary import Dictionary

            logging.warning("Using insecure weight loading for fairseq dictionary")
            torch.serialization.add_safe_globals([Dictionary])
        except AttributeError:
            pass

    def use_fp32_config(self):
        for config_file in version_config_list:
            self.json_config[config_file]["train"]["fp16_run"] = False
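
Note (context, not part of the diff): PyTorch 2.6 changed the default of torch.load to weights_only=True, so checkpoints whose pickle references non-allowlisted classes, such as fairseq's Dictionary inside HuBERT checkpoints, fail to load until the class is registered via torch.serialization.add_safe_globals. A minimal sketch of the failure and the allowlist fix; the checkpoint path is a hypothetical example:

    import torch
    from fairseq.data.dictionary import Dictionary

    # Hypothetical path; any fairseq checkpoint that pickles a Dictionary
    # object behaves the same way.
    CKPT = "assets/hubert/hubert_base.pt"

    try:
        # On PyTorch >= 2.6 this defaults to weights_only=True and rejects
        # classes that are not on the allowlist.
        state = torch.load(CKPT)
    except Exception:
        # Allowlist the class so the restricted unpickler may reconstruct it;
        # this is what use_insecure_load() does once at startup.
        torch.serialization.add_safe_globals([Dictionary])
        state = torch.load(CKPT)

The except AttributeError guard in the patch keeps older PyTorch builds working, since torch.serialization.add_safe_globals was only introduced around PyTorch 2.4.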
@@ -210,10 +220,16 @@ def device_config(self):
        else:
            if self.instead:
                logger.info(f"Use {self.instead} instead")

        logger.info(
            "Half-precision floating-point: %s, device: %s"
            % (self.is_half, self.device)
        )

        # Check if PyTorch is 2.6 or higher
        if tuple(map(int, torch.__version__.split("+")[0].split("."))) >= (2, 6, 0):
            self.use_insecure_load()

        return x_pad, x_query, x_center, x_max
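
Note (context, not part of the diff): the version gate drops any local build suffix (e.g. +cu124) and compares the numeric components as a tuple, so use_insecure_load is only called on PyTorch 2.6 or newer. A standalone sketch of the same parsing, with example version strings chosen for illustration:

    def torch_is_at_least_2_6(version: str) -> bool:
        # Strip the local build tag ("2.6.0+cu124" -> "2.6.0"), then
        # compare the dotted components numerically.
        return tuple(map(int, version.split("+")[0].split("."))) >= (2, 6, 0)

    assert torch_is_at_least_2_6("2.6.0+cu124")
    assert torch_is_at_least_2_6("2.7.1")
    assert not torch_is_at_least_2_6("2.3.1+cpu")

Pre-release strings such as 2.6.0.dev20250101 would raise ValueError in this parsing, as they would in the patched check itself.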

