@@ -18,7 +18,6 @@
 )
 from transformers.utils.import_utils import is_torch_npu_available
 
-from axolotl.utils.distributed import is_main_process
 from axolotl.utils.schemas.datasets import (
     DatasetConfig,
     DPODataset,
@@ -719,10 +718,9 @@ def check_eval_packing(cls, data):
             and data.get("eval_sample_packing") is None
             and not data.get("eval_table_size")
         ):
-            if is_main_process():
-                LOG.info(
-                    "explicitly setting `eval_sample_packing` to match `sample_packing`"
-                )
+            LOG.info(
+                "explicitly setting `eval_sample_packing` to match `sample_packing`"
+            )
             data["eval_sample_packing"] = True
 
         if (
@@ -1179,15 +1177,14 @@ def check_sequence_parallel_degree(self):
             # TODO: monkeypatch / callback to average losses correctly across SP ranks
             # / fix gradient scaling across SP ranks. Losses, grads should be scaled
             # according to the proportion of non-padding tokens per rank.
-            if is_main_process():
-                LOG.warning(
-                    "Sequence parallelism (SP) is enabled with "
-                    f"sequence_parallel_degree={self.sequence_parallel_degree}. "
-                    "Please note that logged losses may differ slightly to the non-SP "
-                    "losses due to transformers Trainer implementation details. "
-                    "Please see https://github.com/axolotl-ai-cloud/axolotl/pull/2495#issuecomment-2784022042 "
-                    "for more details."
-                )
+            LOG.warning(
+                "Sequence parallelism (SP) is enabled with "
+                f"sequence_parallel_degree={self.sequence_parallel_degree}. "
+                "Please note that logged losses may differ slightly to the non-SP "
+                "losses due to transformers Trainer implementation details. "
+                "Please see https://github.com/axolotl-ai-cloud/axolotl/pull/2495#issuecomment-2784022042 "
+                "for more details."
+            )
 
         return self
 
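The diff above drops the per-call-site is_main_process() guards around LOG calls, which implies the rank gating lives elsewhere. A minimal sketch of that general technique, gating at the logging layer so call sites log unconditionally, assuming a hypothetical MainProcessFilter and the standard RANK environment variable set by torch distributed launchers (this is illustration, not code from this commit):

import logging
import os


class MainProcessFilter(logging.Filter):
    # Hypothetical filter for illustration: suppress records on non-zero
    # ranks so LOG.info()/LOG.warning() call sites need no rank checks.
    def filter(self, record: logging.LogRecord) -> bool:
        return int(os.environ.get("RANK", "0")) == 0


LOG = logging.getLogger("axolotl")
LOG.addFilter(MainProcessFilter())
LOG.warning("only rank 0 emits this")  # dropped on all other ranks

Attached once at logger setup, a filter like this makes duplicate-log suppression a single concern instead of a condition repeated before every call.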