We read every piece of feedback and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent bbf945b commit 82a7177Copy full SHA for 82a7177
paddlenlp/transformers/model_utils.py
@@ -1916,7 +1916,7 @@ def _fuse_or_split_keys(
1916
if (
1917
shard_file.endswith(".safetensors")
1918
and config.tensor_parallel_degree > 1
1919
- and "tp" not in os.path.spilt(shard_file)[-1]
+ and "tp" not in os.path.split(shard_file)[-1]
1920
):
1921
pre_tensor_parallel_split = True
1922
assert loaded_keys is not None, "loaded_keys is not None."
0 commit comments