
Commit 616c1cc

up
Signed-off-by: greg-kwasniewski1 <[email protected]>
1 parent: 2eb644e

File tree: 1 file changed (+1, -1 lines)

tensorrt_llm/_torch/auto_deploy/transformations/transform.py

Lines changed: 1 addition & 1 deletion
@@ -115,7 +115,7 @@ def __call__(self, cm: CachedSequenceInterface) -> nn.Module:
         optimize_rope(egm)
 
         sharding_config = ShardingConfig(local_rank, world_size, self.factory.get_sharding_config())
-        self.ad_config.use_sharding_from_config = False
+        # self.ad_config.use_sharding_from_config = False
         if (
             self.ad_config.use_sharding_from_config
             and sharding_config.predefined_config is not None
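
The change is one line but behavioral: the removed assignment forced self.ad_config.use_sharding_from_config to False immediately before the flag is tested, which made the predefined-sharding branch unreachable; with the assignment commented out, whatever value ad_config carries is respected. The following minimal Python sketch illustrates that control flow under simplified stand-in types. Only the names visible in this diff (ShardingConfig, use_sharding_from_config, predefined_config, get_sharding_config) come from the source; everything else is hypothetical.

```python
from dataclasses import dataclass
from typing import Optional


@dataclass
class ShardingConfig:
    # Simplified stand-in for the real ShardingConfig in transform.py; the
    # constructor arguments mirror the call shown in the diff.
    local_rank: int
    world_size: int
    predefined_config: Optional[dict] = None


@dataclass
class ADConfig:
    # Hypothetical stand-in for self.ad_config; only the flag name is from the diff.
    use_sharding_from_config: bool = True


def choose_sharding_path(ad_config: ADConfig, sharding_config: ShardingConfig) -> str:
    # Before this commit the flag was unconditionally reset right here, so the
    # predefined-config branch below could never execute:
    #     ad_config.use_sharding_from_config = False
    if (
        ad_config.use_sharding_from_config
        and sharding_config.predefined_config is not None
    ):
        return "use the factory's predefined sharding config"
    return "fall back to the default sharding path"


cfg = ShardingConfig(local_rank=0, world_size=2, predefined_config={"tp_size": 2})
print(choose_sharding_path(ADConfig(use_sharding_from_config=True), cfg))   # predefined branch
print(choose_sharding_path(ADConfig(use_sharding_from_config=False), cfg))  # default path
```

choose_sharding_path and its return strings are purely illustrative; in the actual file the guarded branch presumably applies the predefined sharding rather than returning a label.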
