We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 57f560a · commit 997cf78 — Copy full SHA for 997cf78
vllm/distributed/parallel_state.py
@@ -713,8 +713,8 @@ def recv(self,
 713              size: torch.Size,
 714              dtype: torch.dtype,
 715              src: Optional[int] = None) -> torch.Tensor:
 716
-717      """Receives a tensor from the src rank."""
-718      """NOTE: `src` is the local rank of the destination rank."""
+717      """Receives a tensor from the source rank."""
+718      """NOTE: `src` is the local rank of the source rank."""
 719      if src is None:
 720          src = (self.rank_in_group - 1) % self.world_size
0 commit comments