Skip to content

Commit 52fa043

Browse files
authored
Fix pyre
Differential Revision: D66468376. Pull Request resolved: #7058.
1 parent ffb1b7d commit 52fa043

File tree

2 files changed

+2
-2
lines changed

2 files changed

+2
-2
lines changed

examples/models/llama/source_transformation/apply_spin_quant_r1_r2.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -146,9 +146,9 @@ def fuse_ln_linear(
146146
torch.zeros(linear.out_features, dtype=torch.float32)
147147
)
148148
linear.bias.data = linear.bias.data.to(dtype=torch.float32) + torch.matmul(
149+
W_,
149150
# pyre-fixme[6]: For 2nd argument expected `Tensor` but got
150151
# `Union[Tensor, Module]`.
151-
W_,
152152
layernorm.bias.to(dtype=torch.float32),
153153
)
154154
linear.bias.data = linear.bias.data.to(linear_dtype)

exir/emit/_emitter.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1634,8 +1634,8 @@ def plan(self) -> ExecutionPlan:
16341634
# missing in scenarios like unit test that does not enable memory planning, assume an
16351635
# empty list.
16361636
non_const_buffer_sizes=typing.cast(
1637-
# pyre-fixme[29]: `Union[BoundMethod[typing.Callable(torch._C.TensorB...
16381637
List[int],
1638+
# pyre-fixme[29]: `Union[BoundMethod[typing.Callable(torch._C.TensorB...
16391639
self.module.meta["non_const_buffer_sizes"],
16401640
),
16411641
container_meta_type=self.container_meta_type,

0 commit comments

Comments (0)