
Commit b65124d

invert nope check

1 parent 44ef111 commit b65124d


vllm/model_executor/models/llama4.py

Lines changed: 1 addition & 1 deletion
```diff
@@ -245,7 +245,7 @@ def forward(
         # rotary_emb is fused into self.attn in this case
         if self.use_fused_rope:
             assert not (
-                self.attn_temperature_tuning
+                self.attn_temperature_tuning or self.nope
             ), f"{self.attn_temperature_tuning=} and {self.nope=} must be False with {VLLM_ROCM_USE_AITER_TRITON_FUSED_ROPE_ZEROS_KV_CACHE=}"
             attn_output = self.attn(q, k, v, positions=positions)
             output, _ = self.o_proj(attn_output)
```
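The change broadens the precondition on the fused-RoPE path: the assertion, which previously rejected only `attn_temperature_tuning`, now also rejects `nope`. A minimal runnable sketch of the resulting guard is below; `FusedRopeGuard` and its constructor are hypothetical stand-ins (the real code lives in `Llama4Attention.forward` in vllm/model_executor/models/llama4.py), and only `use_fused_rope`, `attn_temperature_tuning`, and `nope` come from the diff above.

```python
# Minimal sketch of the guard pattern after this commit, not the actual
# vLLM module. Per the comment in the diff, the fused path applies
# rotary embeddings inside self.attn, which presumably conflicts with
# both attention temperature tuning and NoPE (no positional embedding).
class FusedRopeGuard:
    def __init__(self, use_fused_rope: bool,
                 attn_temperature_tuning: bool, nope: bool):
        self.use_fused_rope = use_fused_rope
        self.attn_temperature_tuning = attn_temperature_tuning
        self.nope = nope

    def check(self) -> None:
        if self.use_fused_rope:
            # Before this commit only attn_temperature_tuning was
            # asserted False; the change also rejects nope.
            assert not (self.attn_temperature_tuning or self.nope), (
                f"{self.attn_temperature_tuning=} and {self.nope=} "
                "must be False when use_fused_rope is enabled"
            )


# Usage: this configuration passes; setting nope=True would now raise
# an AssertionError, where before this commit it would not.
FusedRopeGuard(use_fused_rope=True,
               attn_temperature_tuning=False, nope=False).check()
```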
