We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 3ded8e2 · commit 2738c6f (Copy full SHA for 2738c6f)
vllm_ascend/patch/worker/patch_common/patch_attention_selector.py
@@ -14,6 +14,7 @@
14
# See the License for the specific language governing permissions and
15
# limitations under the License.
16
#
17
+# mypy: ignore-errors
18
from functools import cache
19
from typing import Optional
20
@@ -73,8 +74,8 @@ def _cached_get_attn_backend(
73
74
# use the placeholder NO_ATTENTION
75
if is_attention_free:
76
         from vllm.attention.backends.placeholder_attn import \
76 -         PlaceholderAttentionBackend  # type: ignore
77 -     return PlaceholderAttentionBackend  # type: ignore
76 +         PlaceholderAttentionBackend
77 +     return PlaceholderAttentionBackend
79
80
# Check whether a particular choice of backend was
81
# previously forced.
0 commit comments