f"Flash Attention backend '{backend.value}' is not usable because of missing package or the version is too old. Please install `flash-attn>={_REQUIRED_FLASH_VERSION}`."
f"Flash Attention 3 backend '{backend.value}' is not usable because of missing package or the version is too old. Please build FA3 beta release from source."
f"Sage Attention backend '{backend.value}' is not usable because of missing package or the version is too old. Please install `sageattention>={_REQUIRED_SAGE_VERSION}`."
)
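# NOTE: each branch below pairs a backend enum value with a module-level
# availability flag (assumed to be computed once at import time from the
# installed packages), so an unusable backend fails fast with an install hint.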
elif backend == AttentionBackendName.FLEX:
    if not _CAN_USE_FLEX_ATTN:
        raise RuntimeError(
            f"Flex Attention backend '{backend.value}' is not usable because the package is missing or the version is too old. Please install `torch>=2.5.0`."
        )
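# The underscore-prefixed backends below (_NATIVE_NPU, _NATIVE_XLA) depend on
# vendor-specific torch plugins (torch_npu, torch_xla) rather than standalone
# attention kernel packages.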
elif backend == AttentionBackendName._NATIVE_NPU:
    if not _CAN_USE_NPU_ATTN:
        raise RuntimeError(
            f"NPU Attention backend '{backend.value}' is not usable because the package is missing or the version is too old. Please install `torch_npu`."
        )
elif backend == AttentionBackendName._NATIVE_XLA:
    if not _CAN_USE_XLA_ATTN:
        raise RuntimeError(
            f"XLA Attention backend '{backend.value}' is not usable because the package is missing or the version is too old. Please install `torch_xla>={_REQUIRED_XLA_VERSION}`."
        )
elif backend == AttentionBackendName.XFORMERS:
    if not _CAN_USE_XFORMERS_ATTN:
        raise RuntimeError(
            f"Xformers Attention backend '{backend.value}' is not usable because the package is missing or the version is too old. Please install `xformers>={_REQUIRED_XFORMERS_VERSION}`."