""" NPU-optimized activation functions for Ascend. Provides ``AscendSiluAndMul`` that uses ``torch_npu.npu_swiglu`` for fused SiLU+Mul on NPU devices. """ import torch from vllm.model_executor.layers.activation import SiluAndMul class AscendSiluAndMul(SiluAndMul): """SiluAndMul using torch_npu.npu_swiglu on Ascend NPU.""" def forward_oot(self, x: torch.Tensor) -> torch.Tensor: import torch_npu # noqa: F401 return torch_npu.npu_swiglu(x)