Unverified Commit 4d921f2b authored by Cheng Wan's avatar Cheng Wan Committed by GitHub
Browse files

[hotfix] fix merge conflicts in FlashInferEPMoE (#8405)

parent 44d600cd
...@@ -1236,6 +1236,7 @@ class FlashInferEPMoE(EPMoE):
        self.num_expert_group = num_expert_group
        self.topk_group = topk_group
        self.correction_bias = correction_bias
self.use_flashinfer_trtllm_moe = use_flashinfer_trtllm_moe
def forward(self, hidden_states: torch.Tensor, router_logits: torch.Tensor): def forward(self, hidden_states: torch.Tensor, router_logits: torch.Tensor):
assert use_flashinfer_trtllm_moe assert use_flashinfer_trtllm_moe
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment