Skip to content

Commit 016ff63

Browse files
白永斌845473182
authored and committed
fix ut
Signed-off-by: 白永斌 <[email protected]> Signed-off-by: 欧派果奶我还要 <[email protected]>
1 parent 765a3c3 commit 016ff63

File tree

3 files changed

+4
-7
lines changed

3 files changed

+4
-7
lines changed

tests/ut/ops/test_fused_moe.py

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -409,9 +409,9 @@ def test_unified_apply_mlp_without_quantization(self,
409409
topk_scales = torch.randn(10, 1, dtype=torch.float16)
410410

411411
result = unified_apply_mlp(hidden_states=hidden_states,
412-
w1=w1,
412+
w1=[w1],
413413
w1_scale=None,
414-
w2=w2,
414+
w2=[w2],
415415
w2_scale=None,
416416
group_list=group_list,
417417
dynamic_scale=None,
@@ -512,9 +512,9 @@ def test_unified_apply_mlp_without_quantization_310p(
512512
topk_scales = torch.randn(10, 1, dtype=torch.float16)
513513

514514
result = unified_apply_mlp(hidden_states=hidden_states,
515-
w1=w1,
515+
w1=[w1],
516516
w1_scale=None,
517-
w2=w2,
517+
w2=[w2],
518518
w2_scale=None,
519519
group_list=group_list,
520520
dynamic_scale=None,

vllm_ascend/ops/fused_moe/moe_comm_method.py

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -137,7 +137,6 @@ def fused_experts(
137137
permuted_hidden_states, expert_tokens, dynamic_scale, group_list_type, topk_scales, context_metadata = \
138138
results["hidden_states"], results["group_list"], results.get("dynamic_scale"), results["group_list_type"], results.get("topk_scales"), results.get("context_metadata")
139139

140-
assert w1_scale is not None and w2_scale is not None
141140
mlp_output = unified_apply_mlp(hidden_states=permuted_hidden_states,
142141
w1=w1,
143142
w1_scale=w1_scale,

vllm_ascend/ops/fused_moe/moe_mlp.py

Lines changed: 0 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -262,9 +262,7 @@ def unquant_apply_mlp(hidden_states: torch.Tensor,
262262

263263
def unified_apply_mlp(hidden_states: torch.Tensor,
264264
w1: list[torch.Tensor],
265-
w1_scale: list[torch.Tensor],
266265
w2: list[torch.Tensor],
267-
w2_scale: list[torch.Tensor],
268266
group_list: torch.Tensor,
269267
w1_scale: Optional[list[torch.Tensor]] = None,
270268
w2_scale: Optional[list[torch.Tensor]] = None,

0 commit comments

Comments (0)