Commit ea54388

Drop ascend scheduler (#4623)
It's safe to drop the Ascend scheduler now; the related tests and docs have already been removed.

- vLLM version: v0.12.0
- vLLM main: vllm-project/vllm@ad32e3e

Signed-off-by: wangxiyuan <[email protected]>
1 parent 00b4fb8 commit ea54388
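
For reference, the dropped knob lived under additional_config and was parsed by the AscendSchedulerConfig class removed below. Here is a minimal sketch of the configuration that stops taking effect after this commit; the LLM(additional_config=...) entry point follows vLLM's offline API, and the model name is a placeholder, not from this commit:

from vllm import LLM  # standard vLLM offline entry point

# Hedged sketch: pre-ea54388 usage of the dropped option. After this
# commit the "ascend_scheduler_config" key is no longer read at all.
llm = LLM(
    model="some/model",  # placeholder, not from this commit
    additional_config={
        # Formerly parsed by AscendSchedulerConfig (removed below).
        "ascend_scheduler_config": {"enabled": True},
    },
)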

12 files changed: +34 −767 lines changed

.github/workflows/vllm_ascend_test_pr_light.yaml

Lines changed: 0 additions & 1 deletion
@@ -139,7 +139,6 @@ jobs:
           --ignore tests/ut/kv_connector/test_remote_prefill_lifecycle.py \
           --ignore tests/ut/kv_connector/test_remote_decode_lifecycle.py \
           --ignore tests/ut/kv_connector/test_llmdatadist_connector.py \
-          --ignore tests/ut/ops/test_linear.py \
           --ignore tests/ut/core/test_scheduler_dynamic_batch.py
 
       - name: Upload coverage to Codecov

tests/ut/ops/test_linear.py

Lines changed: 1 addition & 1 deletion
@@ -99,7 +99,7 @@ def test_oproj_tp(self):
 
         ascend_config._ASCEND_CONFIG = MagicMock()
         ascend_config._ASCEND_CONFIG.oproj_tensor_parallel_size = 2
-        ascend_config._ASCEND_CONFIG.ascend_scheduler_config.enabled = False
+        ascend_config._ASCEND_CONFIG.recompute_scheduler_enable = False
 
         linear = AscendRowParallelLinear(
             input_size=16,
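
The hunk above only re-points the stubbed flag from the removed ascend_scheduler_config.enabled to recompute_scheduler_enable. For readers unfamiliar with the pattern, here is a self-contained sketch of the singleton swap the test relies on; only _ASCEND_CONFIG and the two attribute names come from the diff, the stand-in module is illustrative:

from types import SimpleNamespace
from unittest.mock import MagicMock

# Stand-in for the vllm_ascend.ascend_config module (illustrative only).
ascend_config = SimpleNamespace(_ASCEND_CONFIG=None)

# The test replaces the singleton wholesale, then pins just the
# attributes the code under test actually reads.
ascend_config._ASCEND_CONFIG = MagicMock()
ascend_config._ASCEND_CONFIG.oproj_tensor_parallel_size = 2
ascend_config._ASCEND_CONFIG.recompute_scheduler_enable = False

assert ascend_config._ASCEND_CONFIG.recompute_scheduler_enable is False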

tests/ut/ops/test_vocab_parallel_embedding.py

Lines changed: 1 addition & 6 deletions
@@ -209,12 +209,7 @@ def setUp(self):
                   return_value=torch.randn(1, self.vocab_size)),
             patch(
                 "vllm_ascend.ops.vocab_parallel_embedding.get_lmhead_tp_group.all_gather",
-                return_value=torch.randn(1, self.vocab_size)),
-            patch(
-                "vllm_ascend.core.schedule_config.AscendSchedulerConfig.initialize_from_config",
-                return_value=MagicMock(max_num_batched_tokens=1000,
-                                       max_model_len=512,
-                                       enable_chunked_prefill=False))
+                return_value=torch.randn(1, self.vocab_size))
         ]
 
         for p in self.patches:
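
The setUp() above keeps a list of patch() objects and starts them in a loop (the `for p in self.patches:` context line is where the diff cuts off). A runnable sketch of that pattern, using addCleanup() so every patch is stopped even when a test fails; the Demo and helper names are illustrative, not from the commit:

import unittest
from unittest import mock


def helper():
    return "real"


class Demo(unittest.TestCase):
    def setUp(self):
        # Collect all patches in one list, as the test above does.
        self.patches = [
            mock.patch(f"{__name__}.helper", return_value="fake"),
        ]
        for p in self.patches:
            p.start()
            self.addCleanup(p.stop)  # guaranteed teardown

    def test_patched(self):
        self.assertEqual(helper(), "fake")


if __name__ == "__main__":
    unittest.main()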

tests/ut/test_platform.py

Lines changed: 0 additions & 1 deletion
@@ -32,7 +32,6 @@ def mock_vllm_config():
 def mock_vllm_ascend_config():
     mock_ascend_config = MagicMock()
     mock_ascend_config.torchair_graph_config.enabled = False
-    mock_ascend_config.ascend_scheduler_config.enabled = False
     mock_ascend_config.enable_shared_expert_dp = False
     return mock_ascend_config

vllm_ascend/ascend_config.py

Lines changed: 0 additions & 19 deletions
@@ -45,11 +45,6 @@ def __init__(self, vllm_config):
         self.ascend_compilation_config = AscendCompilationConfig(
             **ascend_compilation_config)
 
-        ascend_scheduler_config = additional_config.get(
-            "ascend_scheduler_config", {})
-        self.ascend_scheduler_config = AscendSchedulerConfig(
-            ascend_scheduler_config)
-
         # Dump / PrecisionDebugger configuration
         dump_config_path = additional_config.get("dump_config", None)
         self.dump_config = DumpConfig(dump_config_path)

@@ -255,20 +250,6 @@ def __init__(self, torchair_graph_config, vllm_config, additional_config):
         )
 
 
-class AscendSchedulerConfig:
-    """
-    Configuration Object for ascend_scheduler_config from additional_config
-    """
-
-    def __init__(self, ascend_scheduler_config: dict):
-        self.enabled = ascend_scheduler_config.get("enabled", False)
-        # Ascend scheduler is based on vllm v0 scheduler, so we should support
-        # all vllm v0 scheduler configs as well.
-        for k, v in ascend_scheduler_config.items():
-            if not hasattr(self, k):
-                setattr(self, k, v)
-
-
 class DumpConfig:
     """
     Configuration object for dump/PrecisionDebugger settings.
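
The removed class used a small pass-through pattern worth noting: set explicit defaults first, then copy any remaining user keys onto the instance without clobbering what is already there (which is how it accepted arbitrary vLLM v0 scheduler options). A generic sketch of the pattern, with illustrative names:

class PassthroughConfig:
    """Generic version of the pattern used by the removed class."""

    def __init__(self, raw: dict):
        self.enabled = raw.get("enabled", False)  # explicit default
        for k, v in raw.items():
            if not hasattr(self, k):  # never overwrite explicit fields
                setattr(self, k, v)


cfg = PassthroughConfig({"enabled": True, "max_num_seqs": 8})
assert cfg.enabled is True and cfg.max_num_seqs == 8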

vllm_ascend/core/schedule_config.py

Lines changed: 0 additions & 105 deletions
This file was deleted.
