1 file changed: 0 additions, 12 deletions
@@ -6,8 +6,6 @@
 
 import pytest
 
-from vllm.platforms import current_platform
-
 from ..conftest import HfRunner, VllmRunner
 from ..utils import multi_gpu_test, prep_prompts
 from .registry import HF_EXAMPLE_MODELS
@@ -59,10 +57,6 @@ def check_implementation(
     )
 
 
-@pytest.mark.skipif(
-    current_platform.is_rocm(),
-    reason="Llama-3.2-1B-Instruct, Ilama-3.2-1B produce memory access fault.",
-)
 @pytest.mark.parametrize(
     "model,model_impl",
     [
@@ -147,12 +141,6 @@ def test_quantization(
     max_tokens: int,
     num_logprobs: int,
 ) -> None:
-    if (
-        current_platform.is_rocm()
-        and quantization_kwargs.get("quantization", "") == "bitsandbytes"
-    ):
-        pytest.skip("bitsandbytes quantization is currently not supported in rocm.")
-
     with vllm_runner(
         model,
         model_impl="auto",
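
For context, the two guards deleted above are standard pytest skip patterns rather than anything vLLM-specific. Below is a minimal sketch of both forms, assuming only that vllm.platforms.current_platform exposes the is_rocm() check used in the original code; the test names and the example quantization_kwargs value are hypothetical.

import pytest

from vllm.platforms import current_platform


# Decorator form: the test is skipped at collection time when the
# condition is true, as the removed @pytest.mark.skipif guard did.
@pytest.mark.skipif(
    current_platform.is_rocm(),
    reason="Known memory access fault on ROCm.",  # hypothetical reason text
)
def test_decorator_guard():
    assert True


# Imperative form: skip from inside the test body once runtime values
# are known, as the removed bitsandbytes check did.
def test_runtime_guard():
    quantization_kwargs = {"quantization": "bitsandbytes"}  # hypothetical value
    if (
        current_platform.is_rocm()
        and quantization_kwargs.get("quantization", "") == "bitsandbytes"
    ):
        pytest.skip("bitsandbytes quantization is currently not supported on ROCm.")
    assert True

With the guards removed, the affected tests now run on ROCm as well instead of being skipped.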