Commit 3b221cb

[BugFix] respect VLLM_LOGGING_LEVEL in logger (#29761)
Signed-off-by: Boyuan Feng <[email protected]>
1 parent: 0037b57

4 files changed: +9 -5 lines

tests/conftest.py

Lines changed: 1 addition & 0 deletions
@@ -1174,6 +1174,7 @@ def ctx(level: int | str):
             "level": level,
             "filename": log_path.as_posix(),
         }
+        config["loggers"]["vllm"]["level"] = level
 
         config_path.write_text(json.dumps(config))
 
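Note on the fixture change: the ctx helper in tests/conftest.py writes a JSON dictConfig to disk for the logging tests, and the new line sets the level on the "vllm" logger itself rather than only on the handler entry; in Python logging a record must clear the logger's effective level before any handler is consulted. A minimal sketch of that shape, with assumed names (write_logging_config, a "file" handler) standing in for the fixture's real structure:

import json
from pathlib import Path


def write_logging_config(log_path: Path, config_path: Path, level: str = "DEBUG") -> None:
    # Illustrative dictConfig layout, not the fixture's exact contents.
    config = {
        "version": 1,
        "disable_existing_loggers": False,
        "handlers": {
            "file": {
                "class": "logging.FileHandler",
                "level": level,
                "filename": log_path.as_posix(),
            },
        },
        "loggers": {
            "vllm": {"handlers": ["file"], "propagate": False},
        },
    }
    # Mirrors the one-line fix: the logger level must track the requested
    # level too, otherwise records below the logger's default level are
    # dropped before the file handler ever sees them.
    config["loggers"]["vllm"]["level"] = level
    config_path.write_text(json.dumps(config))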

tests/test_config.py

Lines changed: 2 additions & 2 deletions
@@ -716,7 +716,7 @@ def test_is_chunked_prefill_supported(
 ):
     model_config = ModelConfig(model_id, trust_remote_code=True)
     assert model_config.attn_type == expected_attn_type
-    with caplog_vllm.at_level(level=logging.DEBUG):
+    with caplog_vllm.at_level(level=logging.DEBUG, logger="vllm"):
         assert model_config.is_chunked_prefill_supported == expected_result
         assert reason in caplog_vllm.text
 
@@ -835,7 +835,7 @@ def test_is_prefix_caching_supported(
 ):
     model_config = ModelConfig(model_id, trust_remote_code=True)
     assert model_config.attn_type == expected_attn_type
-    with caplog_vllm.at_level(level=logging.DEBUG):
+    with caplog_vllm.at_level(level=logging.DEBUG, logger="vllm"):
         assert model_config.is_prefix_caching_supported == expected_result
         assert reason in caplog_vllm.text
 
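Note on the test change: because the "vllm" logger now follows VLLM_LOGGING_LEVEL instead of sitting at DEBUG, and is configured with propagate=False, the DEBUG override in these tests has to target that logger explicitly via the logger= argument of at_level. A small sketch of pytest's built-in caplog.at_level(level, logger=...) behaviour, using a hypothetical "demo.config" logger rather than the project's caplog_vllm fixture:

import logging


def test_at_level_targets_named_logger(caplog):
    log = logging.getLogger("demo.config")  # hypothetical logger name
    log.debug("dropped: the logger's effective level is still WARNING")
    with caplog.at_level(logging.DEBUG, logger="demo.config"):
        log.debug("captured while the override is active")
    assert "captured while the override is active" in caplog.text
    assert "dropped" not in caplog.text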

tests/test_logger.py

Lines changed: 2 additions & 2 deletions
@@ -57,7 +57,7 @@ def test_default_vllm_root_logger_configuration(monkeypatch):
     _configure_vllm_root_logger()
 
     logger = logging.getLogger("vllm")
-    assert logger.level == logging.DEBUG
+    assert logger.level == logging.INFO
     assert not logger.propagate
 
     handler = logger.handlers[0]
@@ -524,7 +524,7 @@ def mp_function(**kwargs):
 
 
 def test_caplog_mp_fork(caplog_vllm, caplog_mp_fork):
-    with caplog_vllm.at_level(logging.DEBUG), caplog_mp_fork():
+    with caplog_vllm.at_level(logging.DEBUG, logger="vllm"), caplog_mp_fork():
         import multiprocessing
 
         ctx = multiprocessing.get_context("fork")
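Note on the expectation change: the default level asserted here moves from DEBUG to INFO because the "vllm" logger now takes its level from VLLM_LOGGING_LEVEL, whose default (per vllm/envs.py, to the best of my reading) is "INFO". A hedged sketch of overriding it; setting the variable before vllm.logger is imported is assumed to be required, since the default config is applied at import time:

import logging
import os

# Assumed requirement: export the variable before importing vllm, because the
# root "vllm" logger is configured when vllm.logger is first imported.
os.environ["VLLM_LOGGING_LEVEL"] = "DEBUG"

from vllm.logger import init_logger

logger = init_logger("vllm.example")
logger.debug("emitted, since the 'vllm' logger was configured at DEBUG")
print(logging.getLogger("vllm").level)  # expected: 10 (logging.DEBUG)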

vllm/logger.py

Lines changed: 4 additions & 1 deletion
@@ -62,7 +62,7 @@ def _use_color() -> bool:
     "loggers": {
         "vllm": {
             "handlers": ["vllm"],
-            "level": "DEBUG",
+            "level": envs.VLLM_LOGGING_LEVEL,
             "propagate": False,
         },
     },
@@ -175,6 +175,9 @@ def _configure_vllm_root_logger() -> None:
         vllm_handler["stream"] = envs.VLLM_LOGGING_STREAM
         vllm_handler["formatter"] = "vllm_color" if _use_color() else "vllm"
 
+        vllm_loggers = logging_config["loggers"]["vllm"]
+        vllm_loggers["level"] = envs.VLLM_LOGGING_LEVEL
+
     if envs.VLLM_LOGGING_CONFIG_PATH:
         if not path.exists(envs.VLLM_LOGGING_CONFIG_PATH):
             raise RuntimeError(
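Note on the core change: both the static DEFAULT_LOGGING_CONFIG and the runtime path in _configure_vllm_root_logger now set the "vllm" logger's level from envs.VLLM_LOGGING_LEVEL instead of a hard-coded "DEBUG", so the environment variable controls the logger itself and logger.isEnabledFor(logging.DEBUG) reflects the requested level. A self-contained sketch of the same env-driven dictConfig pattern; the "demo" logger and build_logging_config name are illustrative, not vLLM's:

import logging
import logging.config
import os


def build_logging_config() -> dict:
    # Same idea as the fix: the logger level (not just the handler level)
    # is derived from the environment variable.
    level = os.environ.get("VLLM_LOGGING_LEVEL", "INFO").upper()
    return {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {"plain": {"format": "%(levelname)s %(name)s: %(message)s"}},
        "handlers": {
            "console": {
                "class": "logging.StreamHandler",
                "formatter": "plain",
                "level": level,
            },
        },
        "loggers": {
            "demo": {"handlers": ["console"], "level": level, "propagate": False},
        },
    }


logging.config.dictConfig(build_logging_config())
logging.getLogger("demo").info("shown at the default INFO level")
logging.getLogger("demo").debug("hidden unless VLLM_LOGGING_LEVEL=DEBUG is exported")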
