Skip to content

Commit 2c06cf3

Browse files
authored
[Bugfix] use served_model_name for multimodal error message (vllm-project#41003)
Signed-off-by: Moritz Sanft <58110325+msanft@users.noreply.github.com>
1 parent e6f710a commit 2c06cf3

2 files changed

Lines changed: 18 additions & 1 deletion

File tree

tests/multimodal/test_registry.py

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,8 @@
55
Qwen2.5-VL visual component loading behavior.
66
"""
77

8+
from types import SimpleNamespace
9+
810
import pytest
911

1012
from vllm.multimodal import MULTIMODAL_REGISTRY
@@ -32,3 +34,17 @@ def test_supports_multimodal_inputs(model_id, limit_mm_per_prompt, expected):
3234
limit_mm_per_prompt=limit_mm_per_prompt,
3335
)
3436
assert MULTIMODAL_REGISTRY.supports_multimodal_inputs(ctx.model_config) is expected
37+
38+
39+
def test_create_processor_error_uses_served_model_name():
    """The not-multimodal error must show the served model name, not the weights path.

    Regression test: when a model is exposed under a friendly serving alias,
    the ValueError raised for a non-multimodal model should reference that
    alias rather than the on-disk model path.
    """
    cfg = SimpleNamespace(
        is_multimodal_model=False,
        model="/path/to/model/weights",
        served_model_name="friendly-model-name",
    )

    expected_msg = "friendly-model-name is not a multimodal model"
    with pytest.raises(ValueError, match=expected_msg):
        MULTIMODAL_REGISTRY.create_processor(cfg)

vllm/multimodal/registry.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -207,7 +207,8 @@ def create_processor(
207207
Create a multi-modal processor for a specific model and tokenizer.
208208
"""
209209
if not model_config.is_multimodal_model:
210-
raise ValueError(f"{model_config.model} is not a multimodal model")
210+
model_name = model_config.served_model_name or model_config.model
211+
raise ValueError(f"{model_name} is not a multimodal model")
211212

212213
model_cls = self._get_model_cls(model_config)
213214
factories = model_cls._processor_factory

0 commit comments

Comments
 (0)