
Commit af47de5

princepride authored and DarkLight1337 committed
[New Model] BAGEL support (AR only) (vllm-project#28439)
Signed-off-by: princepride <[email protected]>
Signed-off-by: 汪志鹏 <[email protected]>
Co-authored-by: Cyrus Leung <[email protected]>
1 parent 98c3e02 commit af47de5

File tree

11 files changed: +777 -0 lines changed

docs/models/supported_models.md

Lines changed: 1 addition & 0 deletions
@@ -661,6 +661,7 @@ These models primarily accept the [`LLM.generate`](./generative_models.md#llmgen
 | `AriaForConditionalGeneration` | Aria | T + I<sup>+</sup> | `rhymes-ai/Aria` | | |
 | `AudioFlamingo3ForConditionalGeneration` | AudioFlamingo3 | T + A<sup>+</sup> | `nvidia/audio-flamingo-3-hf`, `nvidia/music-flamingo-hf` | ✅︎ | ✅︎ |
 | `AyaVisionForConditionalGeneration` | Aya Vision | T + I<sup>+</sup> | `CohereLabs/aya-vision-8b`, `CohereLabs/aya-vision-32b`, etc. | | ✅︎ |
+| `BagelForConditionalGeneration` | BAGEL | T + I<sup>+</sup> | `ByteDance-Seed/BAGEL-7B-MoT` | ✅︎ | ✅︎ |
 | `BeeForConditionalGeneration` | Bee-8B | T + I<sup>E+</sup> | `Open-Bee/Bee-8B-RL`, `Open-Bee/Bee-8B-SFT` | | ✅︎ |
 | `Blip2ForConditionalGeneration` | BLIP-2 | T + I<sup>E</sup> | `Salesforce/blip2-opt-2.7b`, `Salesforce/blip2-opt-6.7b`, etc. | | ✅︎ |
 | `ChameleonForConditionalGeneration` | Chameleon | T + I | `facebook/chameleon-7b`, etc. | | ✅︎ |
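For quick reference, a minimal offline-inference sketch is shown below. It is not part of this commit: the prompt template mirrors the example added in `examples/offline_inference/vision_language.py`, while the image path and sampling settings are placeholder assumptions.

```python
# Minimal sketch (not from this commit): BAGEL image understanding via the
# offline LLM.generate API. The prompt template follows the run_bagel example;
# "example.jpg" and the sampling settings are assumptions.
from PIL import Image

from vllm import LLM, SamplingParams

llm = LLM(
    model="ByteDance-Seed/BAGEL-7B-MoT",
    trust_remote_code=True,
    max_model_len=8192,
    limit_mm_per_prompt={"image": 1},
)

question = "What is shown in this image?"
prompt = (
    f"<|im_start|>user\n<|image_pad|>\n{question}<|im_end|>\n"
    f"<|im_start|>assistant\n"
)
image = Image.open("example.jpg").convert("RGB")  # assumed local test image

outputs = llm.generate(
    {"prompt": prompt, "multi_modal_data": {"image": image}},
    SamplingParams(max_tokens=128),
)
print(outputs[0].outputs[0].text)
```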

examples/offline_inference/vision_language.py

Lines changed: 27 additions & 0 deletions
@@ -118,6 +118,32 @@ def run_bee(questions: list[str], modality: str) -> ModelRequestData:
     )
 
 
+def run_bagel(questions: list[str], modality: str) -> ModelRequestData:
+    assert modality == "image"
+    model_name = "ByteDance-Seed/BAGEL-7B-MoT"
+
+    engine_args = EngineArgs(
+        model=model_name,
+        trust_remote_code=True,
+        max_model_len=8192,
+        max_num_seqs=2,
+        limit_mm_per_prompt={modality: 1},
+    )
+
+    prompts = [
+        (
+            f"<|im_start|>user\n<|image_pad|>\n{question}<|im_end|>\n"
+            f"<|im_start|>assistant\n"
+        )
+        for question in questions
+    ]
+
+    return ModelRequestData(
+        engine_args=engine_args,
+        prompts=prompts,
+    )
+
+
 # BLIP-2
 def run_blip2(questions: list[str], modality: str) -> ModelRequestData:
     assert modality == "image"

@@ -1832,6 +1858,7 @@ def run_tarsier2(questions: list[str], modality: str) -> ModelRequestData:
 model_example_map = {
     "aria": run_aria,
     "aya_vision": run_aya_vision,
+    "bagel": run_bagel,
     "bee": run_bee,
     "blip-2": run_blip2,
     "chameleon": run_chameleon,

tests/models/registry.py

Lines changed: 1 addition & 0 deletions
@@ -582,6 +582,7 @@ def check_available_online(
         "nvidia/audio-flamingo-3-hf", min_transformers_version="5.0.0.dev"
     ),
     "AyaVisionForConditionalGeneration": _HfExamplesInfo("CohereLabs/aya-vision-8b"),
+    "BagelForConditionalGeneration": _HfExamplesInfo("ByteDance-Seed/BAGEL-7B-MoT"),
    "BeeForConditionalGeneration": _HfExamplesInfo(
         "Open-Bee/Bee-8B-RL",
         trust_remote_code=True,
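The commit only wires BAGEL into the offline example and the test registry, but since the table above marks it as a supported generative multimodal model, serving it with `vllm serve ByteDance-Seed/BAGEL-7B-MoT --trust-remote-code` and querying the OpenAI-compatible endpoint should look roughly like the sketch below. This is an assumption rather than part of this change: the port, the image URL, and the expectation that the served chat template handles image placement are all hypothetical.

```python
# Hypothetical client-side sketch against a locally running
# `vllm serve ByteDance-Seed/BAGEL-7B-MoT --trust-remote-code` instance;
# the endpoint, image URL, and chat-template behaviour are assumptions.
from openai import OpenAI

client = OpenAI(base_url="http://localhost:8000/v1", api_key="EMPTY")

response = client.chat.completions.create(
    model="ByteDance-Seed/BAGEL-7B-MoT",
    messages=[
        {
            "role": "user",
            "content": [
                {"type": "image_url", "image_url": {"url": "https://example.com/cat.jpg"}},
                {"type": "text", "text": "Describe this image."},
            ],
        }
    ],
    max_tokens=128,
)
print(response.choices[0].message.content)
```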
