From c69ab69352e6c14e0cfc041db2c41ff1b23cb0ca Mon Sep 17 00:00:00 2001
From: DarkLight1337
Date: Fri, 24 Jan 2025 16:11:22 +0000
Subject: [PATCH] Fix blip2 processing

Signed-off-by: DarkLight1337
---
 vllm/model_executor/models/blip2.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/vllm/model_executor/models/blip2.py b/vllm/model_executor/models/blip2.py
index 09c5087c2dc3..b559ac677a74 100644
--- a/vllm/model_executor/models/blip2.py
+++ b/vllm/model_executor/models/blip2.py
@@ -481,14 +481,14 @@ def _get_prompt_replacements(
         bos_token_id = tokenizer.bos_token_id
         assert isinstance(bos_token_id, int)
 
-        image_token_id = vocab["image"]
+        image_token_id = vocab["<image>"]
         num_image_tokens = self.info.get_num_image_tokens()
         image_tokens = [image_token_id] * num_image_tokens
 
         return [
             PromptReplacement(
                 modality="image",
-                target="<image>",
+                target=[bos_token_id],
                 replacement=PromptReplacementDetails(
                     full=image_tokens + [bos_token_id],
                     features=image_tokens,