
Commit

[Misc]Fix Idefics3Model argument (vllm-project#10255)
Signed-off-by: Jee Jee Li <[email protected]>
jeejeelee authored Nov 12, 2024
1 parent 36c513a commit a838ba7
Showing 1 changed file with 4 additions and 4 deletions.
vllm/model_executor/models/idefics3.py (4 additions & 4 deletions)
@@ -420,18 +420,18 @@ def __init__(self, *, vllm_config: VllmConfig, prefix: str = ""):
         super().__init__()
 
         config = vllm_config.model_config.hf_config
-        cache_config = vllm_config.cache_config
         quant_config = vllm_config.quant_config
 
         self.config = config
         self.padding_idx = self.config.text_config.pad_token_id
         self.vocab_size = self.config.text_config.vocab_size
 
         self.vision_model = Idefics3VisionTransformer(config.vision_config,
                                                       quant_config)
         self.connector = Idefics3Connector(config)
-        self.text_model = LlamaModel(config.text_config, cache_config,
-                                     quant_config)
+        self.text_model = LlamaModel(
+            vllm_config=vllm_config.with_hf_config(config.text_config),
+            prefix=maybe_prefix(prefix, "text_model"),
+        )
 
         self.image_seq_len = int(
             ((config.vision_config.image_size //
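What the change does: Idefics3Model previously built its language backbone with the old positional call LlamaModel(config.text_config, cache_config, quant_config); the fix switches it to vLLM's newer convention, where an inner model receives the full VllmConfig (rebased onto the sub-model's HF config via with_hf_config) together with a prefix string (built with maybe_prefix) used to name the nested submodule. Below is a minimal, self-contained sketch of that calling pattern; the ModelConfig, VllmConfig, LlamaModel, and maybe_prefix definitions here are simplified stand-ins written for illustration, not the real vLLM implementations.

```python
# Minimal sketch of the constructor pattern adopted by this fix.
# All classes/helpers below are simplified stand-ins, NOT the real vLLM code.
from dataclasses import dataclass, replace
from typing import Any


def maybe_prefix(prefix: str, name: str) -> str:
    # Assumed behaviour of the helper: join a parent prefix and a child name.
    return f"{prefix}.{name}" if prefix else name


@dataclass(frozen=True)
class ModelConfig:
    hf_config: Any


@dataclass(frozen=True)
class VllmConfig:
    model_config: ModelConfig

    def with_hf_config(self, hf_config: Any) -> "VllmConfig":
        # Return a copy whose model_config points at the sub-model's HF config,
        # so an inner model can be built from the same top-level config object.
        return replace(self,
                       model_config=replace(self.model_config,
                                            hf_config=hf_config))


class LlamaModel:
    # New-style signature: keyword-only vllm_config plus a prefix string,
    # instead of positional (hf_config, cache_config, quant_config).
    def __init__(self, *, vllm_config: VllmConfig, prefix: str = "") -> None:
        self.config = vllm_config.model_config.hf_config
        self.prefix = prefix


# Usage mirroring the fixed call site in Idefics3Model.__init__:
hf_config = {"text_config": {"vocab_size": 32000}}  # hypothetical HF config
outer = VllmConfig(model_config=ModelConfig(hf_config=hf_config))
text_model = LlamaModel(
    vllm_config=outer.with_hf_config(hf_config["text_config"]),
    prefix=maybe_prefix("model", "text_model"),
)
print(text_model.prefix)  # -> model.text_model
print(text_model.config)  # -> {'vocab_size': 32000}
```

The prefix argument presumably lets the nested text model produce fully qualified parameter names (e.g. model.text_model....), which is why maybe_prefix(prefix, "text_model") is threaded through at this call site.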
