Skip to content

Commit

Permalink
[Frontend] Fix multiple values for keyword argument error (vllm-project#10075) (vllm-project#10076)
Browse files Browse the repository at this point in the history

Signed-off-by: Lei <[email protected]>
Signed-off-by: Isotr0py <[email protected]>
  • Loading branch information
DIYer22 authored and Isotr0py committed Nov 8, 2024
1 parent e253590 commit cc22826
Showing 1 changed file with 11 additions and 12 deletions.
23 changes: 11 additions & 12 deletions vllm/entrypoints/openai/serving_engine.py
Original file line number Diff line number Diff line change
Expand Up @@ -443,29 +443,28 @@ async def _preprocess_chat(
tokenizer,
)

_chat_template_kwargs: Dict[str, Any] = dict(
chat_template=chat_template,
add_generation_prompt=add_generation_prompt,
continue_final_message=continue_final_message,
tools=tool_dicts,
documents=documents,
)
_chat_template_kwargs.update(chat_template_kwargs or {})

request_prompt: Union[str, List[int]]
is_mistral_tokenizer = isinstance(tokenizer, MistralTokenizer)
if is_mistral_tokenizer:
request_prompt = apply_mistral_chat_template(
tokenizer,
messages=messages,
chat_template=chat_template,
add_generation_prompt=add_generation_prompt,
continue_final_message=continue_final_message,
tools=tool_dicts,
documents=documents,
**(chat_template_kwargs or {}),
**_chat_template_kwargs,
)
else:
request_prompt = apply_hf_chat_template(
tokenizer,
conversation=conversation,
chat_template=chat_template,
add_generation_prompt=add_generation_prompt,
continue_final_message=continue_final_message,
tools=tool_dicts,
documents=documents,
**(chat_template_kwargs or {}),
**_chat_template_kwargs,
)

mm_data = await mm_data_future
Expand Down

0 comments on commit cc22826

Please sign in to comment.