@@ -1120,9 +1120,7 @@ async def _preprocess_chat(
         _chat_template_kwargs.update(chat_template_kwargs or {})

         request_prompt: str | list[int]
-        print(
-            f"--------{isinstance(tokenizer, DeepseekV32Tokenizer)}--{type(tokenizer)}--"
-        )
+
         if tokenizer is None:
             request_prompt = "placeholder"
         elif isinstance(tokenizer, MistralTokenizer):
@@ -1131,21 +1129,20 @@ async def _preprocess_chat(
                 messages=messages,
                 **_chat_template_kwargs,
             )
+        elif isinstance(tokenizer, DeepseekV32Tokenizer):
+            request_prompt = tokenizer.apply_chat_template(
+                conversation=conversation,
+                messages=messages,
+                model_config=model_config,
+                **_chat_template_kwargs,
+            )
         else:
-            try:
-                request_prompt = apply_hf_chat_template(
-                    tokenizer=tokenizer,
-                    conversation=conversation,
-                    model_config=model_config,
-                    **_chat_template_kwargs,
-                )
-            except ValueError:
-                request_prompt = tokenizer.apply_chat_template(
-                    conversation=conversation,
-                    messages=messages,
-                    model_config=model_config,
-                    **_chat_template_kwargs,
-                )
+            request_prompt = apply_hf_chat_template(
+                tokenizer=tokenizer,
+                conversation=conversation,
+                model_config=model_config,
+                **_chat_template_kwargs,
+            )

         mm_data = await mm_data_future
0 commit comments