Skip to content

Commit 63778b7

Browse files
authored
Fix failure when loading JSON data containing '\n' (#62)
* Fix failure when loading JSON data containing '\n' * Update logger.info
1 parent 0451627 commit 63778b7

File tree

1 file changed

+8
-4
lines changed

1 file changed

+8
-4
lines changed

llmserve/backend/llm/pipelines/default_pipeline.py

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -42,8 +42,7 @@ def __init__(
4242

4343
def preprocess(self, prompts: List[str], **generate_kwargs):
4444
st = time.monotonic()
45-
prompt_text = construct_prompts(
46-
prompts, prompt_format=self.prompt_format)
45+
prompt_text = construct_prompts(prompts, prompt_format=self.prompt_format)
4746
instruction_text = construct_prompts(prompts, prompt_format="")
4847

4948
if generate_kwargs.get("eos_token", False):
@@ -57,9 +56,14 @@ def preprocess(self, prompts: List[str], **generate_kwargs):
5756

5857
try:
5958
prompt_text_bak = prompt_text
60-
prompt_text = [json.loads(prompt) for prompt in prompt_text]
59+
logger.info(f"call json.loads")
60+
# for p in prompt_text:
61+
# logger.info(f"{p}")
62+
prompt_text = [json.loads(prompt, strict=False) for prompt in prompt_text]
63+
logger.info(f"call tokenizer.apply_chat_template")
6164
prompt_text = [self.tokenizer.apply_chat_template(prompt_obj, tokenize=False, add_generation_prompt=True) for prompt_obj in prompt_text]
62-
except:
65+
except Exception as ex:
66+
logger.error(f"Exception apply_chat_template: {ex}")
6367
logger.info("Seems no chat template from user or the model donot has a 'chat template'")
6468
prompt_text = prompt_text_bak
6569

0 commit comments

Comments (0)