Move default template so it doesn't override tokenizer (#987)

Nathan Sarrazin 2024-04-08 12:39:20 +02:00 committed by GitHub
parent 896e0fea5b
commit 9437108347
1 changed file with 4 additions and 7 deletions


@@ -44,11 +44,7 @@ const modelConfig = z.object({
 	datasetUrl: z.string().url().optional(),
 	preprompt: z.string().default(""),
 	prepromptUrl: z.string().url().optional(),
-	chatPromptTemplate: z
-		.string()
-		.default(
-			"{{#if @root.preprompt}}<|im_start|>system\n{{@root.preprompt}}<|im_end|>\n{{/if}}{{#each messages}}{{#ifUser}}<|im_start|>user\n{{content}}<|im_end|>\n<|im_start|>assistant\n{{/ifUser}}{{#ifAssistant}}{{content}}<|im_end|>\n{{/ifAssistant}}{{/each}}"
-		), // ChatML
+	chatPromptTemplate: z.string().optional(),
 	promptExamples: z
 		.array(
 			z.object({
@@ -86,8 +82,9 @@ async function getChatPromptRender(
 	let tokenizer: PreTrainedTokenizer;
 
 	if (!m.tokenizer) {
-		throw new Error(
-			"No tokenizer specified and no chat prompt template specified for model " + m.name
+		return compileTemplate<ChatTemplateInput>(
+			"{{#if @root.preprompt}}<|im_start|>system\n{{@root.preprompt}}<|im_end|>\n{{/if}}{{#each messages}}{{#ifUser}}<|im_start|>user\n{{content}}<|im_end|>\n<|im_start|>assistant\n{{/ifUser}}{{#ifAssistant}}{{content}}<|im_end|>\n{{/ifAssistant}}{{/each}}",
+			m
 		);
 	}
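
Why the old default got in the way: zod's .default() always fills in a value when the field is missing from the config, so chatPromptTemplate was never undefined and the tokenizer's own chat template could never take effect; .optional() leaves the field undefined so the fallback can happen inside getChatPromptRender instead. A minimal sketch of that difference, using a stand-alone schema with illustrative names rather than the project's actual config (only the zod calls are taken from the diff above):

import { z } from "zod";

// Before: .default() always supplies the ChatML template,
// so a model config without chatPromptTemplate still gets one
// and the tokenizer's chat template is never consulted.
const before = z.object({
	chatPromptTemplate: z.string().default("<ChatML template>"),
});

// After: .optional() leaves the field undefined when unset,
// letting the caller fall back to the tokenizer or, failing that,
// to the ChatML string now hard-coded in the !m.tokenizer branch.
const after = z.object({
	chatPromptTemplate: z.string().optional(),
});

console.log(before.parse({}).chatPromptTemplate); // "<ChatML template>"
console.log(after.parse({}).chatPromptTemplate); // undefined

Together with the second hunk, this suggests the resolution order after the change is: an explicitly configured chatPromptTemplate, then the model tokenizer's chat template, then the hard-coded ChatML fallback for models with no tokenizer at all.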