From 4ec853785884c3ef2cec600589e72b0abee6d73b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Gabriel=20Mart=C3=ADn=20Bl=C3=A1zquez?=
Date: Mon, 2 Sep 2024 18:51:53 +0200
Subject: [PATCH] Include system prompt

---
 .../examples/streamed_text_generation/main.rs | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/candle-holder-examples/examples/streamed_text_generation/main.rs b/candle-holder-examples/examples/streamed_text_generation/main.rs
index cc6dfe6..a924958 100644
--- a/candle-holder-examples/examples/streamed_text_generation/main.rs
+++ b/candle-holder-examples/examples/streamed_text_generation/main.rs
@@ -46,8 +46,12 @@ fn main() -> Result<()> {
     let model = AutoModelForCausalLM::from_pretrained(args.model, &device, None, None)?;
 
     let mut encodings = if args.apply_chat_template {
+        let mut messages = vec![Message::user(args.prompt.clone())];
+        if let Some(system_prompt) = args.system_prompt {
+            messages.insert(0, Message::system(system_prompt))
+        }
         tokenizer
-            .apply_chat_template_and_encode(vec![Message::user(args.prompt)], true)
+            .apply_chat_template_and_encode(messages, true)
             .map_err(Error::msg)?
     } else {
         tokenizer
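
Note: the hunk reads `args.system_prompt`, but the corresponding argument definition is outside this diff. A minimal sketch of what the example's clap-derived `Args` struct presumably looks like after this patch is shown below; the exact field names, doc comments, and defaults other than `system_prompt: Option<String>` are assumptions for illustration, not part of the patch.

```rust
use clap::Parser;

/// Sketch of the CLI arguments assumed by the patched example.
#[derive(Parser)]
struct Args {
    /// Model repo id to load (assumed existing field).
    #[arg(long)]
    model: String,

    /// The user prompt (assumed existing field).
    #[arg(long)]
    prompt: String,

    /// Whether to apply the tokenizer's chat template (assumed existing field).
    #[arg(long, default_value_t = false)]
    apply_chat_template: bool,

    /// Optional system prompt; when set, the patch prepends it to the
    /// message list as `Message::system(...)` before the user message.
    #[arg(long)]
    system_prompt: Option<String>,
}
```

Under those assumptions, an invocation exercising the new path would look something like `cargo run --example streamed_text_generation -- --model <repo-id> --prompt "Hello" --system-prompt "You are a helpful assistant." --apply-chat-template`. Inserting the system message at index 0 (rather than pushing it) keeps the conventional system-first message ordering that chat templates expect.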