From b9bb27512caf402727680fc3ad6926f9006adfce Mon Sep 17 00:00:00 2001
From: KCaverly
Date: Wed, 18 Oct 2023 13:10:31 -0400
Subject: [PATCH] fix template ordering during prompt chain generation

---
 crates/ai/src/templates/base.rs | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/crates/ai/src/templates/base.rs b/crates/ai/src/templates/base.rs
index d4882bafc9..db437a029c 100644
--- a/crates/ai/src/templates/base.rs
+++ b/crates/ai/src/templates/base.rs
@@ -77,8 +77,6 @@ impl PromptChain {
         let mut sorted_indices = (0..self.templates.len()).collect::<Vec<usize>>();
         sorted_indices.sort_by_key(|&i| Reverse(&self.templates[i].0));
 
-        let mut prompts = Vec::new();
-
         // If Truncate
         let mut tokens_outstanding = if truncate {
             Some(self.args.model.capacity()? - self.args.reserved_tokens)
@@ -86,6 +84,7 @@
             None
         };
 
+        let mut prompts = vec!["".to_string(); sorted_indices.len()];
         for idx in sorted_indices {
             let (_, template) = &self.templates[idx];
             if let Some((template_prompt, prompt_token_count)) =
@@ -96,7 +95,7 @@
                     &prompt_token_count, &template_prompt
                 );
                 if template_prompt != "" {
-                    prompts.push(template_prompt);
+                    prompts[idx] = template_prompt;
 
                     if let Some(remaining_tokens) = tokens_outstanding {
                         let new_tokens = prompt_token_count + seperator_tokens;
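
The technique the patch adopts: templates are still *visited* from highest to lowest priority (so truncation trims the least important ones first), but each rendered prompt is written back to its original slot rather than pushed, so the final concatenation keeps template order. Below is a minimal, self-contained sketch of that idea, assuming higher numbers mean higher priority; the template names, priority values, and `main` harness are illustrative only, not the crate's real API.

```rust
use std::cmp::Reverse;

fn main() {
    // (priority, content): higher number = more important. Values invented
    // for illustration; the real code uses PromptPriority and templates.
    let templates = [(2, "system preamble"), (0, "conversation history"), (1, "user query")];

    // Visit templates from highest to lowest priority, as the patched loop does.
    let mut sorted_indices: Vec<usize> = (0..templates.len()).collect();
    sorted_indices.sort_by_key(|&i| Reverse(templates[i].0));

    // Before the fix: pushing emits prompts in priority order, scrambling the chain.
    let mut pushed = Vec::new();
    // After the fix: writing back at the original index preserves template order.
    let mut indexed = vec![""; templates.len()];

    for idx in sorted_indices {
        let (_, content) = templates[idx];
        pushed.push(content);
        indexed[idx] = content;
    }

    // Old behavior: ordered by priority, not by position in the chain.
    assert_eq!(pushed, ["system preamble", "user query", "conversation history"]);
    // New behavior: original template order is preserved.
    assert_eq!(indexed, ["system preamble", "conversation history", "user query"]);
}
```

In the real code the `if template_prompt != ""` guard means truncated-away templates simply leave their pre-filled empty string in place, which a later join can skip or collapse.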