From ed8a2c8793cb3f14bed3f32fb791f620b0667739 Mon Sep 17 00:00:00 2001 From: KCaverly Date: Wed, 18 Oct 2023 10:35:11 -0400 Subject: [PATCH] revert change to return only the text and instead return all text inside markdown blocks --- crates/assistant/src/prompts.rs | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/crates/assistant/src/prompts.rs b/crates/assistant/src/prompts.rs index 7aafe75920..18e9e18f7d 100644 --- a/crates/assistant/src/prompts.rs +++ b/crates/assistant/src/prompts.rs @@ -243,7 +243,7 @@ pub fn generate_content_prompt( } prompts.push("Never make remarks about the output.".to_string()); prompts.push("Do not return any text, except the generated code.".to_string()); - prompts.push("Do not wrap your text in a Markdown block".to_string()); + prompts.push("Always wrap your code in a Markdown block".to_string()); let current_messages = [ChatCompletionRequestMessage { role: "user".to_string(), @@ -256,7 +256,11 @@ pub fn generate_content_prompt( tiktoken_rs::num_tokens_from_messages(model, &current_messages) { let max_token_count = tiktoken_rs::model::get_context_size(model); - let intermediate_token_count = max_token_count - current_token_count; + let intermediate_token_count = if max_token_count > current_token_count { + max_token_count - current_token_count + } else { + 0 + }; if intermediate_token_count < RESERVED_TOKENS_FOR_GENERATION { 0