Ollama llama3.2 default context size (#18366)

Release Notes:

- Ollama: Added llama3.2 support
This commit is contained in:
Peter Tripp 2024-09-25 22:01:12 +00:00 committed by GitHub
parent 4b4565fb7a
commit 7398f795e3
No known key found for this signature in database
GPG key ID: B5690EEEBB952194

View file

@@ -83,7 +83,7 @@ fn get_max_tokens(name: &str) -> usize {
     "codellama" | "starcoder2" => 16384,
     "mistral" | "codestral" | "mixstral" | "llava" | "qwen2" | "dolphin-mixtral" => 32768,
     "llama3.1" | "phi3" | "phi3.5" | "command-r" | "deepseek-coder-v2" | "yi-coder"
-        | "qwen2.5-coder" => 128000,
+        | "llama3.2" | "qwen2.5-coder" => 128000,
     _ => DEFAULT_TOKENS,
 }
 .clamp(1, MAXIMUM_TOKENS)