From 6d687a2c2c26507d0dd8ab49ecc5c5871266a419 Mon Sep 17 00:00:00 2001 From: tidely <43219534+tidely@users.noreply.github.com> Date: Fri, 30 May 2025 23:12:39 +0300 Subject: [PATCH] ollama: Change default context size to 4096 (#31682) Ollama increased their default context size from 2048 to 4096 tokens in version v0.6.7, which was released over a month ago. https://github.com/ollama/ollama/releases/tag/v0.6.7 Release Notes: - ollama: Update default model context to 4096 (matching upstream) --- crates/ollama/src/ollama.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/ollama/src/ollama.rs b/crates/ollama/src/ollama.rs index a18c134c4cc5476afcf1ea744238ecbe7f05594d..a42510279c56b63410d0b68ec63d00c273a8d042 100644 --- a/crates/ollama/src/ollama.rs +++ b/crates/ollama/src/ollama.rs @@ -42,7 +42,7 @@ pub struct Model { fn get_max_tokens(name: &str) -> usize { /// Default context length for unknown models. - const DEFAULT_TOKENS: usize = 2048; + const DEFAULT_TOKENS: usize = 4096; /// Magic number. Lets many Ollama models work with ~16GB of ram. const MAXIMUM_TOKENS: usize = 16384;