Ollama llama3.2 default context size (#18366)

Created by Peter Tripp

Release Notes:

- Ollama: Added llama3.2 support with a 128K-token default context size

Change summary

crates/ollama/src/ollama.rs | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)

Detailed changes

crates/ollama/src/ollama.rs 🔗

@@ -83,7 +83,7 @@ fn get_max_tokens(name: &str) -> usize {
         "codellama" | "starcoder2" => 16384,
         "mistral" | "codestral" | "mixstral" | "llava" | "qwen2" | "dolphin-mixtral" => 32768,
         "llama3.1" | "phi3" | "phi3.5" | "command-r" | "deepseek-coder-v2" | "yi-coder"
-        | "qwen2.5-coder" => 128000,
+        | "llama3.2" | "qwen2.5-coder" => 128000,
         _ => DEFAULT_TOKENS,
     }
     .clamp(1, MAXIMUM_TOKENS)