From 10b8174c1b0c790a0ed1df7108128bef0b5548cd Mon Sep 17 00:00:00 2001
From: Logan Blyth
Date: Sat, 17 May 2025 07:13:03 -0400
Subject: [PATCH] docs: Inform users about the supports_tools flag (#30839)

Closes #30115

Release Notes:

- Improved documentation on Ollama `supports_tools` feature.

---------

Signed-off-by: Logan Blyth
Co-authored-by: Ben Kunkle
---
 docs/src/ai/configuration.md | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/docs/src/ai/configuration.md b/docs/src/ai/configuration.md
index bd40307cc9..b9bf07fbf5 100644
--- a/docs/src/ai/configuration.md
+++ b/docs/src/ai/configuration.md
@@ -168,6 +168,7 @@ Depending on your hardware or use-case you may wish to limit or increase the con
         "name": "qwen2.5-coder",
         "display_name": "qwen 2.5 coder 32K",
-        "max_tokens": 32768
+        "max_tokens": 32768,
+        "supports_tools": true
       }
     ]
   }
@@ -179,6 +180,12 @@ If you specify a context length that is too large for your hardware, Ollama will
 
 You may also optionally specify a value for `keep_alive` for each available model. This can be an integer (seconds) or alternately a string duration like "5m", "10m", "1h", "1d", etc., For example `"keep_alive": "120s"` will allow the remote server to unload the model (freeing up GPU VRAM) after 120seconds.
 
+The `supports_tools` option controls whether or not the model will use additional tools.
+If the model is tagged with `tools` in the Ollama catalog, this option should be supplied, and the built-in profiles `Ask` and `Write` can be used.
+If the model is not tagged with `tools` in the Ollama catalog, this
+option can still be supplied with the value `true`; however, be aware that only the
+`Minimal` built-in profile will work.
+
 ### OpenAI {#openai}
 
 > ✅ Supports tool use