Mention Llama 3.3 in Ollama config panel (#21866)
Trivial, but makes us not look outdated.
This commit is contained in:
parent 13a81e454a
commit fff12ec1e5
1 changed file with 1 addition and 1 deletion
@@ -447,7 +447,7 @@ impl Render for ConfigurationView {
     fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
         let is_authenticated = self.state.read(cx).is_authenticated();
 
-        let ollama_intro = "Get up and running with Llama 3.2, Mistral, Gemma 2, and other large language models with Ollama.";
+        let ollama_intro = "Get up and running with Llama 3.3, Mistral, Gemma 2, and other large language models with Ollama.";
         let ollama_reqs =
             "Ollama must be running with at least one model installed to use it in the assistant.";
 