From 97e9137cb7b46a56dd59504b91de310e8155af91 Mon Sep 17 00:00:00 2001
From: "Siddharth M. Bhatia"
Date: Sat, 16 Nov 2024 08:18:53 -0800
Subject: [PATCH] Update references of Ollama Llama 3.1 to model Llama 3.2
 (#20757)

Release Notes:

- N/A
---
 crates/language_model/src/provider/ollama.rs | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/crates/language_model/src/provider/ollama.rs b/crates/language_model/src/provider/ollama.rs
index ac79bb2ed584c..34859827818c7 100644
--- a/crates/language_model/src/provider/ollama.rs
+++ b/crates/language_model/src/provider/ollama.rs
@@ -35,7 +35,7 @@ pub struct OllamaSettings {
 
 #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
 pub struct AvailableModel {
-    /// The model name in the Ollama API (e.g. "llama3.1:latest")
+    /// The model name in the Ollama API (e.g. "llama3.2:latest")
     pub name: String,
     /// The model's name in Zed's UI, such as in the model selector dropdown menu in the assistant panel.
     pub display_name: Option<String>,
@@ -446,7 +446,7 @@ impl Render for ConfigurationView {
     fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
         let is_authenticated = self.state.read(cx).is_authenticated();
 
-        let ollama_intro = "Get up and running with Llama 3.1, Mistral, Gemma 2, and other large language models with Ollama.";
+        let ollama_intro = "Get up and running with Llama 3.2, Mistral, Gemma 2, and other large language models with Ollama.";
 
         let ollama_reqs =
             "Ollama must be running with at least one model installed to use it in the assistant.";
@@ -475,7 +475,7 @@ impl Render for ConfigurationView {
                             .bg(inline_code_bg)
                             .px_1p5()
                             .rounded_md()
-                            .child(Label::new("ollama run llama3.1")),
+                            .child(Label::new("ollama run llama3.2")),
                     ),
                 ),
             )