From c7158f0bd7ec3324fe299b4cf3e6b4e34a492436 Mon Sep 17 00:00:00 2001 From: Oleksiy Syvokon Date: Tue, 22 Jul 2025 14:23:50 +0300 Subject: [PATCH 01/25] Improve distinguishing user from agent edits (#34716) We no longer rely on the `author` field to tell whether a change was made by the user or the agent. The `author` can be set to `User` in many situations that are not really user-made edits, such as saving a file, accepting a change, auto-formatting, and more. I started tracking and fixing some of these cases, but found that inspecting changes in `diff_base` is a more reliable method. Also, we no longer show empty diffs. For example, if the user adds a line and then removes the same line, the final diff is empty, even though the buffer is marked as user-changed. Now we won't show such edits. There are still some issues to address: - When a user edits within an unaccepted agent-written block, this change becomes a part of the agent's edit. Rejecting this block will lose the user's edits. It won't be displayed in project notifications, either. - Accepting an agent block counts as a user-made edit. - The agent starts to call the `project_notifications` tool after seeing enough auto-calls. Release Notes: - N/A --- crates/agent/src/thread.rs | 20 ++++--- crates/assistant_tool/src/action_log.rs | 56 +++++++++++--------- crates/assistant_tools/src/edit_file_tool.rs | 3 ++ 3 files changed, 42 insertions(+), 37 deletions(-) diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index e50763535a..f8a7827615 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -47,7 +47,7 @@ use std::{ time::{Duration, Instant}, }; use thiserror::Error; -use util::{ResultExt as _, debug_panic, post_inc}; +use util::{ResultExt as _, post_inc}; use uuid::Uuid; use zed_llm_client::{CompletionIntent, CompletionRequestStatus, UsageLimit}; @@ -1582,20 +1582,18 @@ impl Thread { model: Arc, cx: &mut App, ) -> Option { - let action_log = self.action_log.read(cx); + // Represent notification as a simulated `project_notifications` tool call + let tool_name = Arc::from("project_notifications"); + let tool = self.tools.read(cx).tool(&tool_name, cx)?; - if !action_log.has_unnotified_user_edits() { + if !self.profile.is_tool_enabled(tool.source(), tool.name(), cx) { return None; } - // Represent notification as a simulated `project_notifications` tool call - let tool_name = Arc::from("project_notifications"); - let Some(tool) = self.tools.read(cx).tool(&tool_name, cx) else { - debug_panic!("`project_notifications` tool not found"); - return None; - }; - - if !self.profile.is_tool_enabled(tool.source(), tool.name(), cx) { + if self + .action_log + .update(cx, |log, cx| log.unnotified_user_edits(cx).is_none()) + { return None; } diff --git a/crates/assistant_tool/src/action_log.rs b/crates/assistant_tool/src/action_log.rs index ecbbcc785e..672c048872 100644 --- a/crates/assistant_tool/src/action_log.rs +++ b/crates/assistant_tool/src/action_log.rs @@ -51,23 +51,13 @@ impl ActionLog { Some(self.tracked_buffers.get(buffer)?.snapshot.clone()) } - pub fn has_unnotified_user_edits(&self) -> bool { - self.tracked_buffers - .values() - .any(|tracked| tracked.has_unnotified_user_edits) - } - /// Return a unified diff patch with user edits made since last read or notification pub fn unnotified_user_edits(&self, cx: &Context) -> Option { - if !self.has_unnotified_user_edits() { - return None; - } - - let unified_diff = self + let diffs = self .tracked_buffers .values() .filter_map(|tracked| { - if 
!tracked.has_unnotified_user_edits { + if !tracked.may_have_unnotified_user_edits { return None; } @@ -95,9 +85,13 @@ impl ActionLog { Some(result) }) - .collect::>() - .join("\n\n"); + .collect::>(); + if diffs.is_empty() { + return None; + } + + let unified_diff = diffs.join("\n\n"); Some(unified_diff) } @@ -106,7 +100,7 @@ impl ActionLog { pub fn flush_unnotified_user_edits(&mut self, cx: &Context) -> Option { let patch = self.unnotified_user_edits(cx); self.tracked_buffers.values_mut().for_each(|tracked| { - tracked.has_unnotified_user_edits = false; + tracked.may_have_unnotified_user_edits = false; tracked.last_seen_base = tracked.diff_base.clone(); }); patch @@ -185,7 +179,7 @@ impl ActionLog { version: buffer.read(cx).version(), diff, diff_update: diff_update_tx, - has_unnotified_user_edits: false, + may_have_unnotified_user_edits: false, _open_lsp_handle: open_lsp_handle, _maintain_diff: cx.spawn({ let buffer = buffer.clone(); @@ -337,27 +331,34 @@ impl ActionLog { let new_snapshot = buffer_snapshot.clone(); let unreviewed_edits = tracked_buffer.unreviewed_edits.clone(); let edits = diff_snapshots(&old_snapshot, &new_snapshot); - if let ChangeAuthor::User = author - && !edits.is_empty() - { - tracked_buffer.has_unnotified_user_edits = true; - } + let mut has_user_changes = false; async move { if let ChangeAuthor::User = author { - apply_non_conflicting_edits( + has_user_changes = apply_non_conflicting_edits( &unreviewed_edits, edits, &mut base_text, new_snapshot.as_rope(), ); } - (Arc::new(base_text.to_string()), base_text) + + (Arc::new(base_text.to_string()), base_text, has_user_changes) } }); anyhow::Ok(rebase) })??; - let (new_base_text, new_diff_base) = rebase.await; + let (new_base_text, new_diff_base, has_user_changes) = rebase.await; + + this.update(cx, |this, _| { + let tracked_buffer = this + .tracked_buffers + .get_mut(buffer) + .context("buffer not tracked") + .unwrap(); + tracked_buffer.may_have_unnotified_user_edits |= has_user_changes; + })?; + Self::update_diff( this, buffer, @@ -829,11 +830,12 @@ fn apply_non_conflicting_edits( edits: Vec>, old_text: &mut Rope, new_text: &Rope, -) { +) -> bool { let mut old_edits = patch.edits().iter().cloned().peekable(); let mut new_edits = edits.into_iter().peekable(); let mut applied_delta = 0i32; let mut rebased_delta = 0i32; + let mut has_made_changes = false; while let Some(mut new_edit) = new_edits.next() { let mut conflict = false; @@ -883,8 +885,10 @@ fn apply_non_conflicting_edits( &new_text.chunks_in_range(new_bytes).collect::(), ); applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32; + has_made_changes = true; } } + has_made_changes } fn diff_snapshots( @@ -958,7 +962,7 @@ struct TrackedBuffer { diff: Entity, snapshot: text::BufferSnapshot, diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>, - has_unnotified_user_edits: bool, + may_have_unnotified_user_edits: bool, _open_lsp_handle: OpenLspBufferHandle, _maintain_diff: Task<()>, _subscription: Subscription, diff --git a/crates/assistant_tools/src/edit_file_tool.rs b/crates/assistant_tools/src/edit_file_tool.rs index 0423f56145..6413677bd9 100644 --- a/crates/assistant_tools/src/edit_file_tool.rs +++ b/crates/assistant_tools/src/edit_file_tool.rs @@ -278,6 +278,9 @@ impl Tool for EditFileTool { .unwrap_or(false); if format_on_save_enabled { + action_log.update(cx, |log, cx| { + log.buffer_edited(buffer.clone(), cx); + })?; let format_task = project.update(cx, |project, cx| { project.format( HashSet::from_iter([buffer.clone()]), 
From a3850b3d38928e87f01b67d43755b5a5cc410605 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Tue, 22 Jul 2025 09:42:34 -0300 Subject: [PATCH 02/25] agent: Add `use_modifier_to_send` section in the settings view (#34866) This PR also converts all of these switch-based settings to use the new `SwitchField` component, introduced in https://github.com/zed-industries/zed/pull/34713. Release Notes: - agent: Added the ability to change the `use_modifier_to_send` setting from the agent panel settings UI. --- crates/agent_ui/src/agent_configuration.rs | 162 ++++++++------------- crates/ui/src/components/toggle.rs | 11 +- 2 files changed, 69 insertions(+), 104 deletions(-) diff --git a/crates/agent_ui/src/agent_configuration.rs b/crates/agent_ui/src/agent_configuration.rs index 0697f5dee7..b5ad6ba37c 100644 --- a/crates/agent_ui/src/agent_configuration.rs +++ b/crates/agent_ui/src/agent_configuration.rs @@ -28,7 +28,7 @@ use proto::Plan; use settings::{Settings, update_settings_file}; use ui::{ Chip, ContextMenu, Disclosure, Divider, DividerColor, ElevationIndex, Indicator, PopoverMenu, - Scrollbar, ScrollbarState, Switch, SwitchColor, Tooltip, prelude::*, + Scrollbar, ScrollbarState, Switch, SwitchColor, SwitchField, Tooltip, prelude::*, }; use util::ResultExt as _; use workspace::Workspace; @@ -330,119 +330,74 @@ impl AgentConfiguration { fn render_command_permission(&mut self, cx: &mut Context) -> impl IntoElement { let always_allow_tool_actions = AgentSettings::get_global(cx).always_allow_tool_actions; + let fs = self.fs.clone(); - h_flex() - .gap_4() - .justify_between() - .flex_wrap() - .child( - v_flex() - .gap_0p5() - .max_w_5_6() - .child(Label::new("Allow running editing tools without asking for confirmation")) - .child( - Label::new( - "The agent can perform potentially destructive actions without asking for your confirmation.", - ) - .color(Color::Muted), - ), - ) - .child( - Switch::new( - "always-allow-tool-actions-switch", - always_allow_tool_actions.into(), - ) - .color(SwitchColor::Accent) - .on_click({ - let fs = self.fs.clone(); - move |state, _window, cx| { - let allow = state == &ToggleState::Selected; - update_settings_file::( - fs.clone(), - cx, - move |settings, _| { - settings.set_always_allow_tool_actions(allow); - }, - ); - } - }), - ) + SwitchField::new( + "single-file-review", + "Enable single-file agent reviews", + "Agent edits are also displayed in single-file editors for review.", + always_allow_tool_actions, + move |state, _window, cx| { + let allow = state == &ToggleState::Selected; + update_settings_file::(fs.clone(), cx, move |settings, _| { + settings.set_always_allow_tool_actions(allow); + }); + }, + ) } fn render_single_file_review(&mut self, cx: &mut Context) -> impl IntoElement { let single_file_review = AgentSettings::get_global(cx).single_file_review; + let fs = self.fs.clone(); - h_flex() - .gap_4() - .justify_between() - .flex_wrap() - .child( - v_flex() - .gap_0p5() - .max_w_5_6() - .child(Label::new("Enable single-file agent reviews")) - .child( - Label::new( - "Agent edits are also displayed in single-file editors for review.", - ) - .color(Color::Muted), - ), - ) - .child( - Switch::new("single-file-review-switch", single_file_review.into()) - .color(SwitchColor::Accent) - .on_click({ - let fs = self.fs.clone(); - move |state, _window, cx| { - let allow = state == &ToggleState::Selected; - update_settings_file::( - fs.clone(), - cx, - move |settings, _| { - settings.set_single_file_review(allow); - }, 
- ); - } - }), - ) + SwitchField::new( + "single-file-review", + "Enable single-file agent reviews", + "Agent edits are also displayed in single-file editors for review.", + single_file_review, + move |state, _window, cx| { + let allow = state == &ToggleState::Selected; + update_settings_file::(fs.clone(), cx, move |settings, _| { + settings.set_single_file_review(allow); + }); + }, + ) } fn render_sound_notification(&mut self, cx: &mut Context) -> impl IntoElement { let play_sound_when_agent_done = AgentSettings::get_global(cx).play_sound_when_agent_done; + let fs = self.fs.clone(); - h_flex() - .gap_4() - .justify_between() - .flex_wrap() - .child( - v_flex() - .gap_0p5() - .max_w_5_6() - .child(Label::new("Play sound when finished generating")) - .child( - Label::new( - "Hear a notification sound when the agent is done generating changes or needs your input.", - ) - .color(Color::Muted), - ), - ) - .child( - Switch::new("play-sound-notification-switch", play_sound_when_agent_done.into()) - .color(SwitchColor::Accent) - .on_click({ - let fs = self.fs.clone(); - move |state, _window, cx| { - let allow = state == &ToggleState::Selected; - update_settings_file::( - fs.clone(), - cx, - move |settings, _| { - settings.set_play_sound_when_agent_done(allow); - }, - ); - } - }), - ) + SwitchField::new( + "sound-notification", + "Play sound when finished generating", + "Hear a notification sound when the agent is done generating changes or needs your input.", + play_sound_when_agent_done, + move |state, _window, cx| { + let allow = state == &ToggleState::Selected; + update_settings_file::(fs.clone(), cx, move |settings, _| { + settings.set_play_sound_when_agent_done(allow); + }); + }, + ) + } + + fn render_modifier_to_send(&mut self, cx: &mut Context) -> impl IntoElement { + let use_modifier_to_send = AgentSettings::get_global(cx).use_modifier_to_send; + let fs = self.fs.clone(); + + SwitchField::new( + "modifier-send", + "Use modifier to submit a message", + "Make a modifier (cmd-enter on macOS, ctrl-enter on Linux) required to send messages.", + use_modifier_to_send, + move |state, _window, cx| { + let allow = state == &ToggleState::Selected; + update_settings_file::(fs.clone(), cx, move |settings, _| { + settings.set_use_modifier_to_send(allow); + }); + }, + ) } fn render_general_settings_section(&mut self, cx: &mut Context) -> impl IntoElement { @@ -456,6 +411,7 @@ impl AgentConfiguration { .child(self.render_command_permission(cx)) .child(self.render_single_file_review(cx)) .child(self.render_sound_notification(cx)) + .child(self.render_modifier_to_send(cx)) } fn render_zed_plan_info(&self, plan: Option, cx: &mut Context) -> impl IntoElement { diff --git a/crates/ui/src/components/toggle.rs b/crates/ui/src/components/toggle.rs index 759b225434..cf2a56b1c9 100644 --- a/crates/ui/src/components/toggle.rs +++ b/crates/ui/src/components/toggle.rs @@ -588,7 +588,7 @@ impl SwitchField { toggle_state: toggle_state.into(), on_click: Arc::new(on_click), disabled: false, - color: SwitchColor::default(), + color: SwitchColor::Accent, } } @@ -634,6 +634,15 @@ impl RenderOnce for SwitchField { } }), ) + .when(!self.disabled, |this| { + this.on_click({ + let on_click = self.on_click.clone(); + let toggle_state = self.toggle_state; + move |_click, window, cx| { + (on_click)(&toggle_state.inverse(), window, cx); + } + }) + }) } } From 31aab89ab0343b9b6388224f4e8ec2fa3f91fe2e Mon Sep 17 00:00:00 2001 From: Umesh Yadav <23421535+imumesh18@users.noreply.github.com> Date: Tue, 22 Jul 2025 18:24:06 +0530 
Subject: [PATCH 03/25] ai_onboarding: Fix API key onboarding callout not showing properly (#34880) The current onboarding callout for ApiKeysWithProviders is broken. Before/after screenshots: CleanShot 2025-07-22 at 16 21 53@2x | CleanShot 2025-07-22 at 16 22
38@2x | cc @danilo-leal Release Notes: - N/A --------- Co-authored-by: Danilo Leal --- .../src/agent_api_keys_onboarding.rs | 29 +++++++++++++------ 1 file changed, 20 insertions(+), 9 deletions(-) diff --git a/crates/ai_onboarding/src/agent_api_keys_onboarding.rs b/crates/ai_onboarding/src/agent_api_keys_onboarding.rs index 4f9e20cf77..883317e566 100644 --- a/crates/ai_onboarding/src/agent_api_keys_onboarding.rs +++ b/crates/ai_onboarding/src/agent_api_keys_onboarding.rs @@ -38,10 +38,6 @@ impl ApiKeysWithProviders { .map(|provider| (provider.icon(), provider.name().0.clone())) .collect() } - - pub fn has_providers(&self) -> bool { - !self.configured_providers.is_empty() - } } impl Render for ApiKeysWithProviders { @@ -53,11 +49,10 @@ impl Render for ApiKeysWithProviders { .map(|(icon, name)| { h_flex() .gap_1p5() - .child(Icon::new(icon).size(IconSize::Small).color(Color::Muted)) + .child(Icon::new(icon).size(IconSize::XSmall).color(Color::Muted)) .child(Label::new(name)) }); - - h_flex() + div() .mx_2p5() .p_1() .pb_0() @@ -85,8 +80,24 @@ impl Render for ApiKeysWithProviders { .border_x_1() .border_color(cx.theme().colors().border) .bg(cx.theme().colors().panel_background) - .child(Icon::new(IconName::Info).size(IconSize::XSmall).color(Color::Muted)) - .child(Label::new("Or start now using API keys from your environment for the following providers:").color(Color::Muted)) + .child( + h_flex() + .min_w_0() + .gap_2() + .child( + Icon::new(IconName::Info) + .size(IconSize::XSmall) + .color(Color::Muted) + ) + .child( + div() + .w_full() + .child( + Label::new("Or start now using API keys from your environment for the following providers:") + .color(Color::Muted) + ) + ) + ) .children(configured_providers_list) ) } From 30177b87d6c1fb76f809f470f1ea42249f7ab662 Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Tue, 22 Jul 2025 08:51:30 -0500 Subject: [PATCH 04/25] Fix detection of pending bindings when binding in parent context matches (#34856) Broke in #34664 Release Notes: - N/A *or* Added/Fixed/Improved ... 
Co-authored-by: Conrad --- crates/gpui/src/keymap.rs | 314 ++++++++++++++++++++++++++++++-------- 1 file changed, 249 insertions(+), 65 deletions(-) diff --git a/crates/gpui/src/keymap.rs b/crates/gpui/src/keymap.rs index 174dbc80f0..69700e64dc 100644 --- a/crates/gpui/src/keymap.rs +++ b/crates/gpui/src/keymap.rs @@ -5,7 +5,7 @@ pub use binding::*; pub use context::*; use crate::{Action, Keystroke, is_no_action}; -use collections::HashMap; +use collections::{HashMap, HashSet}; use smallvec::SmallVec; use std::any::TypeId; @@ -167,76 +167,49 @@ impl Keymap { input: &[Keystroke], context_stack: &[KeyContext], ) -> (SmallVec<[(BindingIndex, KeyBinding); 1]>, bool) { - let mut possibilities = self - .bindings() - .enumerate() - .rev() - .filter_map(|(ix, binding)| { - let depth = self.binding_enabled(binding, &context_stack)?; - let pending = binding.match_keystrokes(input)?; - Some((depth, BindingIndex(ix), binding, pending)) - }) - .collect::>(); - possibilities.sort_by(|(depth_a, ix_a, _, _), (depth_b, ix_b, _, _)| { + let mut bindings: SmallVec<[(usize, BindingIndex, &KeyBinding); 1]> = SmallVec::new(); + let mut pending_bindings: SmallVec<[(BindingIndex, &KeyBinding); 1]> = SmallVec::new(); + + for (ix, binding) in self.bindings().enumerate().rev() { + let Some(depth) = self.binding_enabled(binding, &context_stack) else { + continue; + }; + let Some(pending) = binding.match_keystrokes(input) else { + continue; + }; + + if !pending { + bindings.push((depth, BindingIndex(ix), binding)) + } else { + pending_bindings.push((BindingIndex(ix), binding)) + } + } + + bindings.sort_by(|(depth_a, ix_a, _), (depth_b, ix_b, _)| { depth_b.cmp(depth_a).then(ix_b.cmp(ix_a)) }); - let mut bindings: SmallVec<[(BindingIndex, KeyBinding, usize); 1]> = SmallVec::new(); - - // (pending, is_no_action, depth, keystrokes) - let mut pending_info_opt: Option<(bool, bool, usize, &[Keystroke])> = None; - - 'outer: for (depth, binding_index, binding, pending) in possibilities { - let is_no_action = is_no_action(&*binding.action); - // We only want to consider a binding pending if it has an action - // This, however, means that if we have both a NoAction binding and a binding - // with an action at the same depth, we should still set is_pending to true. - if let Some(pending_info) = pending_info_opt.as_mut() { - let (already_pending, pending_is_no_action, pending_depth, pending_keystrokes) = - *pending_info; - - // We only want to change the pending status if it's not already pending AND if - // the existing pending status was set by a NoAction binding. 
This avoids a NoAction - // binding erroneously setting the pending status to true when a binding with an action - // already set it to false - // - // We also want to change the pending status if the keystrokes don't match, - // meaning it's different keystrokes than the NoAction that set pending to false - if pending - && !already_pending - && pending_is_no_action - && (pending_depth == depth || pending_keystrokes != binding.keystrokes()) - { - pending_info.0 = !is_no_action; - } - } else { - pending_info_opt = Some(( - pending && !is_no_action, - is_no_action, - depth, - binding.keystrokes(), - )); - } - - if !pending { - bindings.push((binding_index, binding.clone(), depth)); - continue 'outer; - } - } - // sort by descending depth - bindings.sort_by(|a, b| a.2.cmp(&b.2).reverse()); - let bindings = bindings + let bindings: SmallVec<[_; 1]> = bindings .into_iter() - .map_while(|(binding_index, binding, _)| { - if is_no_action(&*binding.action) { - None - } else { - Some((binding_index, binding)) - } - }) + .take_while(|(_, _, binding)| !is_no_action(&*binding.action)) + .map(|(_, ix, binding)| (ix, binding.clone())) .collect(); - (bindings, pending_info_opt.unwrap_or_default().0) + let mut pending = HashSet::default(); + for (ix, binding) in pending_bindings.into_iter().rev() { + if let Some((binding_ix, _)) = bindings.first() + && *binding_ix > ix + { + continue; + } + if is_no_action(&*binding.action) { + pending.remove(&&binding.keystrokes); + continue; + } + pending.insert(&binding.keystrokes); + } + + (bindings, !pending.is_empty()) } /// Check if the given binding is enabled, given a certain key context. @@ -302,6 +275,30 @@ mod tests { ); } + #[test] + fn test_depth_precedence() { + let bindings = [ + KeyBinding::new("ctrl-a", ActionBeta {}, Some("pane")), + KeyBinding::new("ctrl-a", ActionGamma {}, Some("editor")), + ]; + + let mut keymap = Keymap::default(); + keymap.add_bindings(bindings.clone()); + + let (result, pending) = keymap.bindings_for_input( + &[Keystroke::parse("ctrl-a").unwrap()], + &[ + KeyContext::parse("pane").unwrap(), + KeyContext::parse("editor").unwrap(), + ], + ); + + assert!(!pending); + assert_eq!(result.len(), 2); + assert!(result[0].action.partial_eq(&ActionGamma {})); + assert!(result[1].action.partial_eq(&ActionBeta {})); + } + #[test] fn test_keymap_disabled() { let bindings = [ @@ -453,6 +450,193 @@ mod tests { assert_eq!(space_editor.1, true); } + #[test] + fn test_override_multikey() { + let bindings = [ + KeyBinding::new("ctrl-w left", ActionAlpha {}, Some("editor")), + KeyBinding::new("ctrl-w", NoAction {}, Some("editor")), + ]; + + let mut keymap = Keymap::default(); + keymap.add_bindings(bindings.clone()); + + // Ensure `space` results in pending input on the workspace, but not editor + let (result, pending) = keymap.bindings_for_input( + &[Keystroke::parse("ctrl-w").unwrap()], + &[KeyContext::parse("editor").unwrap()], + ); + assert!(result.is_empty()); + assert_eq!(pending, true); + + let bindings = [ + KeyBinding::new("ctrl-w left", ActionAlpha {}, Some("editor")), + KeyBinding::new("ctrl-w", ActionBeta {}, Some("editor")), + ]; + + let mut keymap = Keymap::default(); + keymap.add_bindings(bindings.clone()); + + // Ensure `space` results in pending input on the workspace, but not editor + let (result, pending) = keymap.bindings_for_input( + &[Keystroke::parse("ctrl-w").unwrap()], + &[KeyContext::parse("editor").unwrap()], + ); + assert_eq!(result.len(), 1); + assert_eq!(pending, false); + } + + #[test] + fn test_simple_disable() { + let 
bindings = [ + KeyBinding::new("ctrl-x", ActionAlpha {}, Some("editor")), + KeyBinding::new("ctrl-x", NoAction {}, Some("editor")), + ]; + + let mut keymap = Keymap::default(); + keymap.add_bindings(bindings.clone()); + + // Ensure `space` results in pending input on the workspace, but not editor + let (result, pending) = keymap.bindings_for_input( + &[Keystroke::parse("ctrl-x").unwrap()], + &[KeyContext::parse("editor").unwrap()], + ); + assert!(result.is_empty()); + assert_eq!(pending, false); + } + + #[test] + fn test_fail_to_disable() { + // disabled at the wrong level + let bindings = [ + KeyBinding::new("ctrl-x", ActionAlpha {}, Some("editor")), + KeyBinding::new("ctrl-x", NoAction {}, Some("workspace")), + ]; + + let mut keymap = Keymap::default(); + keymap.add_bindings(bindings.clone()); + + // Ensure `space` results in pending input on the workspace, but not editor + let (result, pending) = keymap.bindings_for_input( + &[Keystroke::parse("ctrl-x").unwrap()], + &[ + KeyContext::parse("workspace").unwrap(), + KeyContext::parse("editor").unwrap(), + ], + ); + assert_eq!(result.len(), 1); + assert_eq!(pending, false); + } + + #[test] + fn test_disable_deeper() { + let bindings = [ + KeyBinding::new("ctrl-x", ActionAlpha {}, Some("workspace")), + KeyBinding::new("ctrl-x", NoAction {}, Some("editor")), + ]; + + let mut keymap = Keymap::default(); + keymap.add_bindings(bindings.clone()); + + // Ensure `space` results in pending input on the workspace, but not editor + let (result, pending) = keymap.bindings_for_input( + &[Keystroke::parse("ctrl-x").unwrap()], + &[ + KeyContext::parse("workspace").unwrap(), + KeyContext::parse("editor").unwrap(), + ], + ); + assert_eq!(result.len(), 0); + assert_eq!(pending, false); + } + + #[test] + fn test_pending_match_enabled() { + let bindings = [ + KeyBinding::new("ctrl-x", ActionBeta, Some("vim_mode == normal")), + KeyBinding::new("ctrl-x 0", ActionAlpha, Some("Workspace")), + ]; + let mut keymap = Keymap::default(); + keymap.add_bindings(bindings.clone()); + + let matched = keymap.bindings_for_input( + &[Keystroke::parse("ctrl-x")].map(Result::unwrap), + &[ + KeyContext::parse("Workspace"), + KeyContext::parse("Pane"), + KeyContext::parse("Editor vim_mode=normal"), + ] + .map(Result::unwrap), + ); + assert_eq!(matched.0.len(), 1); + assert!(matched.0[0].action.partial_eq(&ActionBeta)); + assert!(matched.1); + } + + #[test] + fn test_pending_match_enabled_extended() { + let bindings = [ + KeyBinding::new("ctrl-x", ActionBeta, Some("vim_mode == normal")), + KeyBinding::new("ctrl-x 0", NoAction, Some("Workspace")), + ]; + let mut keymap = Keymap::default(); + keymap.add_bindings(bindings.clone()); + + let matched = keymap.bindings_for_input( + &[Keystroke::parse("ctrl-x")].map(Result::unwrap), + &[ + KeyContext::parse("Workspace"), + KeyContext::parse("Pane"), + KeyContext::parse("Editor vim_mode=normal"), + ] + .map(Result::unwrap), + ); + assert_eq!(matched.0.len(), 1); + assert!(matched.0[0].action.partial_eq(&ActionBeta)); + assert!(!matched.1); + let bindings = [ + KeyBinding::new("ctrl-x", ActionBeta, Some("Workspace")), + KeyBinding::new("ctrl-x 0", NoAction, Some("vim_mode == normal")), + ]; + let mut keymap = Keymap::default(); + keymap.add_bindings(bindings.clone()); + + let matched = keymap.bindings_for_input( + &[Keystroke::parse("ctrl-x")].map(Result::unwrap), + &[ + KeyContext::parse("Workspace"), + KeyContext::parse("Pane"), + KeyContext::parse("Editor vim_mode=normal"), + ] + .map(Result::unwrap), + ); + assert_eq!(matched.0.len(), 
1); + assert!(matched.0[0].action.partial_eq(&ActionBeta)); + assert!(!matched.1); + } + + #[test] + fn test_overriding_prefix() { + let bindings = [ + KeyBinding::new("ctrl-x 0", ActionAlpha, Some("Workspace")), + KeyBinding::new("ctrl-x", ActionBeta, Some("vim_mode == normal")), + ]; + let mut keymap = Keymap::default(); + keymap.add_bindings(bindings.clone()); + + let matched = keymap.bindings_for_input( + &[Keystroke::parse("ctrl-x")].map(Result::unwrap), + &[ + KeyContext::parse("Workspace"), + KeyContext::parse("Pane"), + KeyContext::parse("Editor vim_mode=normal"), + ] + .map(Result::unwrap), + ); + assert_eq!(matched.0.len(), 1); + assert!(matched.0[0].action.partial_eq(&ActionBeta)); + assert!(!matched.1); + } + #[test] fn test_bindings_for_action() { let bindings = [ From 2eeab5b0bf9921814c9d8a1824a3d09d3f5397f4 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 22 Jul 2025 10:52:04 -0400 Subject: [PATCH 05/25] textmate: Correct context for 'Editor && mode == full' keybinds (#34895) Closes https://github.com/zed-industries/zed/issues/34891 Release Notes: - Fixed textmate keymap misbehaving in certain contexts --- assets/keymaps/macos/textmate.json | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/assets/keymaps/macos/textmate.json b/assets/keymaps/macos/textmate.json index dccb675f6c..0bd8873b17 100644 --- a/assets/keymaps/macos/textmate.json +++ b/assets/keymaps/macos/textmate.json @@ -6,7 +6,7 @@ } }, { - "context": "Editor", + "context": "Editor && mode == full", "bindings": { "cmd-l": "go_to_line::Toggle", "ctrl-shift-d": "editor::DuplicateLineDown", @@ -15,7 +15,12 @@ "cmd-enter": "editor::NewlineBelow", "cmd-alt-enter": "editor::NewlineAbove", "cmd-shift-l": "editor::SelectLine", - "cmd-shift-t": "outline::Toggle", + "cmd-shift-t": "outline::Toggle" + } + }, + { + "context": "Editor", + "bindings": { "alt-backspace": "editor::DeleteToPreviousWordStart", "alt-shift-backspace": "editor::DeleteToNextWordEnd", "alt-delete": "editor::DeleteToNextWordEnd", @@ -39,10 +44,6 @@ "ctrl-_": "editor::ConvertToSnakeCase" } }, - { - "context": "Editor && mode == full", - "bindings": {} - }, { "context": "BufferSearchBar", "bindings": { From 1a76a6b0bfc5145cf51355ff7777b0413063a868 Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Tue, 22 Jul 2025 09:59:51 -0500 Subject: [PATCH 06/25] gpui: Simplify `bindings_for_action` API (#34857) Closes #ISSUE Simplifies the API to no longer have a variant that returns indices. The downside is that a few places that used to call `bindings_for_action_with_indices` now compare `Box` instead of indices, however the result is the removal of wrapper code and index handling that is largely unnecessary Release Notes: - N/A *or* Added/Fixed/Improved ... 
Co-authored-by: Conrad --- crates/gpui/src/key_dispatch.rs | 39 +++++++++--------------- crates/gpui/src/keymap.rs | 53 +++++++++++---------------------- 2 files changed, 31 insertions(+), 61 deletions(-) diff --git a/crates/gpui/src/key_dispatch.rs b/crates/gpui/src/key_dispatch.rs index a290a132c3..cc6ebb9b08 100644 --- a/crates/gpui/src/key_dispatch.rs +++ b/crates/gpui/src/key_dispatch.rs @@ -50,8 +50,8 @@ /// KeyBinding::new("cmd-k left", pane::SplitLeft, Some("Pane")) /// use crate::{ - Action, ActionRegistry, App, BindingIndex, DispatchPhase, EntityId, FocusId, KeyBinding, - KeyContext, Keymap, Keystroke, ModifiersChangedEvent, Window, + Action, ActionRegistry, App, DispatchPhase, EntityId, FocusId, KeyBinding, KeyContext, Keymap, + Keystroke, ModifiersChangedEvent, Window, }; use collections::FxHashMap; use smallvec::SmallVec; @@ -406,16 +406,11 @@ impl DispatchTree { // methods, but this can't be done very cleanly since keymap must be borrowed. let keymap = self.keymap.borrow(); keymap - .bindings_for_action_with_indices(action) - .filter(|(binding_index, binding)| { - Self::binding_matches_predicate_and_not_shadowed( - &keymap, - *binding_index, - &binding.keystrokes, - context_stack, - ) + .bindings_for_action(action) + .filter(|binding| { + Self::binding_matches_predicate_and_not_shadowed(&keymap, &binding, context_stack) }) - .map(|(_, binding)| binding.clone()) + .cloned() .collect() } @@ -428,28 +423,22 @@ impl DispatchTree { ) -> Option { let keymap = self.keymap.borrow(); keymap - .bindings_for_action_with_indices(action) + .bindings_for_action(action) .rev() - .find_map(|(binding_index, binding)| { - let found = Self::binding_matches_predicate_and_not_shadowed( - &keymap, - binding_index, - &binding.keystrokes, - context_stack, - ); - if found { Some(binding.clone()) } else { None } + .find(|binding| { + Self::binding_matches_predicate_and_not_shadowed(&keymap, &binding, context_stack) }) + .cloned() } fn binding_matches_predicate_and_not_shadowed( keymap: &Keymap, - binding_index: BindingIndex, - keystrokes: &[Keystroke], + binding: &KeyBinding, context_stack: &[KeyContext], ) -> bool { - let (bindings, _) = keymap.bindings_for_input_with_indices(&keystrokes, context_stack); - if let Some((highest_precedence_index, _)) = bindings.iter().next() { - binding_index == *highest_precedence_index + let (bindings, _) = keymap.bindings_for_input(&binding.keystrokes, context_stack); + if let Some(found) = bindings.iter().next() { + found.action.partial_eq(binding.action.as_ref()) } else { false } diff --git a/crates/gpui/src/keymap.rs b/crates/gpui/src/keymap.rs index 69700e64dc..83d7479a04 100644 --- a/crates/gpui/src/keymap.rs +++ b/crates/gpui/src/keymap.rs @@ -77,15 +77,6 @@ impl Keymap { &'a self, action: &'a dyn Action, ) -> impl 'a + DoubleEndedIterator { - self.bindings_for_action_with_indices(action) - .map(|(_, binding)| binding) - } - - /// Like `bindings_for_action_with_indices`, but also returns the binding indices. 
- pub fn bindings_for_action_with_indices<'a>( - &'a self, - action: &'a dyn Action, - ) -> impl 'a + DoubleEndedIterator { let action_id = action.type_id(); let binding_indices = self .binding_indices_by_action_id @@ -118,7 +109,7 @@ impl Keymap { } } - Some((BindingIndex(*ix), binding)) + Some(binding) }) } @@ -153,22 +144,8 @@ impl Keymap { input: &[Keystroke], context_stack: &[KeyContext], ) -> (SmallVec<[KeyBinding; 1]>, bool) { - let (bindings, pending) = self.bindings_for_input_with_indices(input, context_stack); - let bindings = bindings - .into_iter() - .map(|(_, binding)| binding) - .collect::>(); - (bindings, pending) - } - - /// Like `bindings_for_input`, but also returns the binding indices. - pub fn bindings_for_input_with_indices( - &self, - input: &[Keystroke], - context_stack: &[KeyContext], - ) -> (SmallVec<[(BindingIndex, KeyBinding); 1]>, bool) { - let mut bindings: SmallVec<[(usize, BindingIndex, &KeyBinding); 1]> = SmallVec::new(); - let mut pending_bindings: SmallVec<[(BindingIndex, &KeyBinding); 1]> = SmallVec::new(); + let mut matched_bindings = SmallVec::<[(usize, BindingIndex, &KeyBinding); 1]>::new(); + let mut pending_bindings = SmallVec::<[(BindingIndex, &KeyBinding); 1]>::new(); for (ix, binding) in self.bindings().enumerate().rev() { let Some(depth) = self.binding_enabled(binding, &context_stack) else { @@ -179,26 +156,30 @@ impl Keymap { }; if !pending { - bindings.push((depth, BindingIndex(ix), binding)) + matched_bindings.push((depth, BindingIndex(ix), binding)); } else { - pending_bindings.push((BindingIndex(ix), binding)) + pending_bindings.push((BindingIndex(ix), binding)); } } - bindings.sort_by(|(depth_a, ix_a, _), (depth_b, ix_b, _)| { + matched_bindings.sort_by(|(depth_a, ix_a, _), (depth_b, ix_b, _)| { depth_b.cmp(depth_a).then(ix_b.cmp(ix_a)) }); - let bindings: SmallVec<[_; 1]> = bindings - .into_iter() - .take_while(|(_, _, binding)| !is_no_action(&*binding.action)) - .map(|(_, ix, binding)| (ix, binding.clone())) - .collect(); + let mut bindings: SmallVec<[_; 1]> = SmallVec::new(); + let mut first_binding_index = None; + for (_, ix, binding) in matched_bindings { + if is_no_action(&*binding.action) { + break; + } + bindings.push(binding.clone()); + first_binding_index.get_or_insert(ix); + } let mut pending = HashSet::default(); for (ix, binding) in pending_bindings.into_iter().rev() { - if let Some((binding_ix, _)) = bindings.first() - && *binding_ix > ix + if let Some(binding_ix) = first_binding_index + && binding_ix > ix { continue; } From 230061a6cb79c93faf3e3a9ff880207dc6668e37 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Tue, 22 Jul 2025 17:20:07 +0200 Subject: [PATCH 07/25] Support multiple OpenAI compatible providers (#34212) TODO - [x] OpenAI Compatible API Icon - [x] Docs - [x] Link to docs in OpenAI provider section about configuring OpenAI API compatible providers Closes #33992 Related to #30010 Release Notes: - agent: Add support for adding multiple OpenAI API compatible providers --------- Co-authored-by: MrSubidubi Co-authored-by: Danilo Leal --- Cargo.lock | 4 +- assets/icons/ai_open_ai_compat.svg | 4 + assets/settings/default.json | 1 + crates/agent/src/thread.rs | 2 +- crates/agent_ui/Cargo.toml | 2 + crates/agent_ui/src/active_thread.rs | 4 +- crates/agent_ui/src/agent_configuration.rs | 55 +- .../add_llm_provider_modal.rs | 639 ++++++++++++++++++ .../src/assistant_context_tests.rs | 2 +- .../src/project_notifications_tool.rs | 2 +- crates/icons/src/icons.rs | 1 + crates/language_model/src/fake_provider.rs | 55 +- 
crates/language_model/src/language_model.rs | 12 + crates/language_model/src/registry.rs | 9 +- crates/language_models/Cargo.toml | 2 +- crates/language_models/src/language_models.rs | 72 +- crates/language_models/src/provider.rs | 1 + .../language_models/src/provider/open_ai.rs | 192 ++---- .../src/provider/open_ai_compatible.rs | 522 ++++++++++++++ crates/language_models/src/settings.rs | 27 +- crates/ui/src/components/modal.rs | 16 +- crates/ui_input/src/ui_input.rs | 4 + docs/src/ai/configuration.md | 13 +- 23 files changed, 1450 insertions(+), 191 deletions(-) create mode 100644 assets/icons/ai_open_ai_compat.svg create mode 100644 crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs create mode 100644 crates/language_models/src/provider/open_ai_compatible.rs diff --git a/Cargo.lock b/Cargo.lock index ad6c40bcf2..c7297e6d59 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -231,6 +231,7 @@ dependencies = [ "jsonschema", "language", "language_model", + "language_models", "languages", "log", "lsp", @@ -269,6 +270,7 @@ dependencies = [ "time_format", "tree-sitter-md", "ui", + "ui_input", "unindent", "urlencoding", "util", @@ -9097,11 +9099,11 @@ dependencies = [ "client", "collections", "component", + "convert_case 0.8.0", "copilot", "credentials_provider", "deepseek", "editor", - "fs", "futures 0.3.31", "google_ai", "gpui", diff --git a/assets/icons/ai_open_ai_compat.svg b/assets/icons/ai_open_ai_compat.svg new file mode 100644 index 0000000000..f6557caac3 --- /dev/null +++ b/assets/icons/ai_open_ai_compat.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/settings/default.json b/assets/settings/default.json index 358871650b..309afaccf5 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -1712,6 +1712,7 @@ "openai": { "api_url": "https://api.openai.com/v1" }, + "openai_compatible": {}, "open_router": { "api_url": "https://openrouter.ai/api/v1" }, diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index f8a7827615..1b3b022ab2 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -5490,7 +5490,7 @@ fn main() {{ let thread = thread_store.update(cx, |store, cx| store.create_thread(cx)); let context_store = cx.new(|_cx| ContextStore::new(project.downgrade(), None)); - let provider = Arc::new(FakeLanguageModelProvider); + let provider = Arc::new(FakeLanguageModelProvider::default()); let model = provider.test_model(); let model: Arc = Arc::new(model); diff --git a/crates/agent_ui/Cargo.toml b/crates/agent_ui/Cargo.toml index e55ae86fb7..33042c0ebd 100644 --- a/crates/agent_ui/Cargo.toml +++ b/crates/agent_ui/Cargo.toml @@ -53,6 +53,7 @@ itertools.workspace = true jsonschema.workspace = true language.workspace = true language_model.workspace = true +language_models.workspace = true log.workspace = true lsp.workspace = true markdown.workspace = true @@ -87,6 +88,7 @@ theme.workspace = true time.workspace = true time_format.workspace = true ui.workspace = true +ui_input.workspace = true urlencoding.workspace = true util.workspace = true uuid.workspace = true diff --git a/crates/agent_ui/src/active_thread.rs b/crates/agent_ui/src/active_thread.rs index bfed81f5b7..e27c318221 100644 --- a/crates/agent_ui/src/active_thread.rs +++ b/crates/agent_ui/src/active_thread.rs @@ -3895,7 +3895,7 @@ mod tests { LanguageModelRegistry::global(cx).update(cx, |registry, cx| { registry.set_default_model( Some(ConfiguredModel { - provider: Arc::new(FakeLanguageModelProvider), + provider: Arc::new(FakeLanguageModelProvider::default()), model, 
}), cx, @@ -3979,7 +3979,7 @@ mod tests { LanguageModelRegistry::global(cx).update(cx, |registry, cx| { registry.set_default_model( Some(ConfiguredModel { - provider: Arc::new(FakeLanguageModelProvider), + provider: Arc::new(FakeLanguageModelProvider::default()), model: model.clone(), }), cx, diff --git a/crates/agent_ui/src/agent_configuration.rs b/crates/agent_ui/src/agent_configuration.rs index b5ad6ba37c..334c5ee6dc 100644 --- a/crates/agent_ui/src/agent_configuration.rs +++ b/crates/agent_ui/src/agent_configuration.rs @@ -1,3 +1,4 @@ +mod add_llm_provider_modal; mod configure_context_server_modal; mod manage_profiles_modal; mod tool_picker; @@ -37,7 +38,10 @@ use zed_actions::ExtensionCategoryFilter; pub(crate) use configure_context_server_modal::ConfigureContextServerModal; pub(crate) use manage_profiles_modal::ManageProfilesModal; -use crate::AddContextServer; +use crate::{ + AddContextServer, + agent_configuration::add_llm_provider_modal::{AddLlmProviderModal, LlmCompatibleProvider}, +}; pub struct AgentConfiguration { fs: Arc, @@ -304,16 +308,55 @@ impl AgentConfiguration { v_flex() .child( - v_flex() + h_flex() .p(DynamicSpacing::Base16.rems(cx)) .pr(DynamicSpacing::Base20.rems(cx)) .pb_0() .mb_2p5() - .gap_0p5() - .child(Headline::new("LLM Providers")) + .items_start() + .justify_between() .child( - Label::new("Add at least one provider to use AI-powered features.") - .color(Color::Muted), + v_flex() + .gap_0p5() + .child(Headline::new("LLM Providers")) + .child( + Label::new("Add at least one provider to use AI-powered features.") + .color(Color::Muted), + ), + ) + .child( + PopoverMenu::new("add-provider-popover") + .trigger( + Button::new("add-provider", "Add Provider") + .icon_position(IconPosition::Start) + .icon(IconName::Plus) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .label_size(LabelSize::Small), + ) + .anchor(gpui::Corner::TopRight) + .menu({ + let workspace = self.workspace.clone(); + move |window, cx| { + Some(ContextMenu::build(window, cx, |menu, _window, _cx| { + menu.header("Compatible APIs").entry("OpenAI", None, { + let workspace = workspace.clone(); + move |window, cx| { + workspace + .update(cx, |workspace, cx| { + AddLlmProviderModal::toggle( + LlmCompatibleProvider::OpenAi, + workspace, + window, + cx, + ); + }) + .log_err(); + } + }) + })) + } + }), ), ) .child( diff --git a/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs b/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs new file mode 100644 index 0000000000..94b32d156b --- /dev/null +++ b/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs @@ -0,0 +1,639 @@ +use std::sync::Arc; + +use anyhow::Result; +use collections::HashSet; +use fs::Fs; +use gpui::{DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Render, Task}; +use language_model::LanguageModelRegistry; +use language_models::{ + AllLanguageModelSettings, OpenAiCompatibleSettingsContent, + provider::open_ai_compatible::AvailableModel, +}; +use settings::update_settings_file; +use ui::{Banner, KeyBinding, Modal, ModalFooter, ModalHeader, Section, prelude::*}; +use ui_input::SingleLineInput; +use workspace::{ModalView, Workspace}; + +#[derive(Clone, Copy)] +pub enum LlmCompatibleProvider { + OpenAi, +} + +impl LlmCompatibleProvider { + fn name(&self) -> &'static str { + match self { + LlmCompatibleProvider::OpenAi => "OpenAI", + } + } + + fn api_url(&self) -> &'static str { + match self { + LlmCompatibleProvider::OpenAi => "https://api.openai.com/v1", + } + } +} + +struct 
AddLlmProviderInput { + provider_name: Entity, + api_url: Entity, + api_key: Entity, + models: Vec, +} + +impl AddLlmProviderInput { + fn new(provider: LlmCompatibleProvider, window: &mut Window, cx: &mut App) -> Self { + let provider_name = single_line_input("Provider Name", provider.name(), None, window, cx); + let api_url = single_line_input("API URL", provider.api_url(), None, window, cx); + let api_key = single_line_input( + "API Key", + "000000000000000000000000000000000000000000000000", + None, + window, + cx, + ); + + Self { + provider_name, + api_url, + api_key, + models: vec![ModelInput::new(window, cx)], + } + } + + fn add_model(&mut self, window: &mut Window, cx: &mut App) { + self.models.push(ModelInput::new(window, cx)); + } + + fn remove_model(&mut self, index: usize) { + self.models.remove(index); + } +} + +struct ModelInput { + name: Entity, + max_completion_tokens: Entity, + max_output_tokens: Entity, + max_tokens: Entity, +} + +impl ModelInput { + fn new(window: &mut Window, cx: &mut App) -> Self { + let model_name = single_line_input( + "Model Name", + "e.g. gpt-4o, claude-opus-4, gemini-2.5-pro", + None, + window, + cx, + ); + let max_completion_tokens = single_line_input( + "Max Completion Tokens", + "200000", + Some("200000"), + window, + cx, + ); + let max_output_tokens = single_line_input( + "Max Output Tokens", + "Max Output Tokens", + Some("32000"), + window, + cx, + ); + let max_tokens = single_line_input("Max Tokens", "Max Tokens", Some("200000"), window, cx); + Self { + name: model_name, + max_completion_tokens, + max_output_tokens, + max_tokens, + } + } + + fn parse(&self, cx: &App) -> Result { + let name = self.name.read(cx).text(cx); + if name.is_empty() { + return Err(SharedString::from("Model Name cannot be empty")); + } + Ok(AvailableModel { + name, + display_name: None, + max_completion_tokens: Some( + self.max_completion_tokens + .read(cx) + .text(cx) + .parse::() + .map_err(|_| SharedString::from("Max Completion Tokens must be a number"))?, + ), + max_output_tokens: Some( + self.max_output_tokens + .read(cx) + .text(cx) + .parse::() + .map_err(|_| SharedString::from("Max Output Tokens must be a number"))?, + ), + max_tokens: self + .max_tokens + .read(cx) + .text(cx) + .parse::() + .map_err(|_| SharedString::from("Max Tokens must be a number"))?, + }) + } +} + +fn single_line_input( + label: impl Into, + placeholder: impl Into, + text: Option<&str>, + window: &mut Window, + cx: &mut App, +) -> Entity { + cx.new(|cx| { + let input = SingleLineInput::new(window, cx, placeholder).label(label); + if let Some(text) = text { + input + .editor() + .update(cx, |editor, cx| editor.set_text(text, window, cx)); + } + input + }) +} + +fn save_provider_to_settings( + input: &AddLlmProviderInput, + cx: &mut App, +) -> Task> { + let provider_name: Arc = input.provider_name.read(cx).text(cx).into(); + if provider_name.is_empty() { + return Task::ready(Err("Provider Name cannot be empty".into())); + } + + if LanguageModelRegistry::read_global(cx) + .providers() + .iter() + .any(|provider| { + provider.id().0.as_ref() == provider_name.as_ref() + || provider.name().0.as_ref() == provider_name.as_ref() + }) + { + return Task::ready(Err( + "Provider Name is already taken by another provider".into() + )); + } + + let api_url = input.api_url.read(cx).text(cx); + if api_url.is_empty() { + return Task::ready(Err("API URL cannot be empty".into())); + } + + let api_key = input.api_key.read(cx).text(cx); + if api_key.is_empty() { + return Task::ready(Err("API Key cannot be 
empty".into())); + } + + let mut models = Vec::new(); + let mut model_names: HashSet = HashSet::default(); + for model in &input.models { + match model.parse(cx) { + Ok(model) => { + if !model_names.insert(model.name.clone()) { + return Task::ready(Err("Model Names must be unique".into())); + } + models.push(model) + } + Err(err) => return Task::ready(Err(err)), + } + } + + let fs = ::global(cx); + let task = cx.write_credentials(&api_url, "Bearer", api_key.as_bytes()); + cx.spawn(async move |cx| { + task.await + .map_err(|_| "Failed to write API key to keychain")?; + cx.update(|cx| { + update_settings_file::(fs, cx, |settings, _cx| { + settings.openai_compatible.get_or_insert_default().insert( + provider_name, + OpenAiCompatibleSettingsContent { + api_url, + available_models: models, + }, + ); + }); + }) + .ok(); + Ok(()) + }) +} + +pub struct AddLlmProviderModal { + provider: LlmCompatibleProvider, + input: AddLlmProviderInput, + focus_handle: FocusHandle, + last_error: Option, +} + +impl AddLlmProviderModal { + pub fn toggle( + provider: LlmCompatibleProvider, + workspace: &mut Workspace, + window: &mut Window, + cx: &mut Context, + ) { + workspace.toggle_modal(window, cx, |window, cx| Self::new(provider, window, cx)); + } + + fn new(provider: LlmCompatibleProvider, window: &mut Window, cx: &mut Context) -> Self { + Self { + input: AddLlmProviderInput::new(provider, window, cx), + provider, + last_error: None, + focus_handle: cx.focus_handle(), + } + } + + fn confirm(&mut self, _: &menu::Confirm, _: &mut Window, cx: &mut Context) { + let task = save_provider_to_settings(&self.input, cx); + cx.spawn(async move |this, cx| { + let result = task.await; + this.update(cx, |this, cx| match result { + Ok(_) => { + cx.emit(DismissEvent); + } + Err(error) => { + this.last_error = Some(error); + cx.notify(); + } + }) + }) + .detach_and_log_err(cx); + } + + fn cancel(&mut self, _: &menu::Cancel, _: &mut Window, cx: &mut Context) { + cx.emit(DismissEvent); + } + + fn render_section(&self) -> Section { + Section::new() + .child(self.input.provider_name.clone()) + .child(self.input.api_url.clone()) + .child(self.input.api_key.clone()) + } + + fn render_model_section(&self, cx: &mut Context) -> Section { + Section::new().child( + v_flex() + .gap_2() + .child( + h_flex() + .justify_between() + .child(Label::new("Models").size(LabelSize::Small)) + .child( + Button::new("add-model", "Add Model") + .icon(IconName::Plus) + .icon_position(IconPosition::Start) + .icon_size(IconSize::XSmall) + .icon_color(Color::Muted) + .label_size(LabelSize::Small) + .on_click(cx.listener(|this, _, window, cx| { + this.input.add_model(window, cx); + cx.notify(); + })), + ), + ) + .children( + self.input + .models + .iter() + .enumerate() + .map(|(ix, _)| self.render_model(ix, cx)), + ), + ) + } + + fn render_model(&self, ix: usize, cx: &mut Context) -> impl IntoElement + use<> { + let has_more_than_one_model = self.input.models.len() > 1; + let model = &self.input.models[ix]; + + v_flex() + .p_2() + .gap_2() + .rounded_sm() + .border_1() + .border_dashed() + .border_color(cx.theme().colors().border.opacity(0.6)) + .bg(cx.theme().colors().element_active.opacity(0.15)) + .child(model.name.clone()) + .child( + h_flex() + .gap_2() + .child(model.max_completion_tokens.clone()) + .child(model.max_output_tokens.clone()), + ) + .child(model.max_tokens.clone()) + .when(has_more_than_one_model, |this| { + this.child( + Button::new(("remove-model", ix), "Remove Model") + .icon(IconName::Trash) + .icon_position(IconPosition::Start) + 
.icon_size(IconSize::XSmall) + .icon_color(Color::Muted) + .label_size(LabelSize::Small) + .style(ButtonStyle::Outlined) + .full_width() + .on_click(cx.listener(move |this, _, _window, cx| { + this.input.remove_model(ix); + cx.notify(); + })), + ) + }) + } +} + +impl EventEmitter for AddLlmProviderModal {} + +impl Focusable for AddLlmProviderModal { + fn focus_handle(&self, _cx: &App) -> FocusHandle { + self.focus_handle.clone() + } +} + +impl ModalView for AddLlmProviderModal {} + +impl Render for AddLlmProviderModal { + fn render(&mut self, window: &mut ui::Window, cx: &mut ui::Context) -> impl IntoElement { + let focus_handle = self.focus_handle(cx); + + div() + .id("add-llm-provider-modal") + .key_context("AddLlmProviderModal") + .w(rems(34.)) + .elevation_3(cx) + .on_action(cx.listener(Self::cancel)) + .capture_any_mouse_down(cx.listener(|this, _, window, cx| { + this.focus_handle(cx).focus(window); + })) + .child( + Modal::new("configure-context-server", None) + .header(ModalHeader::new().headline("Add LLM Provider").description( + match self.provider { + LlmCompatibleProvider::OpenAi => { + "This provider will use an OpenAI compatible API." + } + }, + )) + .when_some(self.last_error.clone(), |this, error| { + this.section( + Section::new().child( + Banner::new() + .severity(ui::Severity::Warning) + .child(div().text_xs().child(error)), + ), + ) + }) + .child( + v_flex() + .id("modal_content") + .max_h_128() + .overflow_y_scroll() + .gap_2() + .child(self.render_section()) + .child(self.render_model_section(cx)), + ) + .footer( + ModalFooter::new().end_slot( + h_flex() + .gap_1() + .child( + Button::new("cancel", "Cancel") + .key_binding( + KeyBinding::for_action_in( + &menu::Cancel, + &focus_handle, + window, + cx, + ) + .map(|kb| kb.size(rems_from_px(12.))), + ) + .on_click(cx.listener(|this, _event, window, cx| { + this.cancel(&menu::Cancel, window, cx) + })), + ) + .child( + Button::new("save-server", "Save Provider") + .key_binding( + KeyBinding::for_action_in( + &menu::Confirm, + &focus_handle, + window, + cx, + ) + .map(|kb| kb.size(rems_from_px(12.))), + ) + .on_click(cx.listener(|this, _event, window, cx| { + this.confirm(&menu::Confirm, window, cx) + })), + ), + ), + ), + ) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use editor::EditorSettings; + use fs::FakeFs; + use gpui::{TestAppContext, VisualTestContext}; + use language::language_settings; + use language_model::{ + LanguageModelProviderId, LanguageModelProviderName, + fake_provider::FakeLanguageModelProvider, + }; + use project::Project; + use settings::{Settings as _, SettingsStore}; + use util::path; + + #[gpui::test] + async fn test_save_provider_invalid_inputs(cx: &mut TestAppContext) { + let cx = setup_test(cx).await; + + assert_eq!( + save_provider_validation_errors("", "someurl", "somekey", vec![], cx,).await, + Some("Provider Name cannot be empty".into()) + ); + + assert_eq!( + save_provider_validation_errors("someprovider", "", "somekey", vec![], cx,).await, + Some("API URL cannot be empty".into()) + ); + + assert_eq!( + save_provider_validation_errors("someprovider", "someurl", "", vec![], cx,).await, + Some("API Key cannot be empty".into()) + ); + + assert_eq!( + save_provider_validation_errors( + "someprovider", + "someurl", + "somekey", + vec![("", "200000", "200000", "32000")], + cx, + ) + .await, + Some("Model Name cannot be empty".into()) + ); + + assert_eq!( + save_provider_validation_errors( + "someprovider", + "someurl", + "somekey", + vec![("somemodel", "abc", "200000", "32000")], + cx, + 
) + .await, + Some("Max Tokens must be a number".into()) + ); + + assert_eq!( + save_provider_validation_errors( + "someprovider", + "someurl", + "somekey", + vec![("somemodel", "200000", "abc", "32000")], + cx, + ) + .await, + Some("Max Completion Tokens must be a number".into()) + ); + + assert_eq!( + save_provider_validation_errors( + "someprovider", + "someurl", + "somekey", + vec![("somemodel", "200000", "200000", "abc")], + cx, + ) + .await, + Some("Max Output Tokens must be a number".into()) + ); + + assert_eq!( + save_provider_validation_errors( + "someprovider", + "someurl", + "somekey", + vec![ + ("somemodel", "200000", "200000", "32000"), + ("somemodel", "200000", "200000", "32000"), + ], + cx, + ) + .await, + Some("Model Names must be unique".into()) + ); + } + + #[gpui::test] + async fn test_save_provider_name_conflict(cx: &mut TestAppContext) { + let cx = setup_test(cx).await; + + cx.update(|_window, cx| { + LanguageModelRegistry::global(cx).update(cx, |registry, cx| { + registry.register_provider( + FakeLanguageModelProvider::new( + LanguageModelProviderId::new("someprovider"), + LanguageModelProviderName::new("Some Provider"), + ), + cx, + ); + }); + }); + + assert_eq!( + save_provider_validation_errors( + "someprovider", + "someurl", + "someapikey", + vec![("somemodel", "200000", "200000", "32000")], + cx, + ) + .await, + Some("Provider Name is already taken by another provider".into()) + ); + } + + async fn setup_test(cx: &mut TestAppContext) -> &mut VisualTestContext { + cx.update(|cx| { + let store = SettingsStore::test(cx); + cx.set_global(store); + workspace::init_settings(cx); + Project::init_settings(cx); + theme::init(theme::LoadThemes::JustBase, cx); + language_settings::init(cx); + EditorSettings::register(cx); + language_model::init_settings(cx); + language_models::init_settings(cx); + }); + + let fs = FakeFs::new(cx.executor()); + cx.update(|cx| ::set_global(fs.clone(), cx)); + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; + let (_, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + + cx + } + + async fn save_provider_validation_errors( + provider_name: &str, + api_url: &str, + api_key: &str, + models: Vec<(&str, &str, &str, &str)>, + cx: &mut VisualTestContext, + ) -> Option { + fn set_text( + input: &Entity, + text: &str, + window: &mut Window, + cx: &mut App, + ) { + input.update(cx, |input, cx| { + input.editor().update(cx, |editor, cx| { + editor.set_text(text, window, cx); + }); + }); + } + + let task = cx.update(|window, cx| { + let mut input = AddLlmProviderInput::new(LlmCompatibleProvider::OpenAi, window, cx); + set_text(&input.provider_name, provider_name, window, cx); + set_text(&input.api_url, api_url, window, cx); + set_text(&input.api_key, api_key, window, cx); + + for (i, (name, max_tokens, max_completion_tokens, max_output_tokens)) in + models.iter().enumerate() + { + if i >= input.models.len() { + input.models.push(ModelInput::new(window, cx)); + } + let model = &mut input.models[i]; + set_text(&model.name, name, window, cx); + set_text(&model.max_tokens, max_tokens, window, cx); + set_text( + &model.max_completion_tokens, + max_completion_tokens, + window, + cx, + ); + set_text(&model.max_output_tokens, max_output_tokens, window, cx); + } + save_provider_to_settings(&input, cx) + }); + + task.await.err() + } +} diff --git a/crates/assistant_context/src/assistant_context_tests.rs b/crates/assistant_context/src/assistant_context_tests.rs index dba3bfde61..f139d525d3 100644 --- 
a/crates/assistant_context/src/assistant_context_tests.rs +++ b/crates/assistant_context/src/assistant_context_tests.rs @@ -1323,7 +1323,7 @@ fn setup_context_editor_with_fake_model( ) -> (Entity, Arc) { let registry = Arc::new(LanguageRegistry::test(cx.executor().clone())); - let fake_provider = Arc::new(FakeLanguageModelProvider); + let fake_provider = Arc::new(FakeLanguageModelProvider::default()); let fake_model = Arc::new(fake_provider.test_model()); cx.update(|cx| { diff --git a/crates/assistant_tools/src/project_notifications_tool.rs b/crates/assistant_tools/src/project_notifications_tool.rs index ec315d9ab1..7567926dca 100644 --- a/crates/assistant_tools/src/project_notifications_tool.rs +++ b/crates/assistant_tools/src/project_notifications_tool.rs @@ -200,7 +200,7 @@ mod tests { // Run the tool before any changes let tool = Arc::new(ProjectNotificationsTool); - let provider = Arc::new(FakeLanguageModelProvider); + let provider = Arc::new(FakeLanguageModelProvider::default()); let model: Arc = Arc::new(provider.test_model()); let request = Arc::new(LanguageModelRequest::default()); let tool_input = json!({}); diff --git a/crates/icons/src/icons.rs b/crates/icons/src/icons.rs index b85e5b517d..e7066ae151 100644 --- a/crates/icons/src/icons.rs +++ b/crates/icons/src/icons.rs @@ -20,6 +20,7 @@ pub enum IconName { AiMistral, AiOllama, AiOpenAi, + AiOpenAiCompat, AiOpenRouter, AiVZero, AiXAi, diff --git a/crates/language_model/src/fake_provider.rs b/crates/language_model/src/fake_provider.rs index f5191016d8..d54db7554a 100644 --- a/crates/language_model/src/fake_provider.rs +++ b/crates/language_model/src/fake_provider.rs @@ -10,25 +10,21 @@ use http_client::Result; use parking_lot::Mutex; use std::sync::Arc; -pub fn language_model_id() -> LanguageModelId { - LanguageModelId::from("fake".to_string()) +#[derive(Clone)] +pub struct FakeLanguageModelProvider { + id: LanguageModelProviderId, + name: LanguageModelProviderName, } -pub fn language_model_name() -> LanguageModelName { - LanguageModelName::from("Fake".to_string()) +impl Default for FakeLanguageModelProvider { + fn default() -> Self { + Self { + id: LanguageModelProviderId::from("fake".to_string()), + name: LanguageModelProviderName::from("Fake".to_string()), + } + } } -pub fn provider_id() -> LanguageModelProviderId { - LanguageModelProviderId::from("fake".to_string()) -} - -pub fn provider_name() -> LanguageModelProviderName { - LanguageModelProviderName::from("Fake".to_string()) -} - -#[derive(Clone, Default)] -pub struct FakeLanguageModelProvider; - impl LanguageModelProviderState for FakeLanguageModelProvider { type ObservableEntity = (); @@ -39,11 +35,11 @@ impl LanguageModelProviderState for FakeLanguageModelProvider { impl LanguageModelProvider for FakeLanguageModelProvider { fn id(&self) -> LanguageModelProviderId { - provider_id() + self.id.clone() } fn name(&self) -> LanguageModelProviderName { - provider_name() + self.name.clone() } fn default_model(&self, _cx: &App) -> Option> { @@ -76,6 +72,10 @@ impl LanguageModelProvider for FakeLanguageModelProvider { } impl FakeLanguageModelProvider { + pub fn new(id: LanguageModelProviderId, name: LanguageModelProviderName) -> Self { + Self { id, name } + } + pub fn test_model(&self) -> FakeLanguageModel { FakeLanguageModel::default() } @@ -89,11 +89,22 @@ pub struct ToolUseRequest { pub schema: serde_json::Value, } -#[derive(Default)] pub struct FakeLanguageModel { + provider_id: LanguageModelProviderId, + provider_name: LanguageModelProviderName, current_completion_txs: 
Mutex)>>, } +impl Default for FakeLanguageModel { + fn default() -> Self { + Self { + provider_id: LanguageModelProviderId::from("fake".to_string()), + provider_name: LanguageModelProviderName::from("Fake".to_string()), + current_completion_txs: Mutex::new(Vec::new()), + } + } +} + impl FakeLanguageModel { pub fn pending_completions(&self) -> Vec { self.current_completion_txs @@ -138,19 +149,19 @@ impl FakeLanguageModel { impl LanguageModel for FakeLanguageModel { fn id(&self) -> LanguageModelId { - language_model_id() + LanguageModelId::from("fake".to_string()) } fn name(&self) -> LanguageModelName { - language_model_name() + LanguageModelName::from("Fake".to_string()) } fn provider_id(&self) -> LanguageModelProviderId { - provider_id() + self.provider_id.clone() } fn provider_name(&self) -> LanguageModelProviderName { - provider_name() + self.provider_name.clone() } fn supports_tools(&self) -> bool { diff --git a/crates/language_model/src/language_model.rs b/crates/language_model/src/language_model.rs index 72455b3821..54640419b6 100644 --- a/crates/language_model/src/language_model.rs +++ b/crates/language_model/src/language_model.rs @@ -735,6 +735,18 @@ impl From for LanguageModelProviderName { } } +impl From> for LanguageModelProviderId { + fn from(value: Arc) -> Self { + Self(SharedString::from(value)) + } +} + +impl From> for LanguageModelProviderName { + fn from(value: Arc) -> Self { + Self(SharedString::from(value)) + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/language_model/src/registry.rs b/crates/language_model/src/registry.rs index 6e8e8e9108..7cf071808a 100644 --- a/crates/language_model/src/registry.rs +++ b/crates/language_model/src/registry.rs @@ -125,7 +125,7 @@ impl LanguageModelRegistry { #[cfg(any(test, feature = "test-support"))] pub fn test(cx: &mut App) -> crate::fake_provider::FakeLanguageModelProvider { - let fake_provider = crate::fake_provider::FakeLanguageModelProvider; + let fake_provider = crate::fake_provider::FakeLanguageModelProvider::default(); let registry = cx.new(|cx| { let mut registry = Self::default(); registry.register_provider(fake_provider.clone(), cx); @@ -403,16 +403,17 @@ mod tests { fn test_register_providers(cx: &mut App) { let registry = cx.new(|_| LanguageModelRegistry::default()); + let provider = FakeLanguageModelProvider::default(); registry.update(cx, |registry, cx| { - registry.register_provider(FakeLanguageModelProvider, cx); + registry.register_provider(provider.clone(), cx); }); let providers = registry.read(cx).providers(); assert_eq!(providers.len(), 1); - assert_eq!(providers[0].id(), crate::fake_provider::provider_id()); + assert_eq!(providers[0].id(), provider.id()); registry.update(cx, |registry, cx| { - registry.unregister_provider(crate::fake_provider::provider_id(), cx); + registry.unregister_provider(provider.id(), cx); }); let providers = registry.read(cx).providers(); diff --git a/crates/language_models/Cargo.toml b/crates/language_models/Cargo.toml index ed38ac7660..574579aaa7 100644 --- a/crates/language_models/Cargo.toml +++ b/crates/language_models/Cargo.toml @@ -26,10 +26,10 @@ client.workspace = true collections.workspace = true component.workspace = true credentials_provider.workspace = true +convert_case.workspace = true copilot.workspace = true deepseek = { workspace = true, features = ["schemars"] } editor.workspace = true -fs.workspace = true futures.workspace = true google_ai = { workspace = true, features = ["schemars"] } gpui.workspace = true diff --git 
a/crates/language_models/src/language_models.rs b/crates/language_models/src/language_models.rs index 192f5a5fae..18e6f47ed0 100644 --- a/crates/language_models/src/language_models.rs +++ b/crates/language_models/src/language_models.rs @@ -1,8 +1,10 @@ use std::sync::Arc; +use ::settings::{Settings, SettingsStore}; use client::{Client, UserStore}; +use collections::HashSet; use gpui::{App, Context, Entity}; -use language_model::LanguageModelRegistry; +use language_model::{LanguageModelProviderId, LanguageModelRegistry}; use provider::deepseek::DeepSeekLanguageModelProvider; pub mod provider; @@ -18,17 +20,81 @@ use crate::provider::lmstudio::LmStudioLanguageModelProvider; use crate::provider::mistral::MistralLanguageModelProvider; use crate::provider::ollama::OllamaLanguageModelProvider; use crate::provider::open_ai::OpenAiLanguageModelProvider; +use crate::provider::open_ai_compatible::OpenAiCompatibleLanguageModelProvider; use crate::provider::open_router::OpenRouterLanguageModelProvider; use crate::provider::vercel::VercelLanguageModelProvider; use crate::provider::x_ai::XAiLanguageModelProvider; pub use crate::settings::*; pub fn init(user_store: Entity, client: Arc, cx: &mut App) { - crate::settings::init(cx); + crate::settings::init_settings(cx); let registry = LanguageModelRegistry::global(cx); registry.update(cx, |registry, cx| { - register_language_model_providers(registry, user_store, client, cx); + register_language_model_providers(registry, user_store, client.clone(), cx); }); + + let mut openai_compatible_providers = AllLanguageModelSettings::get_global(cx) + .openai_compatible + .keys() + .cloned() + .collect::>(); + + registry.update(cx, |registry, cx| { + register_openai_compatible_providers( + registry, + &HashSet::default(), + &openai_compatible_providers, + client.clone(), + cx, + ); + }); + cx.observe_global::(move |cx| { + let openai_compatible_providers_new = AllLanguageModelSettings::get_global(cx) + .openai_compatible + .keys() + .cloned() + .collect::>(); + if openai_compatible_providers_new != openai_compatible_providers { + registry.update(cx, |registry, cx| { + register_openai_compatible_providers( + registry, + &openai_compatible_providers, + &openai_compatible_providers_new, + client.clone(), + cx, + ); + }); + openai_compatible_providers = openai_compatible_providers_new; + } + }) + .detach(); +} + +fn register_openai_compatible_providers( + registry: &mut LanguageModelRegistry, + old: &HashSet>, + new: &HashSet>, + client: Arc, + cx: &mut Context, +) { + for provider_id in old { + if !new.contains(provider_id) { + registry.unregister_provider(LanguageModelProviderId::from(provider_id.clone()), cx); + } + } + + for provider_id in new { + if !old.contains(provider_id) { + registry.register_provider( + OpenAiCompatibleLanguageModelProvider::new( + provider_id.clone(), + client.http_client(), + cx, + ), + cx, + ); + } + } } fn register_language_model_providers( diff --git a/crates/language_models/src/provider.rs b/crates/language_models/src/provider.rs index c717be7c90..d780195c66 100644 --- a/crates/language_models/src/provider.rs +++ b/crates/language_models/src/provider.rs @@ -8,6 +8,7 @@ pub mod lmstudio; pub mod mistral; pub mod ollama; pub mod open_ai; +pub mod open_ai_compatible; pub mod open_router; pub mod vercel; pub mod x_ai; diff --git a/crates/language_models/src/provider/open_ai.rs b/crates/language_models/src/provider/open_ai.rs index 76f2fbe303..6c4d4c9b3e 100644 --- a/crates/language_models/src/provider/open_ai.rs +++ 
b/crates/language_models/src/provider/open_ai.rs @@ -2,7 +2,6 @@ use anyhow::{Context as _, Result, anyhow}; use collections::{BTreeMap, HashMap}; use credentials_provider::CredentialsProvider; -use fs::Fs; use futures::Stream; use futures::{FutureExt, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, Subscription, Task, Window}; @@ -18,7 +17,7 @@ use menu; use open_ai::{ImageUrl, Model, ResponseStreamEvent, stream_completion}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsStore, update_settings_file}; +use settings::{Settings, SettingsStore}; use std::pin::Pin; use std::str::FromStr as _; use std::sync::Arc; @@ -28,7 +27,6 @@ use ui::{ElevationIndex, List, Tooltip, prelude::*}; use ui_input::SingleLineInput; use util::ResultExt; -use crate::OpenAiSettingsContent; use crate::{AllLanguageModelSettings, ui::InstructionListItem}; const PROVIDER_ID: LanguageModelProviderId = language_model::OPEN_AI_PROVIDER_ID; @@ -621,26 +619,32 @@ struct RawToolCall { arguments: String, } +pub(crate) fn collect_tiktoken_messages( + request: LanguageModelRequest, +) -> Vec { + request + .messages + .into_iter() + .map(|message| tiktoken_rs::ChatCompletionRequestMessage { + role: match message.role { + Role::User => "user".into(), + Role::Assistant => "assistant".into(), + Role::System => "system".into(), + }, + content: Some(message.string_contents()), + name: None, + function_call: None, + }) + .collect::>() +} + pub fn count_open_ai_tokens( request: LanguageModelRequest, model: Model, cx: &App, ) -> BoxFuture<'static, Result> { cx.background_spawn(async move { - let messages = request - .messages - .into_iter() - .map(|message| tiktoken_rs::ChatCompletionRequestMessage { - role: match message.role { - Role::User => "user".into(), - Role::Assistant => "assistant".into(), - Role::System => "system".into(), - }, - content: Some(message.string_contents()), - name: None, - function_call: None, - }) - .collect::>(); + let messages = collect_tiktoken_messages(request); match model { Model::Custom { max_tokens, .. 
} => { @@ -678,7 +682,6 @@ pub fn count_open_ai_tokens( struct ConfigurationView { api_key_editor: Entity, - api_url_editor: Entity, state: gpui::Entity, load_credentials_task: Option>, } @@ -691,23 +694,6 @@ impl ConfigurationView { cx, "sk-000000000000000000000000000000000000000000000000", ) - .label("API key") - }); - - let api_url = AllLanguageModelSettings::get_global(cx) - .openai - .api_url - .clone(); - - let api_url_editor = cx.new(|cx| { - let input = SingleLineInput::new(window, cx, open_ai::OPEN_AI_API_URL).label("API URL"); - - if !api_url.is_empty() { - input.editor.update(cx, |editor, cx| { - editor.set_text(&*api_url, window, cx); - }); - } - input }); cx.observe(&state, |_, _, cx| { @@ -735,7 +721,6 @@ impl ConfigurationView { Self { api_key_editor, - api_url_editor, state, load_credentials_task, } @@ -783,57 +768,6 @@ impl ConfigurationView { cx.notify(); } - fn save_api_url(&mut self, cx: &mut Context) { - let api_url = self - .api_url_editor - .read(cx) - .editor() - .read(cx) - .text(cx) - .trim() - .to_string(); - - let current_url = AllLanguageModelSettings::get_global(cx) - .openai - .api_url - .clone(); - - let effective_current_url = if current_url.is_empty() { - open_ai::OPEN_AI_API_URL - } else { - ¤t_url - }; - - if !api_url.is_empty() && api_url != effective_current_url { - let fs = ::global(cx); - update_settings_file::(fs, cx, move |settings, _| { - if let Some(settings) = settings.openai.as_mut() { - settings.api_url = Some(api_url.clone()); - } else { - settings.openai = Some(OpenAiSettingsContent { - api_url: Some(api_url.clone()), - available_models: None, - }); - } - }); - } - } - - fn reset_api_url(&mut self, window: &mut Window, cx: &mut Context) { - self.api_url_editor.update(cx, |input, cx| { - input.editor.update(cx, |editor, cx| { - editor.set_text("", window, cx); - }); - }); - let fs = ::global(cx); - update_settings_file::(fs, cx, |settings, _cx| { - if let Some(settings) = settings.openai.as_mut() { - settings.api_url = None; - } - }); - cx.notify(); - } - fn should_render_editor(&self, cx: &mut Context) -> bool { !self.state.read(cx).is_authenticated() } @@ -846,7 +780,6 @@ impl Render for ConfigurationView { let api_key_section = if self.should_render_editor(cx) { v_flex() .on_action(cx.listener(Self::save_api_key)) - .child(Label::new("To use Zed's assistant with OpenAI, you need to add an API key. 
Follow these steps:")) .child( List::new() @@ -910,59 +843,34 @@ impl Render for ConfigurationView { .into_any() }; - let custom_api_url_set = - AllLanguageModelSettings::get_global(cx).openai.api_url != open_ai::OPEN_AI_API_URL; - - let api_url_section = if custom_api_url_set { - h_flex() - .mt_1() - .p_1() - .justify_between() - .rounded_md() - .border_1() - .border_color(cx.theme().colors().border) - .bg(cx.theme().colors().background) - .child( - h_flex() - .gap_1() - .child(Icon::new(IconName::Check).color(Color::Success)) - .child(Label::new("Custom API URL configured.")), - ) - .child( - Button::new("reset-api-url", "Reset API URL") - .label_size(LabelSize::Small) - .icon(IconName::Undo) - .icon_size(IconSize::Small) - .icon_position(IconPosition::Start) - .layer(ElevationIndex::ModalSurface) - .on_click( - cx.listener(|this, _, window, cx| this.reset_api_url(window, cx)), - ), - ) - .into_any() - } else { - v_flex() - .on_action(cx.listener(|this, _: &menu::Confirm, _window, cx| { - this.save_api_url(cx); - cx.notify(); - })) - .mt_2() - .pt_2() - .border_t_1() - .border_color(cx.theme().colors().border_variant) - .gap_1() - .child( - List::new() - .child(InstructionListItem::text_only( - "Optionally, you can change the base URL for the OpenAI API request.", - )) - .child(InstructionListItem::text_only( - "Paste the new API endpoint below and hit enter", - )), - ) - .child(self.api_url_editor.clone()) - .into_any() - }; + let compatible_api_section = h_flex() + .mt_1p5() + .gap_0p5() + .flex_wrap() + .when(self.should_render_editor(cx), |this| { + this.pt_1p5() + .border_t_1() + .border_color(cx.theme().colors().border_variant) + }) + .child( + h_flex() + .gap_2() + .child( + Icon::new(IconName::Info) + .size(IconSize::XSmall) + .color(Color::Muted), + ) + .child(Label::new("Zed also supports OpenAI-compatible models.")), + ) + .child( + Button::new("docs", "Learn More") + .icon(IconName::ArrowUpRight) + .icon_size(IconSize::XSmall) + .icon_color(Color::Muted) + .on_click(move |_, _window, cx| { + cx.open_url("https://zed.dev/docs/ai/configuration#openai-api-compatible") + }), + ); if self.load_credentials_task.is_some() { div().child(Label::new("Loading credentials…")).into_any() @@ -970,7 +878,7 @@ impl Render for ConfigurationView { v_flex() .size_full() .child(api_key_section) - .child(api_url_section) + .child(compatible_api_section) .into_any() } } diff --git a/crates/language_models/src/provider/open_ai_compatible.rs b/crates/language_models/src/provider/open_ai_compatible.rs new file mode 100644 index 0000000000..64add5483d --- /dev/null +++ b/crates/language_models/src/provider/open_ai_compatible.rs @@ -0,0 +1,522 @@ +use anyhow::{Context as _, Result, anyhow}; +use credentials_provider::CredentialsProvider; + +use convert_case::{Case, Casing}; +use futures::{FutureExt, StreamExt, future::BoxFuture}; +use gpui::{AnyView, App, AsyncApp, Context, Entity, Subscription, Task, Window}; +use http_client::HttpClient; +use language_model::{ + AuthenticateError, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, + LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId, + LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest, + LanguageModelToolChoice, RateLimiter, +}; +use menu; +use open_ai::{ResponseStreamEvent, stream_completion}; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use settings::{Settings, SettingsStore}; +use std::sync::Arc; + +use ui::{ElevationIndex, Tooltip, prelude::*}; 
+use ui_input::SingleLineInput; +use util::ResultExt; + +use crate::AllLanguageModelSettings; +use crate::provider::open_ai::{OpenAiEventMapper, into_open_ai}; + +#[derive(Default, Clone, Debug, PartialEq)] +pub struct OpenAiCompatibleSettings { + pub api_url: String, + pub available_models: Vec, +} + +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)] +pub struct AvailableModel { + pub name: String, + pub display_name: Option, + pub max_tokens: u64, + pub max_output_tokens: Option, + pub max_completion_tokens: Option, +} + +pub struct OpenAiCompatibleLanguageModelProvider { + id: LanguageModelProviderId, + name: LanguageModelProviderName, + http_client: Arc, + state: gpui::Entity, +} + +pub struct State { + id: Arc, + env_var_name: Arc, + api_key: Option, + api_key_from_env: bool, + settings: OpenAiCompatibleSettings, + _subscription: Subscription, +} + +impl State { + fn is_authenticated(&self) -> bool { + self.api_key.is_some() + } + + fn reset_api_key(&self, cx: &mut Context) -> Task> { + let credentials_provider = ::global(cx); + let api_url = self.settings.api_url.clone(); + cx.spawn(async move |this, cx| { + credentials_provider + .delete_credentials(&api_url, &cx) + .await + .log_err(); + this.update(cx, |this, cx| { + this.api_key = None; + this.api_key_from_env = false; + cx.notify(); + }) + }) + } + + fn set_api_key(&mut self, api_key: String, cx: &mut Context) -> Task> { + let credentials_provider = ::global(cx); + let api_url = self.settings.api_url.clone(); + cx.spawn(async move |this, cx| { + credentials_provider + .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx) + .await + .log_err(); + this.update(cx, |this, cx| { + this.api_key = Some(api_key); + cx.notify(); + }) + }) + } + + fn authenticate(&self, cx: &mut Context) -> Task> { + if self.is_authenticated() { + return Task::ready(Ok(())); + } + + let credentials_provider = ::global(cx); + let env_var_name = self.env_var_name.clone(); + let api_url = self.settings.api_url.clone(); + cx.spawn(async move |this, cx| { + let (api_key, from_env) = if let Ok(api_key) = std::env::var(env_var_name.as_ref()) { + (api_key, true) + } else { + let (_, api_key) = credentials_provider + .read_credentials(&api_url, &cx) + .await? 
+ .ok_or(AuthenticateError::CredentialsNotFound)?; + ( + String::from_utf8(api_key).context("invalid {PROVIDER_NAME} API key")?, + false, + ) + }; + this.update(cx, |this, cx| { + this.api_key = Some(api_key); + this.api_key_from_env = from_env; + cx.notify(); + })?; + + Ok(()) + }) + } +} + +impl OpenAiCompatibleLanguageModelProvider { + pub fn new(id: Arc, http_client: Arc, cx: &mut App) -> Self { + fn resolve_settings<'a>(id: &'a str, cx: &'a App) -> Option<&'a OpenAiCompatibleSettings> { + AllLanguageModelSettings::get_global(cx) + .openai_compatible + .get(id) + } + + let state = cx.new(|cx| State { + id: id.clone(), + env_var_name: format!("{}_API_KEY", id).to_case(Case::Constant).into(), + settings: resolve_settings(&id, cx).cloned().unwrap_or_default(), + api_key: None, + api_key_from_env: false, + _subscription: cx.observe_global::(|this: &mut State, cx| { + let Some(settings) = resolve_settings(&this.id, cx) else { + return; + }; + if &this.settings != settings { + this.settings = settings.clone(); + cx.notify(); + } + }), + }); + + Self { + id: id.clone().into(), + name: id.into(), + http_client, + state, + } + } + + fn create_language_model(&self, model: AvailableModel) -> Arc { + Arc::new(OpenAiCompatibleLanguageModel { + id: LanguageModelId::from(model.name.clone()), + provider_id: self.id.clone(), + provider_name: self.name.clone(), + model, + state: self.state.clone(), + http_client: self.http_client.clone(), + request_limiter: RateLimiter::new(4), + }) + } +} + +impl LanguageModelProviderState for OpenAiCompatibleLanguageModelProvider { + type ObservableEntity = State; + + fn observable_entity(&self) -> Option> { + Some(self.state.clone()) + } +} + +impl LanguageModelProvider for OpenAiCompatibleLanguageModelProvider { + fn id(&self) -> LanguageModelProviderId { + self.id.clone() + } + + fn name(&self) -> LanguageModelProviderName { + self.name.clone() + } + + fn icon(&self) -> IconName { + IconName::AiOpenAiCompat + } + + fn default_model(&self, cx: &App) -> Option> { + self.state + .read(cx) + .settings + .available_models + .first() + .map(|model| self.create_language_model(model.clone())) + } + + fn default_fast_model(&self, _cx: &App) -> Option> { + None + } + + fn provided_models(&self, cx: &App) -> Vec> { + self.state + .read(cx) + .settings + .available_models + .iter() + .map(|model| self.create_language_model(model.clone())) + .collect() + } + + fn is_authenticated(&self, cx: &App) -> bool { + self.state.read(cx).is_authenticated() + } + + fn authenticate(&self, cx: &mut App) -> Task> { + self.state.update(cx, |state, cx| state.authenticate(cx)) + } + + fn configuration_view(&self, window: &mut Window, cx: &mut App) -> AnyView { + cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx)) + .into() + } + + fn reset_credentials(&self, cx: &mut App) -> Task> { + self.state.update(cx, |state, cx| state.reset_api_key(cx)) + } +} + +pub struct OpenAiCompatibleLanguageModel { + id: LanguageModelId, + provider_id: LanguageModelProviderId, + provider_name: LanguageModelProviderName, + model: AvailableModel, + state: gpui::Entity, + http_client: Arc, + request_limiter: RateLimiter, +} + +impl OpenAiCompatibleLanguageModel { + fn stream_completion( + &self, + request: open_ai::Request, + cx: &AsyncApp, + ) -> BoxFuture<'static, Result>>> + { + let http_client = self.http_client.clone(); + let Ok((api_key, api_url)) = cx.read_entity(&self.state, |state, _| { + (state.api_key.clone(), state.settings.api_url.clone()) + }) else { + return 
futures::future::ready(Err(anyhow!("App state dropped"))).boxed(); + }; + + let provider = self.provider_name.clone(); + let future = self.request_limiter.stream(async move { + let Some(api_key) = api_key else { + return Err(LanguageModelCompletionError::NoApiKey { provider }); + }; + let request = stream_completion(http_client.as_ref(), &api_url, &api_key, request); + let response = request.await?; + Ok(response) + }); + + async move { Ok(future.await?.boxed()) }.boxed() + } +} + +impl LanguageModel for OpenAiCompatibleLanguageModel { + fn id(&self) -> LanguageModelId { + self.id.clone() + } + + fn name(&self) -> LanguageModelName { + LanguageModelName::from( + self.model + .display_name + .clone() + .unwrap_or_else(|| self.model.name.clone()), + ) + } + + fn provider_id(&self) -> LanguageModelProviderId { + self.provider_id.clone() + } + + fn provider_name(&self) -> LanguageModelProviderName { + self.provider_name.clone() + } + + fn supports_tools(&self) -> bool { + true + } + + fn supports_images(&self) -> bool { + false + } + + fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool { + match choice { + LanguageModelToolChoice::Auto => true, + LanguageModelToolChoice::Any => true, + LanguageModelToolChoice::None => true, + } + } + + fn telemetry_id(&self) -> String { + format!("openai/{}", self.model.name) + } + + fn max_token_count(&self) -> u64 { + self.model.max_tokens + } + + fn max_output_tokens(&self) -> Option { + self.model.max_output_tokens + } + + fn count_tokens( + &self, + request: LanguageModelRequest, + cx: &App, + ) -> BoxFuture<'static, Result> { + let max_token_count = self.max_token_count(); + cx.background_spawn(async move { + let messages = super::open_ai::collect_tiktoken_messages(request); + let model = if max_token_count >= 100_000 { + // If the max tokens is 100k or more, it is likely the o200k_base tokenizer from gpt4o + "gpt-4o" + } else { + // Otherwise fallback to gpt-4, since only cl100k_base and o200k_base are + // supported with this tiktoken method + "gpt-4" + }; + tiktoken_rs::num_tokens_from_messages(model, &messages).map(|tokens| tokens as u64) + }) + .boxed() + } + + fn stream_completion( + &self, + request: LanguageModelRequest, + cx: &AsyncApp, + ) -> BoxFuture< + 'static, + Result< + futures::stream::BoxStream< + 'static, + Result, + >, + LanguageModelCompletionError, + >, + > { + let request = into_open_ai(request, &self.model.name, true, self.max_output_tokens()); + let completions = self.stream_completion(request, cx); + async move { + let mapper = OpenAiEventMapper::new(); + Ok(mapper.map_stream(completions.await?).boxed()) + } + .boxed() + } +} + +struct ConfigurationView { + api_key_editor: Entity, + state: gpui::Entity, + load_credentials_task: Option>, +} + +impl ConfigurationView { + fn new(state: gpui::Entity, window: &mut Window, cx: &mut Context) -> Self { + let api_key_editor = cx.new(|cx| { + SingleLineInput::new( + window, + cx, + "000000000000000000000000000000000000000000000000000", + ) + }); + + cx.observe(&state, |_, _, cx| { + cx.notify(); + }) + .detach(); + + let load_credentials_task = Some(cx.spawn_in(window, { + let state = state.clone(); + async move |this, cx| { + if let Some(task) = state + .update(cx, |state, cx| state.authenticate(cx)) + .log_err() + { + // We don't log an error, because "not signed in" is also an error. 
+ let _ = task.await; + } + this.update(cx, |this, cx| { + this.load_credentials_task = None; + cx.notify(); + }) + .log_err(); + } + })); + + Self { + api_key_editor, + state, + load_credentials_task, + } + } + + fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context) { + let api_key = self + .api_key_editor + .read(cx) + .editor() + .read(cx) + .text(cx) + .trim() + .to_string(); + + // Don't proceed if no API key is provided and we're not authenticated + if api_key.is_empty() && !self.state.read(cx).is_authenticated() { + return; + } + + let state = self.state.clone(); + cx.spawn_in(window, async move |_, cx| { + state + .update(cx, |state, cx| state.set_api_key(api_key, cx))? + .await + }) + .detach_and_log_err(cx); + + cx.notify(); + } + + fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context) { + self.api_key_editor.update(cx, |input, cx| { + input.editor.update(cx, |editor, cx| { + editor.set_text("", window, cx); + }); + }); + + let state = self.state.clone(); + cx.spawn_in(window, async move |_, cx| { + state.update(cx, |state, cx| state.reset_api_key(cx))?.await + }) + .detach_and_log_err(cx); + + cx.notify(); + } + + fn should_render_editor(&self, cx: &mut Context) -> bool { + !self.state.read(cx).is_authenticated() + } +} + +impl Render for ConfigurationView { + fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { + let env_var_set = self.state.read(cx).api_key_from_env; + let env_var_name = self.state.read(cx).env_var_name.clone(); + + let api_key_section = if self.should_render_editor(cx) { + v_flex() + .on_action(cx.listener(Self::save_api_key)) + .child(Label::new("To use Zed's assistant with an OpenAI compatible provider, you need to add an API key.")) + .child( + div() + .pt(DynamicSpacing::Base04.rems(cx)) + .child(self.api_key_editor.clone()) + ) + .child( + Label::new( + format!("You can also assign the {env_var_name} environment variable and restart Zed."), + ) + .size(LabelSize::Small).color(Color::Muted), + ) + .into_any() + } else { + h_flex() + .mt_1() + .p_1() + .justify_between() + .rounded_md() + .border_1() + .border_color(cx.theme().colors().border) + .bg(cx.theme().colors().background) + .child( + h_flex() + .gap_1() + .child(Icon::new(IconName::Check).color(Color::Success)) + .child(Label::new(if env_var_set { + format!("API key set in {env_var_name} environment variable.") + } else { + "API key configured.".to_string() + })), + ) + .child( + Button::new("reset-api-key", "Reset API Key") + .label_size(LabelSize::Small) + .icon(IconName::Undo) + .icon_size(IconSize::Small) + .icon_position(IconPosition::Start) + .layer(ElevationIndex::ModalSurface) + .when(env_var_set, |this| { + this.tooltip(Tooltip::text(format!("To reset your API key, unset the {env_var_name} environment variable."))) + }) + .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx))), + ) + .into_any() + }; + + if self.load_credentials_task.is_some() { + div().child(Label::new("Loading credentials…")).into_any() + } else { + v_flex().size_full().child(api_key_section).into_any() + } + } +} diff --git a/crates/language_models/src/settings.rs b/crates/language_models/src/settings.rs index dafbb62910..b163585aa7 100644 --- a/crates/language_models/src/settings.rs +++ b/crates/language_models/src/settings.rs @@ -1,4 +1,7 @@ +use std::sync::Arc; + use anyhow::Result; +use collections::HashMap; use gpui::App; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; @@ -15,13 +18,14 @@ use crate::provider::{ 
mistral::MistralSettings, ollama::OllamaSettings, open_ai::OpenAiSettings, + open_ai_compatible::OpenAiCompatibleSettings, open_router::OpenRouterSettings, vercel::VercelSettings, x_ai::XAiSettings, }; /// Initializes the language model settings. -pub fn init(cx: &mut App) { +pub fn init_settings(cx: &mut App) { AllLanguageModelSettings::register(cx); } @@ -36,6 +40,7 @@ pub struct AllLanguageModelSettings { pub ollama: OllamaSettings, pub open_router: OpenRouterSettings, pub openai: OpenAiSettings, + pub openai_compatible: HashMap, OpenAiCompatibleSettings>, pub vercel: VercelSettings, pub x_ai: XAiSettings, pub zed_dot_dev: ZedDotDevSettings, @@ -52,6 +57,7 @@ pub struct AllLanguageModelSettingsContent { pub ollama: Option, pub open_router: Option, pub openai: Option, + pub openai_compatible: Option, OpenAiCompatibleSettingsContent>>, pub vercel: Option, pub x_ai: Option, #[serde(rename = "zed.dev")] @@ -103,6 +109,12 @@ pub struct OpenAiSettingsContent { pub available_models: Option>, } +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)] +pub struct OpenAiCompatibleSettingsContent { + pub api_url: String, + pub available_models: Vec, +} + #[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)] pub struct VercelSettingsContent { pub api_url: Option, @@ -226,6 +238,19 @@ impl settings::Settings for AllLanguageModelSettings { openai.as_ref().and_then(|s| s.available_models.clone()), ); + // OpenAI Compatible + if let Some(openai_compatible) = value.openai_compatible.clone() { + for (id, openai_compatible_settings) in openai_compatible { + settings.openai_compatible.insert( + id, + OpenAiCompatibleSettings { + api_url: openai_compatible_settings.api_url, + available_models: openai_compatible_settings.available_models, + }, + ); + } + } + // Vercel let vercel = value.vercel.clone(); merge( diff --git a/crates/ui/src/components/modal.rs b/crates/ui/src/components/modal.rs index 2e926b7593..2145b34ef2 100644 --- a/crates/ui/src/components/modal.rs +++ b/crates/ui/src/components/modal.rs @@ -93,6 +93,7 @@ impl RenderOnce for Modal { #[derive(IntoElement)] pub struct ModalHeader { headline: Option, + description: Option, children: SmallVec<[AnyElement; 2]>, show_dismiss_button: bool, show_back_button: bool, @@ -108,6 +109,7 @@ impl ModalHeader { pub fn new() -> Self { Self { headline: None, + description: None, children: SmallVec::new(), show_dismiss_button: false, show_back_button: false, @@ -123,6 +125,11 @@ impl ModalHeader { self } + pub fn description(mut self, description: impl Into) -> Self { + self.description = Some(description.into()); + self + } + pub fn show_dismiss_button(mut self, show: bool) -> Self { self.show_dismiss_button = show; self @@ -171,7 +178,14 @@ impl RenderOnce for ModalHeader { }), ) }) - .child(div().flex_1().children(children)) + .child( + v_flex().flex_1().children(children).when_some( + self.description, + |this, description| { + this.child(Label::new(description).color(Color::Muted).mb_2()) + }, + ), + ) .when(self.show_dismiss_button, |this| { this.child( IconButton::new("dismiss", IconName::Close) diff --git a/crates/ui_input/src/ui_input.rs b/crates/ui_input/src/ui_input.rs index 18aa732e81..309b3f62f6 100644 --- a/crates/ui_input/src/ui_input.rs +++ b/crates/ui_input/src/ui_input.rs @@ -97,6 +97,10 @@ impl SingleLineInput { pub fn editor(&self) -> &Entity { &self.editor } + + pub fn text(&self, cx: &App) -> String { + self.editor().read(cx).text(cx) + } } impl Render for SingleLineInput { diff --git 
a/docs/src/ai/configuration.md b/docs/src/ai/configuration.md index 1201fa2173..414da2206f 100644 --- a/docs/src/ai/configuration.md +++ b/docs/src/ai/configuration.md @@ -444,14 +444,17 @@ Custom models will be listed in the model dropdown in the Agent Panel. ### OpenAI API Compatible {#openai-api-compatible} -Zed supports using OpenAI compatible APIs by specifying a custom `endpoint` and `available_models` for the OpenAI provider. +Zed supports using [OpenAI-compatible APIs](https://platform.openai.com/docs/api-reference/chat) by specifying a custom `api_url` and `available_models` for the OpenAI provider. This is useful for connecting to other hosted services (like Together AI, Anyscale, etc.) or local models. -Zed supports using OpenAI compatible APIs by specifying a custom `api_url` and `available_models` for the OpenAI provider. This is useful for connecting to other hosted services (like Together AI, Anyscale, etc.) or local models. +To configure a compatible API, you can add a custom API URL for OpenAI either via the UI (currently available only in Preview) or by editing your `settings.json`. -To configure a compatible API, you can add a custom API URL for OpenAI either via the UI or by editing your `settings.json`. For example, to connect to [Together AI](https://www.together.ai/): -1. Get an API key from your [Together AI account](https://api.together.ai/settings/api-keys). -2. Add the following to your `settings.json`: +1. Get an API key from your [Together AI account](https://api.together.ai/settings/api-keys). +2. Go to the Agent Panel's settings view, click the "Add Provider" button, and then select the "OpenAI" menu item. +3. Fill in the requested fields, such as the `api_url`, `api_key`, and available models. + +Alternatively, you can add it via `settings.json`: ```json { From 939f9fffa3f7d305280957dcf1573989ab429a7b Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 22 Jul 2025 11:27:58 -0400 Subject: [PATCH 08/25] collab: Remove unneeded caching of Stripe meters (#34900) This PR removes the caching of Stripe meters on the `StripeBilling` object, as we weren't actually reading them anywhere.
Release Notes: - N/A --- crates/collab/src/stripe_billing.rs | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/crates/collab/src/stripe_billing.rs b/crates/collab/src/stripe_billing.rs index 3d52dea0e3..707928d5cd 100644 --- a/crates/collab/src/stripe_billing.rs +++ b/crates/collab/src/stripe_billing.rs @@ -17,7 +17,7 @@ use crate::stripe_client::{ StripeCreateCheckoutSessionSubscriptionData, StripeCreateMeterEventParams, StripeCreateMeterEventPayload, StripeCreateSubscriptionItems, StripeCreateSubscriptionParams, StripeCustomerId, StripeCustomerUpdate, StripeCustomerUpdateAddress, StripeCustomerUpdateName, - StripeMeter, StripePrice, StripePriceId, StripeSubscription, StripeSubscriptionId, + StripePrice, StripePriceId, StripeSubscription, StripeSubscriptionId, StripeSubscriptionTrialSettings, StripeSubscriptionTrialSettingsEndBehavior, StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod, StripeTaxIdCollection, UpdateSubscriptionItems, UpdateSubscriptionParams, @@ -30,7 +30,6 @@ pub struct StripeBilling { #[derive(Default)] struct StripeBillingState { - meters_by_event_name: HashMap, price_ids_by_meter_id: HashMap, prices_by_lookup_key: HashMap, } @@ -60,14 +59,7 @@ impl StripeBilling { let mut state = self.state.write().await; - let (meters, prices) = - futures::try_join!(self.client.list_meters(), self.client.list_prices())?; - - for meter in meters { - state - .meters_by_event_name - .insert(meter.event_name.clone(), meter); - } + let prices = self.client.list_prices().await?; for price in prices { if let Some(lookup_key) = price.lookup_key.clone() { From 96f994279167569a4458bb9f5cd636bfb82c0fac Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Tue, 22 Jul 2025 11:32:39 -0400 Subject: [PATCH 09/25] Add setting to disable all AI features (#34896) https://github.com/user-attachments/assets/674bba41-40ac-4a98-99e4-0b47f9097b6a Release Notes: - Added setting to disable all AI features --- Cargo.lock | 2 + assets/settings/default.json | 4 ++ crates/agent_ui/Cargo.toml | 1 + crates/agent_ui/src/agent_panel.rs | 8 ++- crates/agent_ui/src/agent_ui.rs | 65 ++++++++++++++++++- crates/agent_ui/src/inline_assistant.rs | 44 ++++++++++++- crates/assistant_tools/Cargo.toml | 1 + crates/assistant_tools/src/assistant_tools.rs | 3 +- crates/client/src/client.rs | 28 ++++++++ crates/copilot/src/copilot.rs | 46 ++++++++----- crates/git_ui/Cargo.toml | 1 + crates/git_ui/src/commit_modal.rs | 14 ++-- crates/git_ui/src/git_panel.rs | 19 ++++-- .../src/inline_completion_button.rs | 7 +- crates/welcome/src/welcome.rs | 36 +++++----- crates/workspace/src/dock.rs | 13 +++- crates/zed/src/main.rs | 1 + crates/zed/src/zed/quick_action_bar.rs | 26 +++++++- crates/zeta/src/init.rs | 57 ++++++++++++---- 19 files changed, 308 insertions(+), 68 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c7297e6d59..6237bac204 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -210,6 +210,7 @@ dependencies = [ "chrono", "client", "collections", + "command_palette_hooks", "component", "context_server", "db", @@ -6360,6 +6361,7 @@ dependencies = [ "buffer_diff", "call", "chrono", + "client", "collections", "command_palette_hooks", "component", diff --git a/assets/settings/default.json b/assets/settings/default.json index 309afaccf5..dab1684aef 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -1076,6 +1076,10 @@ // Send anonymized usage data like what languages you're using Zed with. "metrics": true }, + // Whether to disable all AI features in Zed. 
+ // + // Default: false + "disable_ai": false, // Automatically update Zed. This setting may be ignored on Linux if // installed through a package manager. "auto_update": true, diff --git a/crates/agent_ui/Cargo.toml b/crates/agent_ui/Cargo.toml index 33042c0ebd..7d3b84e42e 100644 --- a/crates/agent_ui/Cargo.toml +++ b/crates/agent_ui/Cargo.toml @@ -32,6 +32,7 @@ buffer_diff.workspace = true chrono.workspace = true client.workspace = true collections.workspace = true +command_palette_hooks.workspace = true component.workspace = true context_server.workspace = true db.workspace = true diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index fc803c730e..7e9360a0cb 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -43,7 +43,7 @@ use anyhow::{Result, anyhow}; use assistant_context::{AssistantContext, ContextEvent, ContextSummary}; use assistant_slash_command::SlashCommandWorkingSet; use assistant_tool::ToolWorkingSet; -use client::{UserStore, zed_urls}; +use client::{DisableAiSettings, UserStore, zed_urls}; use editor::{Anchor, AnchorRangeExt as _, Editor, EditorEvent, MultiBuffer}; use feature_flags::{self, FeatureFlagAppExt}; use fs::Fs; @@ -744,6 +744,7 @@ impl AgentPanel { if workspace .panel::(cx) .is_some_and(|panel| panel.read(cx).enabled(cx)) + && !DisableAiSettings::get_global(cx).disable_ai { workspace.toggle_panel_focus::(window, cx); } @@ -1665,7 +1666,10 @@ impl Panel for AgentPanel { } fn icon(&self, _window: &Window, cx: &App) -> Option { - (self.enabled(cx) && AgentSettings::get_global(cx).button).then_some(IconName::ZedAssistant) + (self.enabled(cx) + && AgentSettings::get_global(cx).button + && !DisableAiSettings::get_global(cx).disable_ai) + .then_some(IconName::ZedAssistant) } fn icon_tooltip(&self, _window: &Window, _cx: &App) -> Option<&'static str> { diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs index 7f69e8f66e..cac0f1adac 100644 --- a/crates/agent_ui/src/agent_ui.rs +++ b/crates/agent_ui/src/agent_ui.rs @@ -31,7 +31,8 @@ use std::sync::Arc; use agent::{Thread, ThreadId}; use agent_settings::{AgentProfileId, AgentSettings, LanguageModelSelection}; use assistant_slash_command::SlashCommandRegistry; -use client::Client; +use client::{Client, DisableAiSettings}; +use command_palette_hooks::CommandPaletteFilter; use feature_flags::FeatureFlagAppExt as _; use fs::Fs; use gpui::{Action, App, Entity, actions}; @@ -43,6 +44,7 @@ use prompt_store::PromptBuilder; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings as _, SettingsStore}; +use std::any::TypeId; pub use crate::active_thread::ActiveThread; use crate::agent_configuration::{ConfigureContextServerModal, ManageProfilesModal}; @@ -52,6 +54,7 @@ use crate::slash_command_settings::SlashCommandSettings; pub use agent_diff::{AgentDiffPane, AgentDiffToolbar}; pub use text_thread_editor::{AgentPanelDelegate, TextThreadEditor}; pub use ui::preview::{all_agent_previews, get_agent_preview}; +use zed_actions; actions!( agent, @@ -241,6 +244,66 @@ pub fn init( }) .detach(); cx.observe_new(ManageProfilesModal::register).detach(); + + // Update command palette filter based on AI settings + update_command_palette_filter(cx); + + // Watch for settings changes + cx.observe_global::(|app_cx| { + // When settings change, update the command palette filter + update_command_palette_filter(app_cx); + }) + .detach(); +} + +fn update_command_palette_filter(cx: &mut App) { + let disable_ai = 
DisableAiSettings::get_global(cx).disable_ai; + CommandPaletteFilter::update_global(cx, |filter, _| { + if disable_ai { + filter.hide_namespace("agent"); + filter.hide_namespace("assistant"); + filter.hide_namespace("zed_predict_onboarding"); + filter.hide_namespace("edit_prediction"); + + use editor::actions::{ + AcceptEditPrediction, AcceptPartialEditPrediction, NextEditPrediction, + PreviousEditPrediction, ShowEditPrediction, ToggleEditPrediction, + }; + let edit_prediction_actions = [ + TypeId::of::(), + TypeId::of::(), + TypeId::of::(), + TypeId::of::(), + TypeId::of::(), + TypeId::of::(), + ]; + filter.hide_action_types(&edit_prediction_actions); + filter.hide_action_types(&[TypeId::of::()]); + } else { + filter.show_namespace("agent"); + filter.show_namespace("assistant"); + filter.show_namespace("zed_predict_onboarding"); + + filter.show_namespace("edit_prediction"); + + use editor::actions::{ + AcceptEditPrediction, AcceptPartialEditPrediction, NextEditPrediction, + PreviousEditPrediction, ShowEditPrediction, ToggleEditPrediction, + }; + let edit_prediction_actions = [ + TypeId::of::(), + TypeId::of::(), + TypeId::of::(), + TypeId::of::(), + TypeId::of::(), + TypeId::of::(), + ]; + filter.show_action_types(edit_prediction_actions.iter()); + + filter + .show_action_types([TypeId::of::()].iter()); + } + }); } fn init_language_model_settings(cx: &mut App) { diff --git a/crates/agent_ui/src/inline_assistant.rs b/crates/agent_ui/src/inline_assistant.rs index 65b72cbba5..44ec050ae2 100644 --- a/crates/agent_ui/src/inline_assistant.rs +++ b/crates/agent_ui/src/inline_assistant.rs @@ -16,7 +16,7 @@ use agent::{ }; use agent_settings::AgentSettings; use anyhow::{Context as _, Result}; -use client::telemetry::Telemetry; +use client::{DisableAiSettings, telemetry::Telemetry}; use collections::{HashMap, HashSet, VecDeque, hash_map}; use editor::SelectionEffects; use editor::{ @@ -57,6 +57,17 @@ pub fn init( cx: &mut App, ) { cx.set_global(InlineAssistant::new(fs, prompt_builder, telemetry)); + + cx.observe_global::(|cx| { + if DisableAiSettings::get_global(cx).disable_ai { + // Hide any active inline assist UI when AI is disabled + InlineAssistant::update_global(cx, |assistant, cx| { + assistant.cancel_all_active_completions(cx); + }); + } + }) + .detach(); + cx.observe_new(|_workspace: &mut Workspace, window, cx| { let Some(window) = window else { return; @@ -141,6 +152,26 @@ impl InlineAssistant { .detach(); } + /// Hides all active inline assists when AI is disabled + pub fn cancel_all_active_completions(&mut self, cx: &mut App) { + // Cancel all active completions in editors + for (editor_handle, _) in self.assists_by_editor.iter() { + if let Some(editor) = editor_handle.upgrade() { + let windows = cx.windows(); + if !windows.is_empty() { + let window = windows[0]; + let _ = window.update(cx, |_, window, cx| { + editor.update(cx, |editor, cx| { + if editor.has_active_inline_completion() { + editor.cancel(&Default::default(), window, cx); + } + }); + }); + } + } + } + } + fn handle_workspace_event( &mut self, workspace: Entity, @@ -176,7 +207,7 @@ impl InlineAssistant { window: &mut Window, cx: &mut App, ) { - let is_assistant2_enabled = true; + let is_assistant2_enabled = !DisableAiSettings::get_global(cx).disable_ai; if let Some(editor) = item.act_as::(cx) { editor.update(cx, |editor, cx| { @@ -199,6 +230,13 @@ impl InlineAssistant { cx, ); + if DisableAiSettings::get_global(cx).disable_ai { + // Cancel any active completions + if editor.has_active_inline_completion() { + 
editor.cancel(&Default::default(), window, cx); + } + } + // Remove the Assistant1 code action provider, as it still might be registered. editor.remove_code_action_provider("assistant".into(), window, cx); } else { @@ -219,7 +257,7 @@ impl InlineAssistant { cx: &mut Context, ) { let settings = AgentSettings::get_global(cx); - if !settings.enabled { + if !settings.enabled || DisableAiSettings::get_global(cx).disable_ai { return; } diff --git a/crates/assistant_tools/Cargo.toml b/crates/assistant_tools/Cargo.toml index e234b62b14..146800e094 100644 --- a/crates/assistant_tools/Cargo.toml +++ b/crates/assistant_tools/Cargo.toml @@ -20,6 +20,7 @@ anyhow.workspace = true assistant_tool.workspace = true buffer_diff.workspace = true chrono.workspace = true +client.workspace = true collections.workspace = true component.workspace = true derive_more.workspace = true diff --git a/crates/assistant_tools/src/assistant_tools.rs b/crates/assistant_tools/src/assistant_tools.rs index eef792f526..57fdc51336 100644 --- a/crates/assistant_tools/src/assistant_tools.rs +++ b/crates/assistant_tools/src/assistant_tools.rs @@ -20,14 +20,13 @@ mod thinking_tool; mod ui; mod web_search_tool; -use std::sync::Arc; - use assistant_tool::ToolRegistry; use copy_path_tool::CopyPathTool; use gpui::{App, Entity}; use http_client::HttpClientWithUrl; use language_model::LanguageModelRegistry; use move_path_tool::MovePathTool; +use std::sync::Arc; use web_search_tool::WebSearchTool; pub(crate) use templates::*; diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 1be8ffdb55..81bb95b514 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -151,6 +151,7 @@ impl Settings for ProxySettings { pub fn init_settings(cx: &mut App) { TelemetrySettings::register(cx); + DisableAiSettings::register(cx); ClientSettings::register(cx); ProxySettings::register(cx); } @@ -548,6 +549,33 @@ impl settings::Settings for TelemetrySettings { } } +/// Whether to disable all AI features in Zed. 
+/// +/// Default: false +#[derive(Copy, Clone, Debug)] +pub struct DisableAiSettings { + pub disable_ai: bool, +} + +impl settings::Settings for DisableAiSettings { + const KEY: Option<&'static str> = Some("disable_ai"); + + type FileContent = Option; + + fn load(sources: SettingsSources, _: &mut App) -> Result { + Ok(Self { + disable_ai: sources + .user + .or(sources.server) + .copied() + .flatten() + .unwrap_or(sources.default.ok_or_else(Self::missing_default)?), + }) + } + + fn import_from_vscode(_vscode: &settings::VsCodeSettings, _current: &mut Self::FileContent) {} +} + impl Client { pub fn new( clock: Arc, diff --git a/crates/copilot/src/copilot.rs b/crates/copilot/src/copilot.rs index 1966d1a389..e11242cb15 100644 --- a/crates/copilot/src/copilot.rs +++ b/crates/copilot/src/copilot.rs @@ -6,6 +6,7 @@ mod sign_in; use crate::sign_in::initiate_sign_in_within_workspace; use ::fs::Fs; use anyhow::{Context as _, Result, anyhow}; +use client::DisableAiSettings; use collections::{HashMap, HashSet}; use command_palette_hooks::CommandPaletteFilter; use futures::{Future, FutureExt, TryFutureExt, channel::oneshot, future::Shared}; @@ -25,6 +26,7 @@ use node_runtime::NodeRuntime; use parking_lot::Mutex; use request::StatusNotification; use serde_json::json; +use settings::Settings; use settings::SettingsStore; use sign_in::{reinstall_and_sign_in_within_workspace, sign_out_within_workspace}; use std::collections::hash_map::Entry; @@ -93,26 +95,34 @@ pub fn init( let copilot_auth_action_types = [TypeId::of::()]; let copilot_no_auth_action_types = [TypeId::of::()]; let status = handle.read(cx).status(); + + let is_ai_disabled = DisableAiSettings::get_global(cx).disable_ai; let filter = CommandPaletteFilter::global_mut(cx); - match status { - Status::Disabled => { - filter.hide_action_types(&copilot_action_types); - filter.hide_action_types(&copilot_auth_action_types); - filter.hide_action_types(&copilot_no_auth_action_types); - } - Status::Authorized => { - filter.hide_action_types(&copilot_no_auth_action_types); - filter.show_action_types( - copilot_action_types - .iter() - .chain(&copilot_auth_action_types), - ); - } - _ => { - filter.hide_action_types(&copilot_action_types); - filter.hide_action_types(&copilot_auth_action_types); - filter.show_action_types(copilot_no_auth_action_types.iter()); + if is_ai_disabled { + filter.hide_action_types(&copilot_action_types); + filter.hide_action_types(&copilot_auth_action_types); + filter.hide_action_types(&copilot_no_auth_action_types); + } else { + match status { + Status::Disabled => { + filter.hide_action_types(&copilot_action_types); + filter.hide_action_types(&copilot_auth_action_types); + filter.hide_action_types(&copilot_no_auth_action_types); + } + Status::Authorized => { + filter.hide_action_types(&copilot_no_auth_action_types); + filter.show_action_types( + copilot_action_types + .iter() + .chain(&copilot_auth_action_types), + ); + } + _ => { + filter.hide_action_types(&copilot_action_types); + filter.hide_action_types(&copilot_auth_action_types); + filter.show_action_types(copilot_no_auth_action_types.iter()); + } } } }) diff --git a/crates/git_ui/Cargo.toml b/crates/git_ui/Cargo.toml index 6e04dcb656..2fb80b7e73 100644 --- a/crates/git_ui/Cargo.toml +++ b/crates/git_ui/Cargo.toml @@ -23,6 +23,7 @@ askpass.workspace = true buffer_diff.workspace = true call.workspace = true chrono.workspace = true +client.workspace = true collections.workspace = true command_palette_hooks.workspace = true component.workspace = true diff --git 
a/crates/git_ui/src/commit_modal.rs b/crates/git_ui/src/commit_modal.rs index ac3d24e3eb..b99f628806 100644 --- a/crates/git_ui/src/commit_modal.rs +++ b/crates/git_ui/src/commit_modal.rs @@ -1,8 +1,10 @@ use crate::branch_picker::{self, BranchList}; use crate::git_panel::{GitPanel, commit_message_editor}; +use client::DisableAiSettings; use git::repository::CommitOptions; use git::{Amend, Commit, GenerateCommitMessage, Signoff}; use panel::{panel_button, panel_editor_style}; +use settings::Settings; use ui::{ ContextMenu, KeybindingHint, PopoverMenu, PopoverMenuHandle, SplitButton, Tooltip, prelude::*, }; @@ -569,11 +571,13 @@ impl Render for CommitModal { .on_action(cx.listener(Self::dismiss)) .on_action(cx.listener(Self::commit)) .on_action(cx.listener(Self::amend)) - .on_action(cx.listener(|this, _: &GenerateCommitMessage, _, cx| { - this.git_panel.update(cx, |panel, cx| { - panel.generate_commit_message(cx); - }) - })) + .when(!DisableAiSettings::get_global(cx).disable_ai, |this| { + this.on_action(cx.listener(|this, _: &GenerateCommitMessage, _, cx| { + this.git_panel.update(cx, |panel, cx| { + panel.generate_commit_message(cx); + }) + })) + }) .on_action( cx.listener(|this, _: &zed_actions::git::Branch, window, cx| { this.toggle_branch_selector(window, cx); diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index e998586af4..061833a6c7 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -12,6 +12,7 @@ use crate::{ use agent_settings::AgentSettings; use anyhow::Context as _; use askpass::AskPassDelegate; +use client::DisableAiSettings; use db::kvp::KEY_VALUE_STORE; use editor::{ Editor, EditorElement, EditorMode, EditorSettings, MultiBuffer, ShowScrollbar, @@ -53,7 +54,7 @@ use project::{ git_store::{GitStoreEvent, Repository}, }; use serde::{Deserialize, Serialize}; -use settings::{Settings as _, SettingsStore}; +use settings::{Settings, SettingsStore}; use std::future::Future; use std::ops::Range; use std::path::{Path, PathBuf}; @@ -464,9 +465,14 @@ impl GitPanel { }; let mut assistant_enabled = AgentSettings::get_global(cx).enabled; + let mut was_ai_disabled = DisableAiSettings::get_global(cx).disable_ai; let _settings_subscription = cx.observe_global::(move |_, cx| { - if assistant_enabled != AgentSettings::get_global(cx).enabled { + let is_ai_disabled = DisableAiSettings::get_global(cx).disable_ai; + if assistant_enabled != AgentSettings::get_global(cx).enabled + || was_ai_disabled != is_ai_disabled + { assistant_enabled = AgentSettings::get_global(cx).enabled; + was_ai_disabled = is_ai_disabled; cx.notify(); } }); @@ -1806,7 +1812,7 @@ impl GitPanel { /// Generates a commit message using an LLM. 
pub fn generate_commit_message(&mut self, cx: &mut Context) { - if !self.can_commit() { + if !self.can_commit() || DisableAiSettings::get_global(cx).disable_ai { return; } @@ -4305,8 +4311,10 @@ impl GitPanel { } fn current_language_model(cx: &Context<'_, GitPanel>) -> Option> { - agent_settings::AgentSettings::get_global(cx) - .enabled + let is_enabled = agent_settings::AgentSettings::get_global(cx).enabled + && !DisableAiSettings::get_global(cx).disable_ai; + + is_enabled .then(|| { let ConfiguredModel { provider, model } = LanguageModelRegistry::read_global(cx).commit_message_model()?; @@ -5037,6 +5045,7 @@ mod tests { language::init(cx); editor::init(cx); Project::init_settings(cx); + client::DisableAiSettings::register(cx); crate::init(cx); }); } diff --git a/crates/inline_completion_button/src/inline_completion_button.rs b/crates/inline_completion_button/src/inline_completion_button.rs index 8a8eacdc6a..2615a8beef 100644 --- a/crates/inline_completion_button/src/inline_completion_button.rs +++ b/crates/inline_completion_button/src/inline_completion_button.rs @@ -1,5 +1,5 @@ use anyhow::Result; -use client::{UserStore, zed_urls}; +use client::{DisableAiSettings, UserStore, zed_urls}; use copilot::{Copilot, Status}; use editor::{ Editor, SelectionEffects, @@ -72,6 +72,11 @@ enum SupermavenButtonStatus { impl Render for InlineCompletionButton { fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { + // Return empty div if AI is disabled + if DisableAiSettings::get_global(cx).disable_ai { + return div(); + } + let all_language_settings = all_language_settings(None, cx); match all_language_settings.edit_predictions.provider { diff --git a/crates/welcome/src/welcome.rs b/crates/welcome/src/welcome.rs index ea4ac13de7..49bf2031ab 100644 --- a/crates/welcome/src/welcome.rs +++ b/crates/welcome/src/welcome.rs @@ -1,4 +1,4 @@ -use client::{TelemetrySettings, telemetry::Telemetry}; +use client::{DisableAiSettings, TelemetrySettings, telemetry::Telemetry}; use db::kvp::KEY_VALUE_STORE; use gpui::{ Action, App, Context, Entity, EventEmitter, FocusHandle, Focusable, InteractiveElement, @@ -174,23 +174,25 @@ impl Render for WelcomePage { .ok(); })), ) - .child( - Button::new( - "try-zed-edit-prediction", - edit_prediction_label, + .when(!DisableAiSettings::get_global(cx).disable_ai, |parent| { + parent.child( + Button::new( + "edit_prediction_onboarding", + edit_prediction_label, + ) + .disabled(edit_prediction_provider_is_zed) + .icon(IconName::ZedPredict) + .icon_size(IconSize::XSmall) + .icon_color(Color::Muted) + .icon_position(IconPosition::Start) + .on_click( + cx.listener(|_, _, window, cx| { + telemetry::event!("Welcome Screen Try Edit Prediction clicked"); + window.dispatch_action(zed_actions::OpenZedPredictOnboarding.boxed_clone(), cx); + }), + ), ) - .disabled(edit_prediction_provider_is_zed) - .icon(IconName::ZedPredict) - .icon_size(IconSize::XSmall) - .icon_color(Color::Muted) - .icon_position(IconPosition::Start) - .on_click( - cx.listener(|_, _, window, cx| { - telemetry::event!("Welcome Screen Try Edit Prediction clicked"); - window.dispatch_action(zed_actions::OpenZedPredictOnboarding.boxed_clone(), cx); - }), - ), - ) + }) .child( Button::new("edit settings", "Edit Settings") .icon(IconName::Settings) diff --git a/crates/workspace/src/dock.rs b/crates/workspace/src/dock.rs index 4e39c2d182..3f047e2f11 100644 --- a/crates/workspace/src/dock.rs +++ b/crates/workspace/src/dock.rs @@ -242,6 +242,7 @@ struct PanelEntry { pub struct PanelButtons { dock: Entity, + 
_settings_subscription: Subscription, } impl Dock { @@ -373,6 +374,12 @@ impl Dock { }) } + pub fn first_enabled_panel_idx_excluding(&self, exclude_name: &str, cx: &App) -> Option { + self.panel_entries.iter().position(|entry| { + entry.panel.persistent_name() != exclude_name && entry.panel.enabled(cx) + }) + } + fn active_panel_entry(&self) -> Option<&PanelEntry> { self.active_panel_index .and_then(|index| self.panel_entries.get(index)) @@ -833,7 +840,11 @@ impl Render for Dock { impl PanelButtons { pub fn new(dock: Entity, cx: &mut Context) -> Self { cx.observe(&dock, |_, _, cx| cx.notify()).detach(); - Self { dock } + let settings_subscription = cx.observe_global::(|_, cx| cx.notify()); + Self { + dock, + _settings_subscription: settings_subscription, + } } } diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index c9b8eebff6..d0b9c53397 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -554,6 +554,7 @@ pub fn main() { supermaven::init(app_state.client.clone(), cx); language_model::init(app_state.client.clone(), cx); language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx); + agent_settings::init(cx); agent_servers::init(cx); web_search::init(cx); web_search_providers::init(app_state.client.clone(), cx); diff --git a/crates/zed/src/zed/quick_action_bar.rs b/crates/zed/src/zed/quick_action_bar.rs index c95d86c84f..aff124a0bc 100644 --- a/crates/zed/src/zed/quick_action_bar.rs +++ b/crates/zed/src/zed/quick_action_bar.rs @@ -2,6 +2,7 @@ mod preview; mod repl_menu; use agent_settings::AgentSettings; +use client::DisableAiSettings; use editor::actions::{ AddSelectionAbove, AddSelectionBelow, CodeActionSource, DuplicateLineDown, GoToDiagnostic, GoToHunk, GoToPreviousDiagnostic, GoToPreviousHunk, MoveLineDown, MoveLineUp, SelectAll, @@ -32,6 +33,7 @@ const MAX_CODE_ACTION_MENU_LINES: u32 = 16; pub struct QuickActionBar { _inlay_hints_enabled_subscription: Option, + _ai_settings_subscription: Subscription, active_item: Option>, buffer_search_bar: Entity, show: bool, @@ -46,8 +48,28 @@ impl QuickActionBar { workspace: &Workspace, cx: &mut Context, ) -> Self { + let mut was_ai_disabled = DisableAiSettings::get_global(cx).disable_ai; + let mut was_agent_enabled = AgentSettings::get_global(cx).enabled; + let mut was_agent_button = AgentSettings::get_global(cx).button; + + let ai_settings_subscription = cx.observe_global::(move |_, cx| { + let is_ai_disabled = DisableAiSettings::get_global(cx).disable_ai; + let agent_settings = AgentSettings::get_global(cx); + + if was_ai_disabled != is_ai_disabled + || was_agent_enabled != agent_settings.enabled + || was_agent_button != agent_settings.button + { + was_ai_disabled = is_ai_disabled; + was_agent_enabled = agent_settings.enabled; + was_agent_button = agent_settings.button; + cx.notify(); + } + }); + let mut this = Self { _inlay_hints_enabled_subscription: None, + _ai_settings_subscription: ai_settings_subscription, active_item: None, buffer_search_bar, show: true, @@ -575,7 +597,9 @@ impl Render for QuickActionBar { .children(self.render_preview_button(self.workspace.clone(), cx)) .children(search_button) .when( - AgentSettings::get_global(cx).enabled && AgentSettings::get_global(cx).button, + AgentSettings::get_global(cx).enabled + && AgentSettings::get_global(cx).button + && !DisableAiSettings::get_global(cx).disable_ai, |bar| bar.child(assistant_button), ) .children(code_actions_dropdown) diff --git a/crates/zeta/src/init.rs b/crates/zeta/src/init.rs index 4bcd50df88..4a65771223 100644 --- 
a/crates/zeta/src/init.rs +++ b/crates/zeta/src/init.rs @@ -1,10 +1,11 @@ use std::any::{Any, TypeId}; +use client::DisableAiSettings; use command_palette_hooks::CommandPaletteFilter; use feature_flags::{FeatureFlagAppExt as _, PredictEditsRateCompletionsFeatureFlag}; use gpui::actions; use language::language_settings::{AllLanguageSettings, EditPredictionProvider}; -use settings::update_settings_file; +use settings::{Settings, SettingsStore, update_settings_file}; use ui::App; use workspace::Workspace; @@ -21,6 +22,8 @@ actions!( ); pub fn init(cx: &mut App) { + feature_gate_predict_edits_actions(cx); + cx.observe_new(move |workspace: &mut Workspace, _, _cx| { workspace.register_action(|workspace, _: &RateCompletions, window, cx| { if cx.has_flag::() { @@ -53,27 +56,57 @@ pub fn init(cx: &mut App) { }); }) .detach(); - - feature_gate_predict_edits_rating_actions(cx); } -fn feature_gate_predict_edits_rating_actions(cx: &mut App) { +fn feature_gate_predict_edits_actions(cx: &mut App) { let rate_completion_action_types = [TypeId::of::()]; + let reset_onboarding_action_types = [TypeId::of::()]; + let zeta_all_action_types = [ + TypeId::of::(), + TypeId::of::(), + zed_actions::OpenZedPredictOnboarding.type_id(), + TypeId::of::(), + TypeId::of::(), + TypeId::of::(), + TypeId::of::(), + TypeId::of::(), + ]; CommandPaletteFilter::update_global(cx, |filter, _cx| { filter.hide_action_types(&rate_completion_action_types); + filter.hide_action_types(&reset_onboarding_action_types); filter.hide_action_types(&[zed_actions::OpenZedPredictOnboarding.type_id()]); }); + cx.observe_global::(move |cx| { + let is_ai_disabled = DisableAiSettings::get_global(cx).disable_ai; + let has_feature_flag = cx.has_flag::(); + + CommandPaletteFilter::update_global(cx, |filter, _cx| { + if is_ai_disabled { + filter.hide_action_types(&zeta_all_action_types); + } else { + if has_feature_flag { + filter.show_action_types(rate_completion_action_types.iter()); + } else { + filter.hide_action_types(&rate_completion_action_types); + } + } + }); + }) + .detach(); + cx.observe_flag::(move |is_enabled, cx| { - if is_enabled { - CommandPaletteFilter::update_global(cx, |filter, _cx| { - filter.show_action_types(rate_completion_action_types.iter()); - }); - } else { - CommandPaletteFilter::update_global(cx, |filter, _cx| { - filter.hide_action_types(&rate_completion_action_types); - }); + if !DisableAiSettings::get_global(cx).disable_ai { + if is_enabled { + CommandPaletteFilter::update_global(cx, |filter, _cx| { + filter.show_action_types(rate_completion_action_types.iter()); + }); + } else { + CommandPaletteFilter::update_global(cx, |filter, _cx| { + filter.hide_action_types(&rate_completion_action_types); + }); + } } }) .detach(); From 2b888e1d30c5f1876cedd1ddac18bb050a568ae2 Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Tue, 22 Jul 2025 10:45:42 -0500 Subject: [PATCH 10/25] Fix redo after noop format (#34898) Closes #31917 Previously, as of #28457 we used a hack, creating an empty transaction in the history that we then merged formatting changes into in order to correctly identify concurrent edits to the buffer while formatting was happening. This caused issues with noop formatting however, as using the normal API of the buffer history (in an albeit weird way) resulted in the redo stack being cleared, regardless of whether the formatting transaction included edits or not, which is the correct behavior in all other contexts. 
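To make that failure mode concrete, here is a deliberately simplified model of an undo history. This is a standalone sketch, not Zed's actual `Buffer`/`History` types; the struct, its fields, and the `&'static str` edits are illustrative only. It just shows why pushing a transaction through the normal path loses redo even when the transaction is empty, and what a redo-preserving empty push looks like.

```rust
// Illustrative model only; not the real text::History implementation.
#[derive(Debug, Default)]
struct History {
    undo_stack: Vec<Vec<&'static str>>, // each entry is one transaction's edits
    redo_stack: Vec<Vec<&'static str>>,
}

impl History {
    /// Normal path: pushing any transaction invalidates redo, even an empty one.
    fn push_transaction(&mut self, edits: Vec<&'static str>) {
        self.undo_stack.push(edits);
        self.redo_stack.clear();
    }

    /// Redo-preserving variant: push an empty marker transaction and leave the
    /// redo stack alone. The caller must later forget the marker if nothing
    /// gets merged into it.
    fn push_empty_transaction(&mut self) -> usize {
        self.undo_stack.push(Vec::new());
        self.undo_stack.len() - 1
    }

    fn undo(&mut self) {
        if let Some(txn) = self.undo_stack.pop() {
            self.redo_stack.push(txn);
        }
    }
}

fn main() {
    // User types, then undoes: one entry sits on the redo stack.
    let mut history = History::default();
    history.push_transaction(vec!["insert newline"]);
    history.undo();
    assert_eq!(history.redo_stack.len(), 1);

    // Old hack: a no-op format still pushed an (empty) transaction, so redo was lost.
    history.push_transaction(Vec::new());
    assert!(history.redo_stack.is_empty());

    // Redo-preserving push keeps the redo entry intact.
    let mut history = History::default();
    history.push_transaction(vec!["insert newline"]);
    history.undo();
    let _marker = history.push_empty_transaction();
    assert_eq!(history.redo_stack.len(), 1);
}
```
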
This PR fixes the redo issue by codifying the behavior formatting wants, that being the ability to push an empty transaction to the history with no other side-effects (i.e. clearing the redo stack) to detect concurrent edits, with the tradeoff being that it must then manually remove the transaction later if no changes occurred from the formatting. The redo stack is still cleared when there are formatting edits, as the individual format steps use the normal `{start,end}_transaction` methods which clear the redo stack if the finished transaction isn't empty. Release Notes: - Fixed an issue where redo would not work after buffer formatting (including formatting on save) when the formatting did not result in any changes --- crates/editor/src/editor_tests.rs | 70 ++++++++++++++++++++++++++++++- crates/language/src/buffer.rs | 15 +++++++ crates/project/src/lsp_store.rs | 8 +--- crates/text/src/text.rs | 52 ++++++++++++++++++++++- 4 files changed, 137 insertions(+), 8 deletions(-) diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 4efb052c71..fbb877796c 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -9570,6 +9570,74 @@ async fn test_document_format_during_save(cx: &mut TestAppContext) { } } +#[gpui::test] +async fn test_redo_after_noop_format(cx: &mut TestAppContext) { + init_test(cx, |settings| { + settings.defaults.ensure_final_newline_on_save = Some(false); + }); + + let fs = FakeFs::new(cx.executor()); + fs.insert_file(path!("/file.txt"), "foo".into()).await; + + let project = Project::test(fs, [path!("/file.txt").as_ref()], cx).await; + + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/file.txt"), cx) + }) + .await + .unwrap(); + + let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); + let (editor, cx) = cx.add_window_view(|window, cx| { + build_editor_with_project(project.clone(), buffer, window, cx) + }); + editor.update_in(cx, |editor, window, cx| { + editor.change_selections(SelectionEffects::default(), window, cx, |s| { + s.select_ranges([0..0]) + }); + }); + assert!(!cx.read(|cx| editor.is_dirty(cx))); + + editor.update_in(cx, |editor, window, cx| { + editor.handle_input("\n", window, cx) + }); + cx.run_until_parked(); + save(&editor, &project, cx).await; + assert_eq!("\nfoo", editor.read_with(cx, |editor, cx| editor.text(cx))); + + editor.update_in(cx, |editor, window, cx| { + editor.undo(&Default::default(), window, cx); + }); + save(&editor, &project, cx).await; + assert_eq!("foo", editor.read_with(cx, |editor, cx| editor.text(cx))); + + editor.update_in(cx, |editor, window, cx| { + editor.redo(&Default::default(), window, cx); + }); + cx.run_until_parked(); + assert_eq!("\nfoo", editor.read_with(cx, |editor, cx| editor.text(cx))); + + async fn save(editor: &Entity, project: &Entity, cx: &mut VisualTestContext) { + let save = editor + .update_in(cx, |editor, window, cx| { + editor.save( + SaveOptions { + format: true, + autosave: false, + }, + project.clone(), + window, + cx, + ) + }) + .unwrap(); + cx.executor().start_waiting(); + save.await; + assert!(!cx.read(|cx| editor.is_dirty(cx))); + } +} + #[gpui::test] async fn test_multibuffer_format_during_save(cx: &mut TestAppContext) { init_test(cx, |_| {}); @@ -22708,7 +22776,7 @@ pub(crate) fn init_test(cx: &mut TestAppContext, f: fn(&mut AllLanguageSettingsC workspace::init_settings(cx); crate::init(cx); }); - + zlog::init_test(); update_test_language_settings(cx, f); } diff --git 
a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 59aa63ff38..83517accc2 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -2072,6 +2072,21 @@ impl Buffer { self.text.push_transaction(transaction, now); } + /// Differs from `push_transaction` in that it does not clear the redo + /// stack. Intended to be used to create a parent transaction to merge + /// potential child transactions into. + /// + /// The caller is responsible for removing it from the undo history using + /// `forget_transaction` if no edits are merged into it. Otherwise, if edits + /// are merged into this transaction, the caller is responsible for ensuring + /// the redo stack is cleared. The easiest way to ensure the redo stack is + /// cleared is to create transactions with the usual `start_transaction` and + /// `end_transaction` methods and merging the resulting transactions into + /// the transaction created by this method + pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId { + self.text.push_empty_transaction(now) + } + /// Prevent the last transaction from being grouped with any subsequent transactions, /// even if they occur with the buffer's undo grouping duration. pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> { diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 28cbfcdd18..0cd375e0c5 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -1274,15 +1274,11 @@ impl LocalLspStore { // grouped with the previous transaction in the history // based on the transaction group interval buffer.finalize_last_transaction(); - let transaction_id = buffer + buffer .start_transaction() .context("transaction already open")?; - let transaction = buffer - .get_transaction(transaction_id) - .expect("transaction started") - .clone(); buffer.end_transaction(cx); - buffer.push_transaction(transaction, cx.background_executor().now()); + let transaction_id = buffer.push_empty_transaction(cx.background_executor().now()); buffer.finalize_last_transaction(); anyhow::Ok(transaction_id) })??; diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index a2742081f4..aa9682029e 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -320,7 +320,39 @@ impl History { last_edit_at: now, suppress_grouping: false, }); - self.redo_stack.clear(); + } + + /// Differs from `push_transaction` in that it does not clear the redo + /// stack. Intended to be used to create a parent transaction to merge + /// potential child transactions into. + /// + /// The caller is responsible for removing it from the undo history using + /// `forget_transaction` if no edits are merged into it. Otherwise, if edits + /// are merged into this transaction, the caller is responsible for ensuring + /// the redo stack is cleared. 
The easiest way to ensure the redo stack is + /// cleared is to create transactions with the usual `start_transaction` and + /// `end_transaction` methods and merging the resulting transactions into + /// the transaction created by this method + fn push_empty_transaction( + &mut self, + start: clock::Global, + now: Instant, + clock: &mut clock::Lamport, + ) -> TransactionId { + assert_eq!(self.transaction_depth, 0); + let id = clock.tick(); + let transaction = Transaction { + id, + start, + edit_ids: Vec::new(), + }; + self.undo_stack.push(HistoryEntry { + transaction, + first_edit_at: now, + last_edit_at: now, + suppress_grouping: false, + }); + id } fn push_undo(&mut self, op_id: clock::Lamport) { @@ -1495,6 +1527,24 @@ impl Buffer { self.history.push_transaction(transaction, now); } + /// Differs from `push_transaction` in that it does not clear the redo stack. + /// The caller responsible for + /// Differs from `push_transaction` in that it does not clear the redo + /// stack. Intended to be used to create a parent transaction to merge + /// potential child transactions into. + /// + /// The caller is responsible for removing it from the undo history using + /// `forget_transaction` if no edits are merged into it. Otherwise, if edits + /// are merged into this transaction, the caller is responsible for ensuring + /// the redo stack is cleared. The easiest way to ensure the redo stack is + /// cleared is to create transactions with the usual `start_transaction` and + /// `end_transaction` methods and merging the resulting transactions into + /// the transaction created by this method + pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId { + self.history + .push_empty_transaction(self.version.clone(), now, &mut self.lamport_clock) + } + pub fn edited_ranges_for_transaction_id( &self, transaction_id: TransactionId, From 56b99f49fdbba2fe87e4693a646e88a613fdadc0 Mon Sep 17 00:00:00 2001 From: tiagoq <47694386+tiagoq@users.noreply.github.com> Date: Tue, 22 Jul 2025 11:55:24 -0400 Subject: [PATCH 11/25] bedrock: Fix remaining streaming delays (#33931) Closes #26030 *Note: This is my first contribution to Zed* This addresses a second streaming bottleneck in Bedrock that remained after the initial fix in #28281 (released in preview 194). The issue is in the mechanism used to convert Zed's internal `AsyncBody` into the `SdkBody` expected by the Bedrock language provider. We are using a non-streaming converter that buffers responses. **How the fix works:** The AWS SDK provides streaming-compatible converters to create `SdkBody` instances, but these require the input body to implement the `Body` trait from the `http-body` crate. This PR enables streaming by implementing the required trait and switching to the streaming-compatible converter. **Changes (2 commits):** * 1st Commit - **Implement http-body Body trait for AsyncBody:** - Add `http-body = 1.0` dependency (already an indirect dependency) - Implement the `Body` trait for our existing `AsyncBody` type - Uses `poll_frame` to read data chunks asynchronously, preserving streaming behavior * 2nd Commit - **Use streaming-compatible AWS SDK converter:** - Create `SdkBody` using `SdkBody::from_body_1_x()` with the new `Body` trait implementation **Details/FAQ:** **Q: Why add another dependency?** A: We tried to avoid adding a dependency, but the AWS SDK requires the `Body` trait and `http-body` is where it's defined. The crate is already an indirect dependency, making this a reasonable solution. 
**Q: Why modify the shared `http_client` crate instead of just `aws_bedrock_client`?** A: We considered implementing the `Body` trait on a wrapper in `aws_bedrock_client`, but since `AsyncBody` already uses `http` crate types, extending support to the companion `http-body` crate seems reasonable and may benefit other integrations. **Q: How was this bottleneck discovered?** A: After @5herlocked's initial streaming fix in #28281, I tested preview 194 and noticed streaming still had issues. I found a way to reproduce the problem and chatted with @5herlocked about it. He immediately pinpointed the exact location where the issue was occurring, his diagnosis made this fix possible. **Q: How does this relate to the previous fix?** A: #28281 fixed buffering issues higher in the stack, but unfortunately there was another bottleneck lower-down in the aws-http-client. This PR addresses that separate buffering issue. **Q: Does this use zero-copy or one-copy?** A: The `Body` implementation includes one copy. Someone more knowledgeable might be able to achieve a zero-copy approach, but we opted for a conservative approach. The performance impact should not be perceptible in typical usage. **Testing:** Confirmed that Bedrock streaming now works without buffering delays in a local build. Release Notes: - Improved Bedrock streaming by eliminating response buffering delays --------- Co-authored-by: Marshall Bowers --- Cargo.lock | 3 +- Cargo.toml | 1 + crates/aws_http_client/Cargo.toml | 2 - crates/aws_http_client/src/aws_http_client.rs | 39 +++++-------------- crates/http_client/Cargo.toml | 1 + crates/http_client/src/async_body.rs | 22 +++++++++++ .../language_models/src/provider/bedrock.rs | 8 +--- 7 files changed, 36 insertions(+), 40 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6237bac204..c64995b01b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1872,9 +1872,7 @@ version = "0.1.0" dependencies = [ "aws-smithy-runtime-api", "aws-smithy-types", - "futures 0.3.31", "http_client", - "tokio", "workspace-hack", ] @@ -7857,6 +7855,7 @@ dependencies = [ "derive_more 0.99.19", "futures 0.3.31", "http 1.3.1", + "http-body 1.0.1", "log", "serde", "serde_json", diff --git a/Cargo.toml b/Cargo.toml index ea8690f2b3..ec793a7429 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -482,6 +482,7 @@ heed = { version = "0.21.0", features = ["read-txn-no-tls"] } hex = "0.4.3" html5ever = "0.27.0" http = "1.1" +http-body = "1.0" hyper = "0.14" ignore = "0.4.22" image = "0.25.1" diff --git a/crates/aws_http_client/Cargo.toml b/crates/aws_http_client/Cargo.toml index 3760f70fe0..2749286d4c 100644 --- a/crates/aws_http_client/Cargo.toml +++ b/crates/aws_http_client/Cargo.toml @@ -17,7 +17,5 @@ default = [] [dependencies] aws-smithy-runtime-api.workspace = true aws-smithy-types.workspace = true -futures.workspace = true http_client.workspace = true -tokio = { workspace = true, features = ["rt", "rt-multi-thread"] } workspace-hack.workspace = true diff --git a/crates/aws_http_client/src/aws_http_client.rs b/crates/aws_http_client/src/aws_http_client.rs index 6adb995747..d08c8e64a7 100644 --- a/crates/aws_http_client/src/aws_http_client.rs +++ b/crates/aws_http_client/src/aws_http_client.rs @@ -11,14 +11,11 @@ use aws_smithy_runtime_api::client::result::ConnectorError; use aws_smithy_runtime_api::client::runtime_components::RuntimeComponents; use aws_smithy_runtime_api::http::{Headers, StatusCode}; use aws_smithy_types::body::SdkBody; -use futures::AsyncReadExt; -use http_client::{AsyncBody, Inner}; +use 
http_client::AsyncBody; use http_client::{HttpClient, Request}; -use tokio::runtime::Handle; struct AwsHttpConnector { client: Arc, - handle: Handle, } impl std::fmt::Debug for AwsHttpConnector { @@ -42,18 +39,17 @@ impl AwsConnector for AwsHttpConnector { .client .send(Request::from_parts(parts, convert_to_async_body(body))); - let handle = self.handle.clone(); - HttpConnectorFuture::new(async move { let response = match response.await { Ok(response) => response, Err(err) => return Err(ConnectorError::other(err.into(), None)), }; let (parts, body) = response.into_parts(); - let body = convert_to_sdk_body(body, handle).await; - let mut response = - HttpResponse::new(StatusCode::try_from(parts.status.as_u16()).unwrap(), body); + let mut response = HttpResponse::new( + StatusCode::try_from(parts.status.as_u16()).unwrap(), + convert_to_sdk_body(body), + ); let headers = match Headers::try_from(parts.headers) { Ok(headers) => headers, @@ -70,7 +66,6 @@ impl AwsConnector for AwsHttpConnector { #[derive(Clone)] pub struct AwsHttpClient { client: Arc, - handler: Handle, } impl std::fmt::Debug for AwsHttpClient { @@ -80,11 +75,8 @@ impl std::fmt::Debug for AwsHttpClient { } impl AwsHttpClient { - pub fn new(client: Arc, handle: Handle) -> Self { - Self { - client, - handler: handle, - } + pub fn new(client: Arc) -> Self { + Self { client } } } @@ -96,25 +88,12 @@ impl AwsClient for AwsHttpClient { ) -> SharedHttpConnector { SharedHttpConnector::new(AwsHttpConnector { client: self.client.clone(), - handle: self.handler.clone(), }) } } -pub async fn convert_to_sdk_body(body: AsyncBody, handle: Handle) -> SdkBody { - match body.0 { - Inner::Empty => SdkBody::empty(), - Inner::Bytes(bytes) => SdkBody::from(bytes.into_inner()), - Inner::AsyncReader(mut reader) => { - let buffer = handle.spawn(async move { - let mut buffer = Vec::new(); - let _ = reader.read_to_end(&mut buffer).await; - buffer - }); - - SdkBody::from(buffer.await.unwrap_or_default()) - } - } +pub fn convert_to_sdk_body(body: AsyncBody) -> SdkBody { + SdkBody::from_body_1_x(body) } pub fn convert_to_async_body(body: SdkBody) -> AsyncBody { diff --git a/crates/http_client/Cargo.toml b/crates/http_client/Cargo.toml index 2b114f240a..2045708ff2 100644 --- a/crates/http_client/Cargo.toml +++ b/crates/http_client/Cargo.toml @@ -21,6 +21,7 @@ anyhow.workspace = true derive_more.workspace = true futures.workspace = true http.workspace = true +http-body.workspace = true log.workspace = true serde.workspace = true serde_json.workspace = true diff --git a/crates/http_client/src/async_body.rs b/crates/http_client/src/async_body.rs index caf8089d0f..88972d279c 100644 --- a/crates/http_client/src/async_body.rs +++ b/crates/http_client/src/async_body.rs @@ -6,6 +6,7 @@ use std::{ use bytes::Bytes; use futures::AsyncRead; +use http_body::{Body, Frame}; /// Based on the implementation of AsyncBody in /// . 
@@ -114,3 +115,24 @@ impl futures::AsyncRead for AsyncBody { } } } + +impl Body for AsyncBody { + type Data = Bytes; + type Error = std::io::Error; + + fn poll_frame( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> Poll, Self::Error>>> { + let mut buffer = vec![0; 8192]; + match AsyncRead::poll_read(self.as_mut(), cx, &mut buffer) { + Poll::Ready(Ok(0)) => Poll::Ready(None), + Poll::Ready(Ok(n)) => { + let data = Bytes::copy_from_slice(&buffer[..n]); + Poll::Ready(Some(Ok(Frame::data(data)))) + } + Poll::Ready(Err(e)) => Poll::Ready(Some(Err(e))), + Poll::Pending => Poll::Pending, + } + } +} diff --git a/crates/language_models/src/provider/bedrock.rs b/crates/language_models/src/provider/bedrock.rs index 65ce1dbc4b..a022511b11 100644 --- a/crates/language_models/src/provider/bedrock.rs +++ b/crates/language_models/src/provider/bedrock.rs @@ -258,13 +258,9 @@ impl BedrockLanguageModelProvider { }), }); - let tokio_handle = Tokio::handle(cx); - - let coerced_client = AwsHttpClient::new(http_client.clone(), tokio_handle.clone()); - Self { - http_client: coerced_client, - handler: tokio_handle.clone(), + http_client: AwsHttpClient::new(http_client.clone()), + handler: Tokio::handle(cx), state, } } From fa3e1ccc37373777756283829d180e41e63f8d1a Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Tue, 22 Jul 2025 18:19:51 +0200 Subject: [PATCH 12/25] chore: Bump taffy to 0.8.3 (#34876) That's the latest release. Note that we have an opportunity to simplify our size types per https://github.com/DioxusLabs/taffy/blob/main/CHANGELOG.md#highlights-1 (though that's left out of this PR) image Release Notes: - N/A --- Cargo.lock | 9 ++++----- crates/gpui/Cargo.toml | 2 +- crates/gpui/src/taffy.rs | 26 +++++++++++++------------- 3 files changed, 18 insertions(+), 19 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c64995b01b..08d29cdc80 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7401,9 +7401,9 @@ dependencies = [ [[package]] name = "grid" -version = "0.14.0" +version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be136d9dacc2a13cc70bb6c8f902b414fb2641f8db1314637c6b7933411a8f82" +checksum = "71b01d27060ad58be4663b9e4ac9e2d4806918e8876af8912afbddd1a91d5eaa" [[package]] name = "group" @@ -15958,13 +15958,12 @@ dependencies = [ [[package]] name = "taffy" -version = "0.5.1" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8b61630cba2afd2c851821add2e1bb1b7851a2436e839ab73b56558b009035e" +checksum = "7aaef0ac998e6527d6d0d5582f7e43953bb17221ac75bb8eb2fcc2db3396db1c" dependencies = [ "arrayvec", "grid", - "num-traits", "serde", "slotmap", ] diff --git a/crates/gpui/Cargo.toml b/crates/gpui/Cargo.toml index 878794647a..68c0ea89c7 100644 --- a/crates/gpui/Cargo.toml +++ b/crates/gpui/Cargo.toml @@ -121,7 +121,7 @@ smallvec.workspace = true smol.workspace = true strum.workspace = true sum_tree.workspace = true -taffy = "=0.5.1" +taffy = "=0.8.3" thiserror.workspace = true util.workspace = true uuid.workspace = true diff --git a/crates/gpui/src/taffy.rs b/crates/gpui/src/taffy.rs index 6228a60490..f7fa54256d 100644 --- a/crates/gpui/src/taffy.rs +++ b/crates/gpui/src/taffy.rs @@ -283,7 +283,7 @@ impl ToTaffy for Length { fn to_taffy(&self, rem_size: Pixels) -> taffy::prelude::LengthPercentageAuto { match self { Length::Definite(length) => length.to_taffy(rem_size), - Length::Auto => taffy::prelude::LengthPercentageAuto::Auto, + Length::Auto => 
taffy::prelude::LengthPercentageAuto::auto(), } } } @@ -292,7 +292,7 @@ impl ToTaffy for Length { fn to_taffy(&self, rem_size: Pixels) -> taffy::prelude::Dimension { match self { Length::Definite(length) => length.to_taffy(rem_size), - Length::Auto => taffy::prelude::Dimension::Auto, + Length::Auto => taffy::prelude::Dimension::auto(), } } } @@ -302,14 +302,14 @@ impl ToTaffy for DefiniteLength { match self { DefiniteLength::Absolute(length) => match length { AbsoluteLength::Pixels(pixels) => { - taffy::style::LengthPercentage::Length(pixels.into()) + taffy::style::LengthPercentage::length(pixels.into()) } AbsoluteLength::Rems(rems) => { - taffy::style::LengthPercentage::Length((*rems * rem_size).into()) + taffy::style::LengthPercentage::length((*rems * rem_size).into()) } }, DefiniteLength::Fraction(fraction) => { - taffy::style::LengthPercentage::Percent(*fraction) + taffy::style::LengthPercentage::percent(*fraction) } } } @@ -320,14 +320,14 @@ impl ToTaffy for DefiniteLength { match self { DefiniteLength::Absolute(length) => match length { AbsoluteLength::Pixels(pixels) => { - taffy::style::LengthPercentageAuto::Length(pixels.into()) + taffy::style::LengthPercentageAuto::length(pixels.into()) } AbsoluteLength::Rems(rems) => { - taffy::style::LengthPercentageAuto::Length((*rems * rem_size).into()) + taffy::style::LengthPercentageAuto::length((*rems * rem_size).into()) } }, DefiniteLength::Fraction(fraction) => { - taffy::style::LengthPercentageAuto::Percent(*fraction) + taffy::style::LengthPercentageAuto::percent(*fraction) } } } @@ -337,12 +337,12 @@ impl ToTaffy for DefiniteLength { fn to_taffy(&self, rem_size: Pixels) -> taffy::style::Dimension { match self { DefiniteLength::Absolute(length) => match length { - AbsoluteLength::Pixels(pixels) => taffy::style::Dimension::Length(pixels.into()), + AbsoluteLength::Pixels(pixels) => taffy::style::Dimension::length(pixels.into()), AbsoluteLength::Rems(rems) => { - taffy::style::Dimension::Length((*rems * rem_size).into()) + taffy::style::Dimension::length((*rems * rem_size).into()) } }, - DefiniteLength::Fraction(fraction) => taffy::style::Dimension::Percent(*fraction), + DefiniteLength::Fraction(fraction) => taffy::style::Dimension::percent(*fraction), } } } @@ -350,9 +350,9 @@ impl ToTaffy for DefiniteLength { impl ToTaffy for AbsoluteLength { fn to_taffy(&self, rem_size: Pixels) -> taffy::style::LengthPercentage { match self { - AbsoluteLength::Pixels(pixels) => taffy::style::LengthPercentage::Length(pixels.into()), + AbsoluteLength::Pixels(pixels) => taffy::style::LengthPercentage::length(pixels.into()), AbsoluteLength::Rems(rems) => { - taffy::style::LengthPercentage::Length((*rems * rem_size).into()) + taffy::style::LengthPercentage::length((*rems * rem_size).into()) } } } From 64d0fec699607f4cef8b2625fef62d1ea105a2ed Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Tue, 22 Jul 2025 18:20:48 +0200 Subject: [PATCH 13/25] sum_tree: Store context on cursor (#34904) This gets rid of the need to pass context to all cursor functions. In practice context is always immutable when interacting with cursors. A nicety of this is in the follow-up PR we will be able to implement Iterator for all Cursors/filter cursors (hell, we may be able to get rid of filter cursor altogether, as it is just a custom `filter` impl on iterator trait). 
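As a standalone illustration of what this buys at call sites (toy types only, not the real `sum_tree::Cursor` API), the key move is storing the borrowed context on the cursor when it is constructed, which is sound precisely because the context stays immutable for the cursor's lifetime:

```rust
// Toy cursor; the real sum_tree::Cursor also carries summaries, bias, etc.
struct Cursor<'a, Cx> {
    cx: &'a Cx, // context captured once, at construction
    position: usize,
}

impl<'a, Cx> Cursor<'a, Cx> {
    fn new(cx: &'a Cx) -> Self {
        Self { cx, position: 0 }
    }

    // Previously call sites looked like `cursor.next(&cx)`; with the borrow
    // stored on the cursor, they shrink to `cursor.next()`.
    fn next(&mut self) {
        let _cx = self.cx; // context is available without being re-passed
        self.position += 1;
    }
}

fn main() {
    let snapshot = String::from("immutable context, e.g. a buffer snapshot");
    let mut cursor = Cursor::new(&snapshot);
    cursor.next();
    cursor.next();
    assert_eq!(cursor.position, 2);
}
```

Holding the shared borrow for the cursor's whole lifetime also makes the follow-up idea above natural: once `next()` takes no arguments, implementing `Iterator` for cursors becomes a mechanical change.
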
Release Notes: - N/A --- crates/buffer_diff/src/buffer_diff.rs | 42 ++- crates/channel/src/channel_chat.rs | 30 +- crates/editor/src/display_map/block_map.rs | 96 +++--- crates/editor/src/display_map/crease_map.rs | 24 +- crates/editor/src/display_map/fold_map.rs | 119 ++++---- crates/editor/src/display_map/inlay_map.rs | 94 +++--- crates/editor/src/display_map/wrap_map.rs | 106 +++---- crates/editor/src/git/blame.rs | 14 +- crates/gpui/src/elements/list.rs | 54 ++-- crates/language/src/diagnostic_set.rs | 8 +- crates/language/src/syntax_map.rs | 35 ++- crates/multi_buffer/src/multi_buffer.rs | 286 +++++++++--------- .../notifications/src/notification_store.rs | 10 +- crates/project/src/git_store.rs | 4 +- crates/project/src/git_store/git_traversal.rs | 7 +- crates/rope/src/rope.rs | 104 +++---- crates/sum_tree/src/cursor.rs | 173 ++++++----- crates/sum_tree/src/sum_tree.rs | 222 +++++--------- crates/sum_tree/src/tree_map.rs | 34 +-- crates/text/src/anchor.rs | 2 +- crates/text/src/text.rs | 115 ++++--- crates/text/src/undo_map.rs | 2 - crates/worktree/src/worktree.rs | 44 ++- 23 files changed, 749 insertions(+), 876 deletions(-) diff --git a/crates/buffer_diff/src/buffer_diff.rs b/crates/buffer_diff/src/buffer_diff.rs index ee09fda46e..97f529fe37 100644 --- a/crates/buffer_diff/src/buffer_diff.rs +++ b/crates/buffer_diff/src/buffer_diff.rs @@ -343,8 +343,7 @@ impl BufferDiffInner { .. } in hunks.iter().cloned() { - let preceding_pending_hunks = - old_pending_hunks.slice(&buffer_range.start, Bias::Left, buffer); + let preceding_pending_hunks = old_pending_hunks.slice(&buffer_range.start, Bias::Left); pending_hunks.append(preceding_pending_hunks, buffer); // Skip all overlapping or adjacent old pending hunks @@ -355,7 +354,7 @@ impl BufferDiffInner { .cmp(&buffer_range.end, buffer) .is_le() }) { - old_pending_hunks.next(buffer); + old_pending_hunks.next(); } if (stage && secondary_status == DiffHunkSecondaryStatus::NoSecondaryHunk) @@ -379,10 +378,10 @@ impl BufferDiffInner { ); } // append the remainder - pending_hunks.append(old_pending_hunks.suffix(buffer), buffer); + pending_hunks.append(old_pending_hunks.suffix(), buffer); let mut unstaged_hunk_cursor = unstaged_diff.hunks.cursor::(buffer); - unstaged_hunk_cursor.next(buffer); + unstaged_hunk_cursor.next(); // then, iterate over all pending hunks (both new ones and the existing ones) and compute the edits let mut prev_unstaged_hunk_buffer_end = 0; @@ -397,8 +396,7 @@ impl BufferDiffInner { }) = pending_hunks_iter.next() { // Advance unstaged_hunk_cursor to skip unstaged hunks before current hunk - let skipped_unstaged = - unstaged_hunk_cursor.slice(&buffer_range.start, Bias::Left, buffer); + let skipped_unstaged = unstaged_hunk_cursor.slice(&buffer_range.start, Bias::Left); if let Some(unstaged_hunk) = skipped_unstaged.last() { prev_unstaged_hunk_base_text_end = unstaged_hunk.diff_base_byte_range.end; @@ -425,7 +423,7 @@ impl BufferDiffInner { buffer_offset_range.end = buffer_offset_range.end.max(unstaged_hunk_offset_range.end); - unstaged_hunk_cursor.next(buffer); + unstaged_hunk_cursor.next(); continue; } } @@ -514,7 +512,7 @@ impl BufferDiffInner { }); let anchor_iter = iter::from_fn(move || { - cursor.next(buffer); + cursor.next(); cursor.item() }) .flat_map(move |hunk| { @@ -531,12 +529,12 @@ impl BufferDiffInner { }); let mut pending_hunks_cursor = self.pending_hunks.cursor::(buffer); - pending_hunks_cursor.next(buffer); + pending_hunks_cursor.next(); let mut secondary_cursor = None; if let Some(secondary) = secondary.as_ref() { 
let mut cursor = secondary.hunks.cursor::(buffer); - cursor.next(buffer); + cursor.next(); secondary_cursor = Some(cursor); } @@ -564,7 +562,7 @@ impl BufferDiffInner { .cmp(&pending_hunks_cursor.start().buffer_range.start, buffer) .is_gt() { - pending_hunks_cursor.seek_forward(&start_anchor, Bias::Left, buffer); + pending_hunks_cursor.seek_forward(&start_anchor, Bias::Left); } if let Some(pending_hunk) = pending_hunks_cursor.item() { @@ -590,7 +588,7 @@ impl BufferDiffInner { .cmp(&secondary_cursor.start().buffer_range.start, buffer) .is_gt() { - secondary_cursor.seek_forward(&start_anchor, Bias::Left, buffer); + secondary_cursor.seek_forward(&start_anchor, Bias::Left); } if let Some(secondary_hunk) = secondary_cursor.item() { @@ -635,7 +633,7 @@ impl BufferDiffInner { }); iter::from_fn(move || { - cursor.prev(buffer); + cursor.prev(); let hunk = cursor.item()?; let range = hunk.buffer_range.to_point(buffer); @@ -653,8 +651,8 @@ impl BufferDiffInner { fn compare(&self, old: &Self, new_snapshot: &text::BufferSnapshot) -> Option> { let mut new_cursor = self.hunks.cursor::<()>(new_snapshot); let mut old_cursor = old.hunks.cursor::<()>(new_snapshot); - old_cursor.next(new_snapshot); - new_cursor.next(new_snapshot); + old_cursor.next(); + new_cursor.next(); let mut start = None; let mut end = None; @@ -669,7 +667,7 @@ impl BufferDiffInner { Ordering::Less => { start.get_or_insert(new_hunk.buffer_range.start); end.replace(new_hunk.buffer_range.end); - new_cursor.next(new_snapshot); + new_cursor.next(); } Ordering::Equal => { if new_hunk != old_hunk { @@ -686,25 +684,25 @@ impl BufferDiffInner { } } - new_cursor.next(new_snapshot); - old_cursor.next(new_snapshot); + new_cursor.next(); + old_cursor.next(); } Ordering::Greater => { start.get_or_insert(old_hunk.buffer_range.start); end.replace(old_hunk.buffer_range.end); - old_cursor.next(new_snapshot); + old_cursor.next(); } } } (Some(new_hunk), None) => { start.get_or_insert(new_hunk.buffer_range.start); end.replace(new_hunk.buffer_range.end); - new_cursor.next(new_snapshot); + new_cursor.next(); } (None, Some(old_hunk)) => { start.get_or_insert(old_hunk.buffer_range.start); end.replace(old_hunk.buffer_range.end); - old_cursor.next(new_snapshot); + old_cursor.next(); } (None, None) => break, } diff --git a/crates/channel/src/channel_chat.rs b/crates/channel/src/channel_chat.rs index 8394972d43..866e3ccd90 100644 --- a/crates/channel/src/channel_chat.rs +++ b/crates/channel/src/channel_chat.rs @@ -333,7 +333,7 @@ impl ChannelChat { if first_id <= message_id { let mut cursor = chat.messages.cursor::<(ChannelMessageId, Count)>(&()); let message_id = ChannelMessageId::Saved(message_id); - cursor.seek(&message_id, Bias::Left, &()); + cursor.seek(&message_id, Bias::Left); return ControlFlow::Break( if cursor .item() @@ -499,7 +499,7 @@ impl ChannelChat { pub fn message(&self, ix: usize) -> &ChannelMessage { let mut cursor = self.messages.cursor::(&()); - cursor.seek(&Count(ix), Bias::Right, &()); + cursor.seek(&Count(ix), Bias::Right); cursor.item().unwrap() } @@ -516,13 +516,13 @@ impl ChannelChat { pub fn messages_in_range(&self, range: Range) -> impl Iterator { let mut cursor = self.messages.cursor::(&()); - cursor.seek(&Count(range.start), Bias::Right, &()); + cursor.seek(&Count(range.start), Bias::Right); cursor.take(range.len()) } pub fn pending_messages(&self) -> impl Iterator { let mut cursor = self.messages.cursor::(&()); - cursor.seek(&ChannelMessageId::Pending(0), Bias::Left, &()); + cursor.seek(&ChannelMessageId::Pending(0), Bias::Left); 
cursor } @@ -588,9 +588,9 @@ impl ChannelChat { .collect::>(); let mut old_cursor = self.messages.cursor::<(ChannelMessageId, Count)>(&()); - let mut new_messages = old_cursor.slice(&first_message.id, Bias::Left, &()); + let mut new_messages = old_cursor.slice(&first_message.id, Bias::Left); let start_ix = old_cursor.start().1.0; - let removed_messages = old_cursor.slice(&last_message.id, Bias::Right, &()); + let removed_messages = old_cursor.slice(&last_message.id, Bias::Right); let removed_count = removed_messages.summary().count; let new_count = messages.summary().count; let end_ix = start_ix + removed_count; @@ -599,10 +599,10 @@ impl ChannelChat { let mut ranges = Vec::>::new(); if new_messages.last().unwrap().is_pending() { - new_messages.append(old_cursor.suffix(&()), &()); + new_messages.append(old_cursor.suffix(), &()); } else { new_messages.append( - old_cursor.slice(&ChannelMessageId::Pending(0), Bias::Left, &()), + old_cursor.slice(&ChannelMessageId::Pending(0), Bias::Left), &(), ); @@ -617,7 +617,7 @@ impl ChannelChat { } else { new_messages.push(message.clone(), &()); } - old_cursor.next(&()); + old_cursor.next(); } } @@ -641,12 +641,12 @@ impl ChannelChat { fn message_removed(&mut self, id: u64, cx: &mut Context) { let mut cursor = self.messages.cursor::(&()); - let mut messages = cursor.slice(&ChannelMessageId::Saved(id), Bias::Left, &()); + let mut messages = cursor.slice(&ChannelMessageId::Saved(id), Bias::Left); if let Some(item) = cursor.item() { if item.id == ChannelMessageId::Saved(id) { let deleted_message_ix = messages.summary().count; - cursor.next(&()); - messages.append(cursor.suffix(&()), &()); + cursor.next(); + messages.append(cursor.suffix(), &()); drop(cursor); self.messages = messages; @@ -680,7 +680,7 @@ impl ChannelChat { cx: &mut Context, ) { let mut cursor = self.messages.cursor::(&()); - let mut messages = cursor.slice(&id, Bias::Left, &()); + let mut messages = cursor.slice(&id, Bias::Left); let ix = messages.summary().count; if let Some(mut message_to_update) = cursor.item().cloned() { @@ -688,10 +688,10 @@ impl ChannelChat { message_to_update.mentions = mentions; message_to_update.edited_at = edited_at; messages.push(message_to_update, &()); - cursor.next(&()); + cursor.next(); } - messages.append(cursor.suffix(&()), &()); + messages.append(cursor.suffix(), &()); drop(cursor); self.messages = messages; diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index c761e0d69c..85495a2611 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -524,10 +524,10 @@ impl BlockMap { // * Isomorphic transforms that end *at* the start of the edit // * Below blocks that end at the start of the edit // However, if we hit a replace block that ends at the start of the edit we want to reconstruct it. 
- new_transforms.append(cursor.slice(&old_start, Bias::Left, &()), &()); + new_transforms.append(cursor.slice(&old_start, Bias::Left), &()); if let Some(transform) = cursor.item() { if transform.summary.input_rows > 0 - && cursor.end(&()) == old_start + && cursor.end() == old_start && transform .block .as_ref() @@ -535,13 +535,13 @@ impl BlockMap { { // Preserve the transform (push and next) new_transforms.push(transform.clone(), &()); - cursor.next(&()); + cursor.next(); // Preserve below blocks at end of edit while let Some(transform) = cursor.item() { if transform.block.as_ref().map_or(false, |b| b.place_below()) { new_transforms.push(transform.clone(), &()); - cursor.next(&()); + cursor.next(); } else { break; } @@ -579,8 +579,8 @@ impl BlockMap { let mut new_end = WrapRow(edit.new.end); loop { // Seek to the transform starting at or after the end of the edit - cursor.seek(&old_end, Bias::Left, &()); - cursor.next(&()); + cursor.seek(&old_end, Bias::Left); + cursor.next(); // Extend edit to the end of the discarded transform so it is reconstructed in full let transform_rows_after_edit = cursor.start().0 - old_end.0; @@ -592,8 +592,8 @@ impl BlockMap { if next_edit.old.start <= cursor.start().0 { old_end = WrapRow(next_edit.old.end); new_end = WrapRow(next_edit.new.end); - cursor.seek(&old_end, Bias::Left, &()); - cursor.next(&()); + cursor.seek(&old_end, Bias::Left); + cursor.next(); edits.next(); } else { break; @@ -608,7 +608,7 @@ impl BlockMap { // Discard below blocks at the end of the edit. They'll be reconstructed. while let Some(transform) = cursor.item() { if transform.block.as_ref().map_or(false, |b| b.place_below()) { - cursor.next(&()); + cursor.next(); } else { break; } @@ -720,7 +720,7 @@ impl BlockMap { push_isomorphic(&mut new_transforms, rows_after_last_block, wrap_snapshot); } - new_transforms.append(cursor.suffix(&()), &()); + new_transforms.append(cursor.suffix(), &()); debug_assert_eq!( new_transforms.summary().input_rows, wrap_snapshot.max_point().row() + 1 @@ -971,7 +971,7 @@ impl BlockMapReader<'_> { ); let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(&()); - cursor.seek(&start_wrap_row, Bias::Left, &()); + cursor.seek(&start_wrap_row, Bias::Left); while let Some(transform) = cursor.item() { if cursor.start().0 > end_wrap_row { break; @@ -982,7 +982,7 @@ impl BlockMapReader<'_> { return Some(cursor.start().1); } } - cursor.next(&()); + cursor.next(); } None @@ -1293,7 +1293,7 @@ impl BlockSnapshot { let max_output_row = cmp::min(rows.end, self.transforms.summary().output_rows); let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); - cursor.seek(&BlockRow(rows.start), Bias::Right, &()); + cursor.seek(&BlockRow(rows.start), Bias::Right); let transform_output_start = cursor.start().0.0; let transform_input_start = cursor.start().1.0; @@ -1325,7 +1325,7 @@ impl BlockSnapshot { pub(super) fn row_infos(&self, start_row: BlockRow) -> BlockRows<'_> { let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); - cursor.seek(&start_row, Bias::Right, &()); + cursor.seek(&start_row, Bias::Right); let (output_start, input_start) = cursor.start(); let overshoot = if cursor .item() @@ -1346,9 +1346,9 @@ impl BlockSnapshot { pub fn blocks_in_range(&self, rows: Range) -> impl Iterator { let mut cursor = self.transforms.cursor::(&()); - cursor.seek(&BlockRow(rows.start), Bias::Left, &()); - while cursor.start().0 < rows.start && cursor.end(&()).0 <= rows.start { - cursor.next(&()); + cursor.seek(&BlockRow(rows.start), Bias::Left); + while 
cursor.start().0 < rows.start && cursor.end().0 <= rows.start { + cursor.next(); } std::iter::from_fn(move || { @@ -1364,10 +1364,10 @@ impl BlockSnapshot { break; } if let Some(block) = &transform.block { - cursor.next(&()); + cursor.next(); return Some((start_row, block)); } else { - cursor.next(&()); + cursor.next(); } } None @@ -1377,7 +1377,7 @@ impl BlockSnapshot { pub fn sticky_header_excerpt(&self, position: f32) -> Option> { let top_row = position as u32; let mut cursor = self.transforms.cursor::(&()); - cursor.seek(&BlockRow(top_row), Bias::Right, &()); + cursor.seek(&BlockRow(top_row), Bias::Right); while let Some(transform) = cursor.item() { match &transform.block { @@ -1386,7 +1386,7 @@ impl BlockSnapshot { } Some(block) if block.is_buffer_header() => return None, _ => { - cursor.prev(&()); + cursor.prev(); continue; } } @@ -1414,7 +1414,7 @@ impl BlockSnapshot { let wrap_row = WrapRow(wrap_point.row()); let mut cursor = self.transforms.cursor::(&()); - cursor.seek(&wrap_row, Bias::Left, &()); + cursor.seek(&wrap_row, Bias::Left); while let Some(transform) = cursor.item() { if let Some(block) = transform.block.as_ref() { @@ -1425,7 +1425,7 @@ impl BlockSnapshot { break; } - cursor.next(&()); + cursor.next(); } None @@ -1442,7 +1442,7 @@ impl BlockSnapshot { pub fn longest_row_in_range(&self, range: Range) -> BlockRow { let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); - cursor.seek(&range.start, Bias::Right, &()); + cursor.seek(&range.start, Bias::Right); let mut longest_row = range.start; let mut longest_row_chars = 0; @@ -1453,7 +1453,7 @@ impl BlockSnapshot { let wrap_start_row = input_start.0 + overshoot; let wrap_end_row = cmp::min( input_start.0 + (range.end.0 - output_start.0), - cursor.end(&()).1.0, + cursor.end().1.0, ); let summary = self .wrap_snapshot @@ -1461,12 +1461,12 @@ impl BlockSnapshot { longest_row = BlockRow(range.start.0 + summary.longest_row); longest_row_chars = summary.longest_row_chars; } - cursor.next(&()); + cursor.next(); } let cursor_start_row = cursor.start().0; if range.end > cursor_start_row { - let summary = cursor.summary::<_, TransformSummary>(&range.end, Bias::Right, &()); + let summary = cursor.summary::<_, TransformSummary>(&range.end, Bias::Right); if summary.longest_row_chars > longest_row_chars { longest_row = BlockRow(cursor_start_row.0 + summary.longest_row); longest_row_chars = summary.longest_row_chars; @@ -1493,7 +1493,7 @@ impl BlockSnapshot { pub(super) fn line_len(&self, row: BlockRow) -> u32 { let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); - cursor.seek(&BlockRow(row.0), Bias::Right, &()); + cursor.seek(&BlockRow(row.0), Bias::Right); if let Some(transform) = cursor.item() { let (output_start, input_start) = cursor.start(); let overshoot = row.0 - output_start.0; @@ -1511,13 +1511,13 @@ impl BlockSnapshot { pub(super) fn is_block_line(&self, row: BlockRow) -> bool { let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); - cursor.seek(&row, Bias::Right, &()); + cursor.seek(&row, Bias::Right); cursor.item().map_or(false, |t| t.block.is_some()) } pub(super) fn is_folded_buffer_header(&self, row: BlockRow) -> bool { let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); - cursor.seek(&row, Bias::Right, &()); + cursor.seek(&row, Bias::Right); let Some(transform) = cursor.item() else { return false; }; @@ -1529,7 +1529,7 @@ impl BlockSnapshot { .wrap_snapshot .make_wrap_point(Point::new(row.0, 0), Bias::Left); let mut cursor = self.transforms.cursor::<(WrapRow, 
BlockRow)>(&()); - cursor.seek(&WrapRow(wrap_point.row()), Bias::Right, &()); + cursor.seek(&WrapRow(wrap_point.row()), Bias::Right); cursor.item().map_or(false, |transform| { transform .block @@ -1540,17 +1540,17 @@ impl BlockSnapshot { pub fn clip_point(&self, point: BlockPoint, bias: Bias) -> BlockPoint { let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); - cursor.seek(&BlockRow(point.row), Bias::Right, &()); + cursor.seek(&BlockRow(point.row), Bias::Right); let max_input_row = WrapRow(self.transforms.summary().input_rows); let mut search_left = - (bias == Bias::Left && cursor.start().1.0 > 0) || cursor.end(&()).1 == max_input_row; + (bias == Bias::Left && cursor.start().1.0 > 0) || cursor.end().1 == max_input_row; let mut reversed = false; loop { if let Some(transform) = cursor.item() { let (output_start_row, input_start_row) = cursor.start(); - let (output_end_row, input_end_row) = cursor.end(&()); + let (output_end_row, input_end_row) = cursor.end(); let output_start = Point::new(output_start_row.0, 0); let input_start = Point::new(input_start_row.0, 0); let input_end = Point::new(input_end_row.0, 0); @@ -1584,23 +1584,23 @@ impl BlockSnapshot { } if search_left { - cursor.prev(&()); + cursor.prev(); } else { - cursor.next(&()); + cursor.next(); } } else if reversed { return self.max_point(); } else { reversed = true; search_left = !search_left; - cursor.seek(&BlockRow(point.row), Bias::Right, &()); + cursor.seek(&BlockRow(point.row), Bias::Right); } } } pub fn to_block_point(&self, wrap_point: WrapPoint) -> BlockPoint { let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(&()); - cursor.seek(&WrapRow(wrap_point.row()), Bias::Right, &()); + cursor.seek(&WrapRow(wrap_point.row()), Bias::Right); if let Some(transform) = cursor.item() { if transform.block.is_some() { BlockPoint::new(cursor.start().1.0, 0) @@ -1618,7 +1618,7 @@ impl BlockSnapshot { pub fn to_wrap_point(&self, block_point: BlockPoint, bias: Bias) -> WrapPoint { let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); - cursor.seek(&BlockRow(block_point.row), Bias::Right, &()); + cursor.seek(&BlockRow(block_point.row), Bias::Right); if let Some(transform) = cursor.item() { match transform.block.as_ref() { Some(block) => { @@ -1630,7 +1630,7 @@ impl BlockSnapshot { } else if bias == Bias::Left { WrapPoint::new(cursor.start().1.0, 0) } else { - let wrap_row = cursor.end(&()).1.0 - 1; + let wrap_row = cursor.end().1.0 - 1; WrapPoint::new(wrap_row, self.wrap_snapshot.line_len(wrap_row)) } } @@ -1650,14 +1650,14 @@ impl BlockChunks<'_> { /// Go to the next transform fn advance(&mut self) { self.input_chunk = Chunk::default(); - self.transforms.next(&()); + self.transforms.next(); while let Some(transform) = self.transforms.item() { if transform .block .as_ref() .map_or(false, |block| block.height() == 0) { - self.transforms.next(&()); + self.transforms.next(); } else { break; } @@ -1672,7 +1672,7 @@ impl BlockChunks<'_> { let start_output_row = self.transforms.start().0.0; if start_output_row < self.max_output_row { let end_input_row = cmp::min( - self.transforms.end(&()).1.0, + self.transforms.end().1.0, start_input_row + (self.max_output_row - start_output_row), ); self.input_chunks.seek(start_input_row..end_input_row); @@ -1696,7 +1696,7 @@ impl<'a> Iterator for BlockChunks<'a> { let transform = self.transforms.item()?; if transform.block.is_some() { let block_start = self.transforms.start().0.0; - let mut block_end = self.transforms.end(&()).0.0; + let mut block_end = 
self.transforms.end().0.0; self.advance(); if self.transforms.item().is_none() { block_end -= 1; @@ -1731,7 +1731,7 @@ impl<'a> Iterator for BlockChunks<'a> { } } - let transform_end = self.transforms.end(&()).0.0; + let transform_end = self.transforms.end().0.0; let (prefix_rows, prefix_bytes) = offset_for_row(self.input_chunk.text, transform_end - self.output_row); self.output_row += prefix_rows; @@ -1770,15 +1770,15 @@ impl Iterator for BlockRows<'_> { self.started = true; } - if self.output_row.0 >= self.transforms.end(&()).0.0 { - self.transforms.next(&()); + if self.output_row.0 >= self.transforms.end().0.0 { + self.transforms.next(); while let Some(transform) = self.transforms.item() { if transform .block .as_ref() .map_or(false, |block| block.height() == 0) { - self.transforms.next(&()); + self.transforms.next(); } else { break; } diff --git a/crates/editor/src/display_map/crease_map.rs b/crates/editor/src/display_map/crease_map.rs index e6fe4270ec..bdac982fa7 100644 --- a/crates/editor/src/display_map/crease_map.rs +++ b/crates/editor/src/display_map/crease_map.rs @@ -52,15 +52,15 @@ impl CreaseSnapshot { ) -> Option<&'a Crease> { let start = snapshot.anchor_before(Point::new(row.0, 0)); let mut cursor = self.creases.cursor::(snapshot); - cursor.seek(&start, Bias::Left, snapshot); + cursor.seek(&start, Bias::Left); while let Some(item) = cursor.item() { match Ord::cmp(&item.crease.range().start.to_point(snapshot).row, &row.0) { - Ordering::Less => cursor.next(snapshot), + Ordering::Less => cursor.next(), Ordering::Equal => { if item.crease.range().start.is_valid(snapshot) { return Some(&item.crease); } else { - cursor.next(snapshot); + cursor.next(); } } Ordering::Greater => break, @@ -76,11 +76,11 @@ impl CreaseSnapshot { ) -> impl 'a + Iterator> { let start = snapshot.anchor_before(Point::new(range.start.0, 0)); let mut cursor = self.creases.cursor::(snapshot); - cursor.seek(&start, Bias::Left, snapshot); + cursor.seek(&start, Bias::Left); std::iter::from_fn(move || { while let Some(item) = cursor.item() { - cursor.next(snapshot); + cursor.next(); let crease_range = item.crease.range(); let crease_start = crease_range.start.to_point(snapshot); let crease_end = crease_range.end.to_point(snapshot); @@ -102,13 +102,13 @@ impl CreaseSnapshot { let mut cursor = self.creases.cursor::(snapshot); let mut results = Vec::new(); - cursor.next(snapshot); + cursor.next(); while let Some(item) = cursor.item() { let crease_range = item.crease.range(); let start_point = crease_range.start.to_point(snapshot); let end_point = crease_range.end.to_point(snapshot); results.push((item.id, start_point..end_point)); - cursor.next(snapshot); + cursor.next(); } results @@ -298,7 +298,7 @@ impl CreaseMap { let mut cursor = self.snapshot.creases.cursor::(snapshot); for crease in creases { let crease_range = crease.range().clone(); - new_creases.append(cursor.slice(&crease_range, Bias::Left, snapshot), snapshot); + new_creases.append(cursor.slice(&crease_range, Bias::Left), snapshot); let id = self.next_id; self.next_id.0 += 1; @@ -306,7 +306,7 @@ impl CreaseMap { new_creases.push(CreaseItem { crease, id }, snapshot); new_ids.push(id); } - new_creases.append(cursor.suffix(snapshot), snapshot); + new_creases.append(cursor.suffix(), snapshot); new_creases }; new_ids @@ -332,9 +332,9 @@ impl CreaseMap { let mut cursor = self.snapshot.creases.cursor::(snapshot); for (id, range) in &removals { - new_creases.append(cursor.slice(range, Bias::Left, snapshot), snapshot); + new_creases.append(cursor.slice(range, 
Bias::Left), snapshot); while let Some(item) = cursor.item() { - cursor.next(snapshot); + cursor.next(); if item.id == *id { break; } else { @@ -343,7 +343,7 @@ impl CreaseMap { } } - new_creases.append(cursor.suffix(snapshot), snapshot); + new_creases.append(cursor.suffix(), snapshot); new_creases }; diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index f37e7063e7..829d34ff58 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -99,7 +99,7 @@ impl FoldPoint { pub fn to_inlay_point(self, snapshot: &FoldSnapshot) -> InlayPoint { let mut cursor = snapshot.transforms.cursor::<(FoldPoint, InlayPoint)>(&()); - cursor.seek(&self, Bias::Right, &()); + cursor.seek(&self, Bias::Right); let overshoot = self.0 - cursor.start().0.0; InlayPoint(cursor.start().1.0 + overshoot) } @@ -108,7 +108,7 @@ impl FoldPoint { let mut cursor = snapshot .transforms .cursor::<(FoldPoint, TransformSummary)>(&()); - cursor.seek(&self, Bias::Right, &()); + cursor.seek(&self, Bias::Right); let overshoot = self.0 - cursor.start().1.output.lines; let mut offset = cursor.start().1.output.len; if !overshoot.is_zero() { @@ -187,10 +187,10 @@ impl FoldMapWriter<'_> { width: None, }, ); - new_tree.append(cursor.slice(&fold.range, Bias::Right, buffer), buffer); + new_tree.append(cursor.slice(&fold.range, Bias::Right), buffer); new_tree.push(fold, buffer); } - new_tree.append(cursor.suffix(buffer), buffer); + new_tree.append(cursor.suffix(), buffer); new_tree }; @@ -252,7 +252,7 @@ impl FoldMapWriter<'_> { fold_ixs_to_delete.push(*folds_cursor.start()); self.0.snapshot.fold_metadata_by_id.remove(&fold.id); } - folds_cursor.next(buffer); + folds_cursor.next(); } } @@ -263,10 +263,10 @@ impl FoldMapWriter<'_> { let mut cursor = self.0.snapshot.folds.cursor::(buffer); let mut folds = SumTree::new(buffer); for fold_ix in fold_ixs_to_delete { - folds.append(cursor.slice(&fold_ix, Bias::Right, buffer), buffer); - cursor.next(buffer); + folds.append(cursor.slice(&fold_ix, Bias::Right), buffer); + cursor.next(); } - folds.append(cursor.suffix(buffer), buffer); + folds.append(cursor.suffix(), buffer); folds }; @@ -412,7 +412,7 @@ impl FoldMap { let mut new_transforms = SumTree::::default(); let mut cursor = self.snapshot.transforms.cursor::(&()); - cursor.seek(&InlayOffset(0), Bias::Right, &()); + cursor.seek(&InlayOffset(0), Bias::Right); while let Some(mut edit) = inlay_edits_iter.next() { if let Some(item) = cursor.item() { @@ -421,19 +421,19 @@ impl FoldMap { |transform| { if !transform.is_fold() { transform.summary.add_summary(&item.summary, &()); - cursor.next(&()); + cursor.next(); } }, &(), ); } } - new_transforms.append(cursor.slice(&edit.old.start, Bias::Left, &()), &()); + new_transforms.append(cursor.slice(&edit.old.start, Bias::Left), &()); edit.new.start -= edit.old.start - *cursor.start(); edit.old.start = *cursor.start(); - cursor.seek(&edit.old.end, Bias::Right, &()); - cursor.next(&()); + cursor.seek(&edit.old.end, Bias::Right); + cursor.next(); let mut delta = edit.new_len().0 as isize - edit.old_len().0 as isize; loop { @@ -449,8 +449,8 @@ impl FoldMap { if next_edit.old.end >= edit.old.end { edit.old.end = next_edit.old.end; - cursor.seek(&edit.old.end, Bias::Right, &()); - cursor.next(&()); + cursor.seek(&edit.old.end, Bias::Right); + cursor.next(); } } else { break; @@ -467,11 +467,7 @@ impl FoldMap { .snapshot .folds .cursor::(&inlay_snapshot.buffer); - folds_cursor.seek( - &FoldRange(anchor..Anchor::max()), - 
Bias::Left, - &inlay_snapshot.buffer, - ); + folds_cursor.seek(&FoldRange(anchor..Anchor::max()), Bias::Left); let mut folds = iter::from_fn({ let inlay_snapshot = &inlay_snapshot; @@ -485,7 +481,7 @@ impl FoldMap { ..inlay_snapshot.to_inlay_offset(buffer_end), ) }); - folds_cursor.next(&inlay_snapshot.buffer); + folds_cursor.next(); item } }) @@ -558,7 +554,7 @@ impl FoldMap { } } - new_transforms.append(cursor.suffix(&()), &()); + new_transforms.append(cursor.suffix(), &()); if new_transforms.is_empty() { let text_summary = inlay_snapshot.text_summary(); push_isomorphic(&mut new_transforms, text_summary); @@ -575,31 +571,31 @@ impl FoldMap { let mut new_transforms = new_transforms.cursor::<(InlayOffset, FoldOffset)>(&()); for mut edit in inlay_edits { - old_transforms.seek(&edit.old.start, Bias::Left, &()); + old_transforms.seek(&edit.old.start, Bias::Left); if old_transforms.item().map_or(false, |t| t.is_fold()) { edit.old.start = old_transforms.start().0; } let old_start = old_transforms.start().1.0 + (edit.old.start - old_transforms.start().0).0; - old_transforms.seek_forward(&edit.old.end, Bias::Right, &()); + old_transforms.seek_forward(&edit.old.end, Bias::Right); if old_transforms.item().map_or(false, |t| t.is_fold()) { - old_transforms.next(&()); + old_transforms.next(); edit.old.end = old_transforms.start().0; } let old_end = old_transforms.start().1.0 + (edit.old.end - old_transforms.start().0).0; - new_transforms.seek(&edit.new.start, Bias::Left, &()); + new_transforms.seek(&edit.new.start, Bias::Left); if new_transforms.item().map_or(false, |t| t.is_fold()) { edit.new.start = new_transforms.start().0; } let new_start = new_transforms.start().1.0 + (edit.new.start - new_transforms.start().0).0; - new_transforms.seek_forward(&edit.new.end, Bias::Right, &()); + new_transforms.seek_forward(&edit.new.end, Bias::Right); if new_transforms.item().map_or(false, |t| t.is_fold()) { - new_transforms.next(&()); + new_transforms.next(); edit.new.end = new_transforms.start().0; } let new_end = @@ -656,10 +652,10 @@ impl FoldSnapshot { let mut summary = TextSummary::default(); let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(&()); - cursor.seek(&range.start, Bias::Right, &()); + cursor.seek(&range.start, Bias::Right); if let Some(transform) = cursor.item() { let start_in_transform = range.start.0 - cursor.start().0.0; - let end_in_transform = cmp::min(range.end, cursor.end(&()).0).0 - cursor.start().0.0; + let end_in_transform = cmp::min(range.end, cursor.end().0).0 - cursor.start().0.0; if let Some(placeholder) = transform.placeholder.as_ref() { summary = TextSummary::from( &placeholder.text @@ -678,10 +674,10 @@ impl FoldSnapshot { } } - if range.end > cursor.end(&()).0 { - cursor.next(&()); + if range.end > cursor.end().0 { + cursor.next(); summary += &cursor - .summary::<_, TransformSummary>(&range.end, Bias::Right, &()) + .summary::<_, TransformSummary>(&range.end, Bias::Right) .output; if let Some(transform) = cursor.item() { let end_in_transform = range.end.0 - cursor.start().0.0; @@ -705,19 +701,16 @@ impl FoldSnapshot { pub fn to_fold_point(&self, point: InlayPoint, bias: Bias) -> FoldPoint { let mut cursor = self.transforms.cursor::<(InlayPoint, FoldPoint)>(&()); - cursor.seek(&point, Bias::Right, &()); + cursor.seek(&point, Bias::Right); if cursor.item().map_or(false, |t| t.is_fold()) { if bias == Bias::Left || point == cursor.start().0 { cursor.start().1 } else { - cursor.end(&()).1 + cursor.end().1 } } else { let overshoot = point.0 - cursor.start().0.0; - 
FoldPoint(cmp::min( - cursor.start().1.0 + overshoot, - cursor.end(&()).1.0, - )) + FoldPoint(cmp::min(cursor.start().1.0 + overshoot, cursor.end().1.0)) } } @@ -742,7 +735,7 @@ impl FoldSnapshot { let fold_point = FoldPoint::new(start_row, 0); let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(&()); - cursor.seek(&fold_point, Bias::Left, &()); + cursor.seek(&fold_point, Bias::Left); let overshoot = fold_point.0 - cursor.start().0.0; let inlay_point = InlayPoint(cursor.start().1.0 + overshoot); @@ -773,7 +766,7 @@ impl FoldSnapshot { let mut folds = intersecting_folds(&self.inlay_snapshot, &self.folds, range, false); iter::from_fn(move || { let item = folds.item(); - folds.next(&self.inlay_snapshot.buffer); + folds.next(); item }) } @@ -785,7 +778,7 @@ impl FoldSnapshot { let buffer_offset = offset.to_offset(&self.inlay_snapshot.buffer); let inlay_offset = self.inlay_snapshot.to_inlay_offset(buffer_offset); let mut cursor = self.transforms.cursor::(&()); - cursor.seek(&inlay_offset, Bias::Right, &()); + cursor.seek(&inlay_offset, Bias::Right); cursor.item().map_or(false, |t| t.placeholder.is_some()) } @@ -794,7 +787,7 @@ impl FoldSnapshot { .inlay_snapshot .to_inlay_point(Point::new(buffer_row.0, 0)); let mut cursor = self.transforms.cursor::(&()); - cursor.seek(&inlay_point, Bias::Right, &()); + cursor.seek(&inlay_point, Bias::Right); loop { match cursor.item() { Some(transform) => { @@ -808,11 +801,11 @@ impl FoldSnapshot { None => return false, } - if cursor.end(&()).row() == inlay_point.row() { - cursor.next(&()); + if cursor.end().row() == inlay_point.row() { + cursor.next(); } else { inlay_point.0 += Point::new(1, 0); - cursor.seek(&inlay_point, Bias::Right, &()); + cursor.seek(&inlay_point, Bias::Right); } } } @@ -824,14 +817,14 @@ impl FoldSnapshot { highlights: Highlights<'a>, ) -> FoldChunks<'a> { let mut transform_cursor = self.transforms.cursor::<(FoldOffset, InlayOffset)>(&()); - transform_cursor.seek(&range.start, Bias::Right, &()); + transform_cursor.seek(&range.start, Bias::Right); let inlay_start = { let overshoot = range.start.0 - transform_cursor.start().0.0; transform_cursor.start().1 + InlayOffset(overshoot) }; - let transform_end = transform_cursor.end(&()); + let transform_end = transform_cursor.end(); let inlay_end = if transform_cursor .item() @@ -879,14 +872,14 @@ impl FoldSnapshot { pub fn clip_point(&self, point: FoldPoint, bias: Bias) -> FoldPoint { let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(&()); - cursor.seek(&point, Bias::Right, &()); + cursor.seek(&point, Bias::Right); if let Some(transform) = cursor.item() { let transform_start = cursor.start().0.0; if transform.placeholder.is_some() { if point.0 == transform_start || matches!(bias, Bias::Left) { FoldPoint(transform_start) } else { - FoldPoint(cursor.end(&()).0.0) + FoldPoint(cursor.end().0.0) } } else { let overshoot = InlayPoint(point.0 - transform_start); @@ -945,7 +938,7 @@ fn intersecting_folds<'a>( start_cmp == Ordering::Less && end_cmp == Ordering::Greater } }); - cursor.next(buffer); + cursor.next(); cursor } @@ -1211,7 +1204,7 @@ pub struct FoldRows<'a> { impl FoldRows<'_> { pub(crate) fn seek(&mut self, row: u32) { let fold_point = FoldPoint::new(row, 0); - self.cursor.seek(&fold_point, Bias::Left, &()); + self.cursor.seek(&fold_point, Bias::Left); let overshoot = fold_point.0 - self.cursor.start().0.0; let inlay_point = InlayPoint(self.cursor.start().1.0 + overshoot); self.input_rows.seek(inlay_point.row()); @@ -1224,8 +1217,8 @@ impl Iterator for 
FoldRows<'_> { fn next(&mut self) -> Option { let mut traversed_fold = false; - while self.fold_point > self.cursor.end(&()).0 { - self.cursor.next(&()); + while self.fold_point > self.cursor.end().0 { + self.cursor.next(); traversed_fold = true; if self.cursor.item().is_none() { break; @@ -1330,14 +1323,14 @@ pub struct FoldChunks<'a> { impl FoldChunks<'_> { pub(crate) fn seek(&mut self, range: Range) { - self.transform_cursor.seek(&range.start, Bias::Right, &()); + self.transform_cursor.seek(&range.start, Bias::Right); let inlay_start = { let overshoot = range.start.0 - self.transform_cursor.start().0.0; self.transform_cursor.start().1 + InlayOffset(overshoot) }; - let transform_end = self.transform_cursor.end(&()); + let transform_end = self.transform_cursor.end(); let inlay_end = if self .transform_cursor @@ -1376,10 +1369,10 @@ impl<'a> Iterator for FoldChunks<'a> { self.inlay_chunk.take(); self.inlay_offset += InlayOffset(transform.summary.input.len); - while self.inlay_offset >= self.transform_cursor.end(&()).1 + while self.inlay_offset >= self.transform_cursor.end().1 && self.transform_cursor.item().is_some() { - self.transform_cursor.next(&()); + self.transform_cursor.next(); } self.output_offset.0 += placeholder.text.len(); @@ -1396,7 +1389,7 @@ impl<'a> Iterator for FoldChunks<'a> { && self.inlay_chunks.offset() != self.inlay_offset { let transform_start = self.transform_cursor.start(); - let transform_end = self.transform_cursor.end(&()); + let transform_end = self.transform_cursor.end(); let inlay_end = if self.max_output_offset < transform_end.0 { let overshoot = self.max_output_offset.0 - transform_start.0.0; transform_start.1 + InlayOffset(overshoot) @@ -1417,14 +1410,14 @@ impl<'a> Iterator for FoldChunks<'a> { if let Some((buffer_chunk_start, mut inlay_chunk)) = self.inlay_chunk.clone() { let chunk = &mut inlay_chunk.chunk; let buffer_chunk_end = buffer_chunk_start + InlayOffset(chunk.text.len()); - let transform_end = self.transform_cursor.end(&()).1; + let transform_end = self.transform_cursor.end().1; let chunk_end = buffer_chunk_end.min(transform_end); chunk.text = &chunk.text [(self.inlay_offset - buffer_chunk_start).0..(chunk_end - buffer_chunk_start).0]; if chunk_end == transform_end { - self.transform_cursor.next(&()); + self.transform_cursor.next(); } else if chunk_end == buffer_chunk_end { self.inlay_chunk.take(); } @@ -1456,7 +1449,7 @@ impl FoldOffset { let mut cursor = snapshot .transforms .cursor::<(FoldOffset, TransformSummary)>(&()); - cursor.seek(&self, Bias::Right, &()); + cursor.seek(&self, Bias::Right); let overshoot = if cursor.item().map_or(true, |t| t.is_fold()) { Point::new(0, (self.0 - cursor.start().0.0) as u32) } else { @@ -1470,7 +1463,7 @@ impl FoldOffset { #[cfg(test)] pub fn to_inlay_offset(self, snapshot: &FoldSnapshot) -> InlayOffset { let mut cursor = snapshot.transforms.cursor::<(FoldOffset, InlayOffset)>(&()); - cursor.seek(&self, Bias::Right, &()); + cursor.seek(&self, Bias::Right); let overshoot = self.0 - cursor.start().0.0; InlayOffset(cursor.start().1.0 + overshoot) } diff --git a/crates/editor/src/display_map/inlay_map.rs b/crates/editor/src/display_map/inlay_map.rs index f7a696860a..a36d18ff6d 100644 --- a/crates/editor/src/display_map/inlay_map.rs +++ b/crates/editor/src/display_map/inlay_map.rs @@ -263,7 +263,7 @@ pub struct InlayChunk<'a> { impl InlayChunks<'_> { pub fn seek(&mut self, new_range: Range) { - self.transforms.seek(&new_range.start, Bias::Right, &()); + self.transforms.seek(&new_range.start, Bias::Right); let 
buffer_range = self.snapshot.to_buffer_offset(new_range.start) ..self.snapshot.to_buffer_offset(new_range.end); @@ -296,12 +296,12 @@ impl<'a> Iterator for InlayChunks<'a> { *chunk = self.buffer_chunks.next().unwrap(); } - let desired_bytes = self.transforms.end(&()).0.0 - self.output_offset.0; + let desired_bytes = self.transforms.end().0.0 - self.output_offset.0; // If we're already at the transform boundary, skip to the next transform if desired_bytes == 0 { self.inlay_chunks = None; - self.transforms.next(&()); + self.transforms.next(); return self.next(); } @@ -397,7 +397,7 @@ impl<'a> Iterator for InlayChunks<'a> { let inlay_chunks = self.inlay_chunks.get_or_insert_with(|| { let start = offset_in_inlay; - let end = cmp::min(self.max_output_offset, self.transforms.end(&()).0) + let end = cmp::min(self.max_output_offset, self.transforms.end().0) - self.transforms.start().0; inlay.text.chunks_in_range(start.0..end.0) }); @@ -441,9 +441,9 @@ impl<'a> Iterator for InlayChunks<'a> { } }; - if self.output_offset >= self.transforms.end(&()).0 { + if self.output_offset >= self.transforms.end().0 { self.inlay_chunks = None; - self.transforms.next(&()); + self.transforms.next(); } Some(chunk) @@ -453,7 +453,7 @@ impl<'a> Iterator for InlayChunks<'a> { impl InlayBufferRows<'_> { pub fn seek(&mut self, row: u32) { let inlay_point = InlayPoint::new(row, 0); - self.transforms.seek(&inlay_point, Bias::Left, &()); + self.transforms.seek(&inlay_point, Bias::Left); let mut buffer_point = self.transforms.start().1; let buffer_row = MultiBufferRow(if row == 0 { @@ -487,7 +487,7 @@ impl Iterator for InlayBufferRows<'_> { self.inlay_row += 1; self.transforms - .seek_forward(&InlayPoint::new(self.inlay_row, 0), Bias::Left, &()); + .seek_forward(&InlayPoint::new(self.inlay_row, 0), Bias::Left); Some(buffer_row) } @@ -556,18 +556,18 @@ impl InlayMap { let mut cursor = snapshot.transforms.cursor::<(usize, InlayOffset)>(&()); let mut buffer_edits_iter = buffer_edits.iter().peekable(); while let Some(buffer_edit) = buffer_edits_iter.next() { - new_transforms.append(cursor.slice(&buffer_edit.old.start, Bias::Left, &()), &()); + new_transforms.append(cursor.slice(&buffer_edit.old.start, Bias::Left), &()); if let Some(Transform::Isomorphic(transform)) = cursor.item() { - if cursor.end(&()).0 == buffer_edit.old.start { + if cursor.end().0 == buffer_edit.old.start { push_isomorphic(&mut new_transforms, *transform); - cursor.next(&()); + cursor.next(); } } // Remove all the inlays and transforms contained by the edit. let old_start = cursor.start().1 + InlayOffset(buffer_edit.old.start - cursor.start().0); - cursor.seek(&buffer_edit.old.end, Bias::Right, &()); + cursor.seek(&buffer_edit.old.end, Bias::Right); let old_end = cursor.start().1 + InlayOffset(buffer_edit.old.end - cursor.start().0); @@ -625,20 +625,20 @@ impl InlayMap { // we can push its remainder. 
if buffer_edits_iter .peek() - .map_or(true, |edit| edit.old.start >= cursor.end(&()).0) + .map_or(true, |edit| edit.old.start >= cursor.end().0) { let transform_start = new_transforms.summary().input.len; let transform_end = - buffer_edit.new.end + (cursor.end(&()).0 - buffer_edit.old.end); + buffer_edit.new.end + (cursor.end().0 - buffer_edit.old.end); push_isomorphic( &mut new_transforms, buffer_snapshot.text_summary_for_range(transform_start..transform_end), ); - cursor.next(&()); + cursor.next(); } } - new_transforms.append(cursor.suffix(&()), &()); + new_transforms.append(cursor.suffix(), &()); if new_transforms.is_empty() { new_transforms.push(Transform::Isomorphic(Default::default()), &()); } @@ -773,7 +773,7 @@ impl InlaySnapshot { let mut cursor = self .transforms .cursor::<(InlayOffset, (InlayPoint, usize))>(&()); - cursor.seek(&offset, Bias::Right, &()); + cursor.seek(&offset, Bias::Right); let overshoot = offset.0 - cursor.start().0.0; match cursor.item() { Some(Transform::Isomorphic(_)) => { @@ -803,7 +803,7 @@ impl InlaySnapshot { let mut cursor = self .transforms .cursor::<(InlayPoint, (InlayOffset, Point))>(&()); - cursor.seek(&point, Bias::Right, &()); + cursor.seek(&point, Bias::Right); let overshoot = point.0 - cursor.start().0.0; match cursor.item() { Some(Transform::Isomorphic(_)) => { @@ -822,7 +822,7 @@ impl InlaySnapshot { } pub fn to_buffer_point(&self, point: InlayPoint) -> Point { let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(&()); - cursor.seek(&point, Bias::Right, &()); + cursor.seek(&point, Bias::Right); match cursor.item() { Some(Transform::Isomorphic(_)) => { let overshoot = point.0 - cursor.start().0.0; @@ -834,7 +834,7 @@ impl InlaySnapshot { } pub fn to_buffer_offset(&self, offset: InlayOffset) -> usize { let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(&()); - cursor.seek(&offset, Bias::Right, &()); + cursor.seek(&offset, Bias::Right); match cursor.item() { Some(Transform::Isomorphic(_)) => { let overshoot = offset - cursor.start().0; @@ -847,19 +847,19 @@ impl InlaySnapshot { pub fn to_inlay_offset(&self, offset: usize) -> InlayOffset { let mut cursor = self.transforms.cursor::<(usize, InlayOffset)>(&()); - cursor.seek(&offset, Bias::Left, &()); + cursor.seek(&offset, Bias::Left); loop { match cursor.item() { Some(Transform::Isomorphic(_)) => { - if offset == cursor.end(&()).0 { + if offset == cursor.end().0 { while let Some(Transform::Inlay(inlay)) = cursor.next_item() { if inlay.position.bias() == Bias::Right { break; } else { - cursor.next(&()); + cursor.next(); } } - return cursor.end(&()).1; + return cursor.end().1; } else { let overshoot = offset - cursor.start().0; return InlayOffset(cursor.start().1.0 + overshoot); @@ -867,7 +867,7 @@ impl InlaySnapshot { } Some(Transform::Inlay(inlay)) => { if inlay.position.bias() == Bias::Left { - cursor.next(&()); + cursor.next(); } else { return cursor.start().1; } @@ -880,19 +880,19 @@ impl InlaySnapshot { } pub fn to_inlay_point(&self, point: Point) -> InlayPoint { let mut cursor = self.transforms.cursor::<(Point, InlayPoint)>(&()); - cursor.seek(&point, Bias::Left, &()); + cursor.seek(&point, Bias::Left); loop { match cursor.item() { Some(Transform::Isomorphic(_)) => { - if point == cursor.end(&()).0 { + if point == cursor.end().0 { while let Some(Transform::Inlay(inlay)) = cursor.next_item() { if inlay.position.bias() == Bias::Right { break; } else { - cursor.next(&()); + cursor.next(); } } - return cursor.end(&()).1; + return cursor.end().1; } else { let overshoot = 
point - cursor.start().0; return InlayPoint(cursor.start().1.0 + overshoot); @@ -900,7 +900,7 @@ impl InlaySnapshot { } Some(Transform::Inlay(inlay)) => { if inlay.position.bias() == Bias::Left { - cursor.next(&()); + cursor.next(); } else { return cursor.start().1; } @@ -914,7 +914,7 @@ impl InlaySnapshot { pub fn clip_point(&self, mut point: InlayPoint, mut bias: Bias) -> InlayPoint { let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(&()); - cursor.seek(&point, Bias::Left, &()); + cursor.seek(&point, Bias::Left); loop { match cursor.item() { Some(Transform::Isomorphic(transform)) => { @@ -923,7 +923,7 @@ impl InlaySnapshot { if inlay.position.bias() == Bias::Left { return point; } else if bias == Bias::Left { - cursor.prev(&()); + cursor.prev(); } else if transform.first_line_chars == 0 { point.0 += Point::new(1, 0); } else { @@ -932,12 +932,12 @@ impl InlaySnapshot { } else { return point; } - } else if cursor.end(&()).0 == point { + } else if cursor.end().0 == point { if let Some(Transform::Inlay(inlay)) = cursor.next_item() { if inlay.position.bias() == Bias::Right { return point; } else if bias == Bias::Right { - cursor.next(&()); + cursor.next(); } else if point.0.column == 0 { point.0.row -= 1; point.0.column = self.line_len(point.0.row); @@ -970,7 +970,7 @@ impl InlaySnapshot { } _ => return point, } - } else if point == cursor.end(&()).0 && inlay.position.bias() == Bias::Left { + } else if point == cursor.end().0 && inlay.position.bias() == Bias::Left { match cursor.next_item() { Some(Transform::Inlay(inlay)) => { if inlay.position.bias() == Bias::Right { @@ -983,9 +983,9 @@ impl InlaySnapshot { if bias == Bias::Left { point = cursor.start().0; - cursor.prev(&()); + cursor.prev(); } else { - cursor.next(&()); + cursor.next(); point = cursor.start().0; } } @@ -993,9 +993,9 @@ impl InlaySnapshot { bias = bias.invert(); if bias == Bias::Left { point = cursor.start().0; - cursor.prev(&()); + cursor.prev(); } else { - cursor.next(&()); + cursor.next(); point = cursor.start().0; } } @@ -1011,7 +1011,7 @@ impl InlaySnapshot { let mut summary = TextSummary::default(); let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(&()); - cursor.seek(&range.start, Bias::Right, &()); + cursor.seek(&range.start, Bias::Right); let overshoot = range.start.0 - cursor.start().0.0; match cursor.item() { @@ -1019,22 +1019,22 @@ impl InlaySnapshot { let buffer_start = cursor.start().1; let suffix_start = buffer_start + overshoot; let suffix_end = - buffer_start + (cmp::min(cursor.end(&()).0, range.end).0 - cursor.start().0.0); + buffer_start + (cmp::min(cursor.end().0, range.end).0 - cursor.start().0.0); summary = self.buffer.text_summary_for_range(suffix_start..suffix_end); - cursor.next(&()); + cursor.next(); } Some(Transform::Inlay(inlay)) => { let suffix_start = overshoot; - let suffix_end = cmp::min(cursor.end(&()).0, range.end).0 - cursor.start().0.0; + let suffix_end = cmp::min(cursor.end().0, range.end).0 - cursor.start().0.0; summary = inlay.text.cursor(suffix_start).summary(suffix_end); - cursor.next(&()); + cursor.next(); } None => {} } if range.end > cursor.start().0 { summary += cursor - .summary::<_, TransformSummary>(&range.end, Bias::Right, &()) + .summary::<_, TransformSummary>(&range.end, Bias::Right) .output; let overshoot = range.end.0 - cursor.start().0.0; @@ -1060,7 +1060,7 @@ impl InlaySnapshot { pub fn row_infos(&self, row: u32) -> InlayBufferRows<'_> { let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(&()); let inlay_point = InlayPoint::new(row, 
0); - cursor.seek(&inlay_point, Bias::Left, &()); + cursor.seek(&inlay_point, Bias::Left); let max_buffer_row = self.buffer.max_row(); let mut buffer_point = cursor.start().1; @@ -1101,7 +1101,7 @@ impl InlaySnapshot { highlights: Highlights<'a>, ) -> InlayChunks<'a> { let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(&()); - cursor.seek(&range.start, Bias::Right, &()); + cursor.seek(&range.start, Bias::Right); let buffer_range = self.to_buffer_offset(range.start)..self.to_buffer_offset(range.end); let buffer_chunks = CustomHighlightsChunks::new( diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index a29bf53882..d55577826e 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -72,7 +72,7 @@ pub struct WrapRows<'a> { impl WrapRows<'_> { pub(crate) fn seek(&mut self, start_row: u32) { self.transforms - .seek(&WrapPoint::new(start_row, 0), Bias::Left, &()); + .seek(&WrapPoint::new(start_row, 0), Bias::Left); let mut input_row = self.transforms.start().1.row(); if self.transforms.item().map_or(false, |t| t.is_isomorphic()) { input_row += start_row - self.transforms.start().0.row(); @@ -340,7 +340,7 @@ impl WrapSnapshot { let mut tab_edits_iter = tab_edits.iter().peekable(); new_transforms = - old_cursor.slice(&tab_edits_iter.peek().unwrap().old.start, Bias::Right, &()); + old_cursor.slice(&tab_edits_iter.peek().unwrap().old.start, Bias::Right); while let Some(edit) = tab_edits_iter.next() { if edit.new.start > TabPoint::from(new_transforms.summary().input.lines) { @@ -356,31 +356,29 @@ impl WrapSnapshot { )); } - old_cursor.seek_forward(&edit.old.end, Bias::Right, &()); + old_cursor.seek_forward(&edit.old.end, Bias::Right); if let Some(next_edit) = tab_edits_iter.peek() { - if next_edit.old.start > old_cursor.end(&()) { - if old_cursor.end(&()) > edit.old.end { + if next_edit.old.start > old_cursor.end() { + if old_cursor.end() > edit.old.end { let summary = self .tab_snapshot - .text_summary_for_range(edit.old.end..old_cursor.end(&())); + .text_summary_for_range(edit.old.end..old_cursor.end()); new_transforms.push_or_extend(Transform::isomorphic(summary)); } - old_cursor.next(&()); - new_transforms.append( - old_cursor.slice(&next_edit.old.start, Bias::Right, &()), - &(), - ); + old_cursor.next(); + new_transforms + .append(old_cursor.slice(&next_edit.old.start, Bias::Right), &()); } } else { - if old_cursor.end(&()) > edit.old.end { + if old_cursor.end() > edit.old.end { let summary = self .tab_snapshot - .text_summary_for_range(edit.old.end..old_cursor.end(&())); + .text_summary_for_range(edit.old.end..old_cursor.end()); new_transforms.push_or_extend(Transform::isomorphic(summary)); } - old_cursor.next(&()); - new_transforms.append(old_cursor.suffix(&()), &()); + old_cursor.next(); + new_transforms.append(old_cursor.suffix(), &()); } } } @@ -441,7 +439,6 @@ impl WrapSnapshot { new_transforms = old_cursor.slice( &TabPoint::new(row_edits.peek().unwrap().old_rows.start, 0), Bias::Right, - &(), ); while let Some(edit) = row_edits.next() { @@ -516,34 +513,31 @@ impl WrapSnapshot { } new_transforms.extend(edit_transforms, &()); - old_cursor.seek_forward(&TabPoint::new(edit.old_rows.end, 0), Bias::Right, &()); + old_cursor.seek_forward(&TabPoint::new(edit.old_rows.end, 0), Bias::Right); if let Some(next_edit) = row_edits.peek() { - if next_edit.old_rows.start > old_cursor.end(&()).row() { - if old_cursor.end(&()) > TabPoint::new(edit.old_rows.end, 0) { + if next_edit.old_rows.start > 
old_cursor.end().row() { + if old_cursor.end() > TabPoint::new(edit.old_rows.end, 0) { let summary = self.tab_snapshot.text_summary_for_range( - TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()), + TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(), ); new_transforms.push_or_extend(Transform::isomorphic(summary)); } - old_cursor.next(&()); + old_cursor.next(); new_transforms.append( - old_cursor.slice( - &TabPoint::new(next_edit.old_rows.start, 0), - Bias::Right, - &(), - ), + old_cursor + .slice(&TabPoint::new(next_edit.old_rows.start, 0), Bias::Right), &(), ); } } else { - if old_cursor.end(&()) > TabPoint::new(edit.old_rows.end, 0) { + if old_cursor.end() > TabPoint::new(edit.old_rows.end, 0) { let summary = self.tab_snapshot.text_summary_for_range( - TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()), + TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(), ); new_transforms.push_or_extend(Transform::isomorphic(summary)); } - old_cursor.next(&()); - new_transforms.append(old_cursor.suffix(&()), &()); + old_cursor.next(); + new_transforms.append(old_cursor.suffix(), &()); } } } @@ -570,19 +564,19 @@ impl WrapSnapshot { tab_edit.new.start.0.column = 0; tab_edit.new.end.0 += Point::new(1, 0); - old_cursor.seek(&tab_edit.old.start, Bias::Right, &()); + old_cursor.seek(&tab_edit.old.start, Bias::Right); let mut old_start = old_cursor.start().output.lines; old_start += tab_edit.old.start.0 - old_cursor.start().input.lines; - old_cursor.seek(&tab_edit.old.end, Bias::Right, &()); + old_cursor.seek(&tab_edit.old.end, Bias::Right); let mut old_end = old_cursor.start().output.lines; old_end += tab_edit.old.end.0 - old_cursor.start().input.lines; - new_cursor.seek(&tab_edit.new.start, Bias::Right, &()); + new_cursor.seek(&tab_edit.new.start, Bias::Right); let mut new_start = new_cursor.start().output.lines; new_start += tab_edit.new.start.0 - new_cursor.start().input.lines; - new_cursor.seek(&tab_edit.new.end, Bias::Right, &()); + new_cursor.seek(&tab_edit.new.end, Bias::Right); let mut new_end = new_cursor.start().output.lines; new_end += tab_edit.new.end.0 - new_cursor.start().input.lines; @@ -605,7 +599,7 @@ impl WrapSnapshot { let output_start = WrapPoint::new(rows.start, 0); let output_end = WrapPoint::new(rows.end, 0); let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); - transforms.seek(&output_start, Bias::Right, &()); + transforms.seek(&output_start, Bias::Right); let mut input_start = TabPoint(transforms.start().1.0); if transforms.item().map_or(false, |t| t.is_isomorphic()) { input_start.0 += output_start.0 - transforms.start().0.0; @@ -633,7 +627,7 @@ impl WrapSnapshot { pub fn line_len(&self, row: u32) -> u32 { let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); - cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Left, &()); + cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Left); if cursor .item() .map_or(false, |transform| transform.is_isomorphic()) @@ -658,10 +652,10 @@ impl WrapSnapshot { let end = WrapPoint::new(rows.end, 0); let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); - cursor.seek(&start, Bias::Right, &()); + cursor.seek(&start, Bias::Right); if let Some(transform) = cursor.item() { let start_in_transform = start.0 - cursor.start().0.0; - let end_in_transform = cmp::min(end, cursor.end(&()).0).0 - cursor.start().0.0; + let end_in_transform = cmp::min(end, cursor.end().0).0 - cursor.start().0.0; if transform.is_isomorphic() { let tab_start = TabPoint(cursor.start().1.0 + start_in_transform); let 
tab_end = TabPoint(cursor.start().1.0 + end_in_transform); @@ -678,12 +672,12 @@ impl WrapSnapshot { }; } - cursor.next(&()); + cursor.next(); } if rows.end > cursor.start().0.row() { summary += &cursor - .summary::<_, TransformSummary>(&WrapPoint::new(rows.end, 0), Bias::Right, &()) + .summary::<_, TransformSummary>(&WrapPoint::new(rows.end, 0), Bias::Right) .output; if let Some(transform) = cursor.item() { @@ -712,7 +706,7 @@ impl WrapSnapshot { pub fn soft_wrap_indent(&self, row: u32) -> Option { let mut cursor = self.transforms.cursor::(&()); - cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Right, &()); + cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Right); cursor.item().and_then(|transform| { if transform.is_isomorphic() { None @@ -728,7 +722,7 @@ impl WrapSnapshot { pub fn row_infos(&self, start_row: u32) -> WrapRows<'_> { let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); - transforms.seek(&WrapPoint::new(start_row, 0), Bias::Left, &()); + transforms.seek(&WrapPoint::new(start_row, 0), Bias::Left); let mut input_row = transforms.start().1.row(); if transforms.item().map_or(false, |t| t.is_isomorphic()) { input_row += start_row - transforms.start().0.row(); @@ -748,7 +742,7 @@ impl WrapSnapshot { pub fn to_tab_point(&self, point: WrapPoint) -> TabPoint { let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); - cursor.seek(&point, Bias::Right, &()); + cursor.seek(&point, Bias::Right); let mut tab_point = cursor.start().1.0; if cursor.item().map_or(false, |t| t.is_isomorphic()) { tab_point += point.0 - cursor.start().0.0; @@ -766,14 +760,14 @@ impl WrapSnapshot { pub fn tab_point_to_wrap_point(&self, point: TabPoint) -> WrapPoint { let mut cursor = self.transforms.cursor::<(TabPoint, WrapPoint)>(&()); - cursor.seek(&point, Bias::Right, &()); + cursor.seek(&point, Bias::Right); WrapPoint(cursor.start().1.0 + (point.0 - cursor.start().0.0)) } pub fn clip_point(&self, mut point: WrapPoint, bias: Bias) -> WrapPoint { if bias == Bias::Left { let mut cursor = self.transforms.cursor::(&()); - cursor.seek(&point, Bias::Right, &()); + cursor.seek(&point, Bias::Right); if cursor.item().map_or(false, |t| !t.is_isomorphic()) { point = *cursor.start(); *point.column_mut() -= 1; @@ -791,16 +785,16 @@ impl WrapSnapshot { *point.column_mut() = 0; let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); - cursor.seek(&point, Bias::Right, &()); + cursor.seek(&point, Bias::Right); if cursor.item().is_none() { - cursor.prev(&()); + cursor.prev(); } while let Some(transform) = cursor.item() { if transform.is_isomorphic() && cursor.start().1.column() == 0 { - return cmp::min(cursor.end(&()).0.row(), point.row()); + return cmp::min(cursor.end().0.row(), point.row()); } else { - cursor.prev(&()); + cursor.prev(); } } @@ -811,12 +805,12 @@ impl WrapSnapshot { point.0 += Point::new(1, 0); let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); - cursor.seek(&point, Bias::Right, &()); + cursor.seek(&point, Bias::Right); while let Some(transform) = cursor.item() { if transform.is_isomorphic() && cursor.start().1.column() == 0 { return Some(cmp::max(cursor.start().0.row(), point.row())); } else { - cursor.next(&()); + cursor.next(); } } @@ -889,7 +883,7 @@ impl WrapChunks<'_> { pub(crate) fn seek(&mut self, rows: Range) { let output_start = WrapPoint::new(rows.start, 0); let output_end = WrapPoint::new(rows.end, 0); - self.transforms.seek(&output_start, Bias::Right, &()); + self.transforms.seek(&output_start, Bias::Right); let mut input_start = 
TabPoint(self.transforms.start().1.0); if self.transforms.item().map_or(false, |t| t.is_isomorphic()) { input_start.0 += output_start.0 - self.transforms.start().0.0; @@ -930,7 +924,7 @@ impl<'a> Iterator for WrapChunks<'a> { } self.output_position.0 += summary; - self.transforms.next(&()); + self.transforms.next(); return Some(Chunk { text: &display_text[start_ix..end_ix], ..Default::default() @@ -942,7 +936,7 @@ impl<'a> Iterator for WrapChunks<'a> { } let mut input_len = 0; - let transform_end = self.transforms.end(&()).0; + let transform_end = self.transforms.end().0; for c in self.input_chunk.text.chars() { let char_len = c.len_utf8(); input_len += char_len; @@ -954,7 +948,7 @@ impl<'a> Iterator for WrapChunks<'a> { } if self.output_position >= transform_end { - self.transforms.next(&()); + self.transforms.next(); break; } } @@ -982,7 +976,7 @@ impl Iterator for WrapRows<'_> { self.output_row += 1; self.transforms - .seek_forward(&WrapPoint::new(self.output_row, 0), Bias::Left, &()); + .seek_forward(&WrapPoint::new(self.output_row, 0), Bias::Left); if self.transforms.item().map_or(false, |t| t.is_isomorphic()) { self.input_buffer_row = self.input_buffer_rows.next().unwrap(); self.soft_wrapped = false; diff --git a/crates/editor/src/git/blame.rs b/crates/editor/src/git/blame.rs index d4c9e37895..fc350a5a15 100644 --- a/crates/editor/src/git/blame.rs +++ b/crates/editor/src/git/blame.rs @@ -296,7 +296,7 @@ impl GitBlame { let row = info .buffer_row .filter(|_| info.buffer_id == Some(buffer_id))?; - cursor.seek_forward(&row, Bias::Right, &()); + cursor.seek_forward(&row, Bias::Right); cursor.item()?.blame.clone() }) } @@ -389,7 +389,7 @@ impl GitBlame { } } - new_entries.append(cursor.slice(&edit.old.start, Bias::Right, &()), &()); + new_entries.append(cursor.slice(&edit.old.start, Bias::Right), &()); if edit.new.start > new_entries.summary().rows { new_entries.push( @@ -401,7 +401,7 @@ impl GitBlame { ); } - cursor.seek(&edit.old.end, Bias::Right, &()); + cursor.seek(&edit.old.end, Bias::Right); if !edit.new.is_empty() { new_entries.push( GitBlameEntry { @@ -412,7 +412,7 @@ impl GitBlame { ); } - let old_end = cursor.end(&()); + let old_end = cursor.end(); if row_edits .peek() .map_or(true, |next_edit| next_edit.old.start >= old_end) @@ -421,18 +421,18 @@ impl GitBlame { if old_end > edit.old.end { new_entries.push( GitBlameEntry { - rows: cursor.end(&()) - edit.old.end, + rows: cursor.end() - edit.old.end, blame: entry.blame.clone(), }, &(), ); } - cursor.next(&()); + cursor.next(); } } } - new_entries.append(cursor.suffix(&()), &()); + new_entries.append(cursor.suffix(), &()); drop(cursor); self.buffer_snapshot = new_snapshot; diff --git a/crates/gpui/src/elements/list.rs b/crates/gpui/src/elements/list.rs index f24d38794f..328a6a4cc1 100644 --- a/crates/gpui/src/elements/list.rs +++ b/crates/gpui/src/elements/list.rs @@ -249,8 +249,8 @@ impl ListState { let state = &mut *self.0.borrow_mut(); let mut old_items = state.items.cursor::(&()); - let mut new_items = old_items.slice(&Count(old_range.start), Bias::Right, &()); - old_items.seek_forward(&Count(old_range.end), Bias::Right, &()); + let mut new_items = old_items.slice(&Count(old_range.start), Bias::Right); + old_items.seek_forward(&Count(old_range.end), Bias::Right); let mut spliced_count = 0; new_items.extend( @@ -260,7 +260,7 @@ impl ListState { }), &(), ); - new_items.append(old_items.suffix(&()), &()); + new_items.append(old_items.suffix(), &()); drop(old_items); state.items = new_items; @@ -300,14 +300,14 @@ impl ListState { 
let current_offset = self.logical_scroll_top(); let state = &mut *self.0.borrow_mut(); let mut cursor = state.items.cursor::(&()); - cursor.seek(&Count(current_offset.item_ix), Bias::Right, &()); + cursor.seek(&Count(current_offset.item_ix), Bias::Right); let start_pixel_offset = cursor.start().height + current_offset.offset_in_item; let new_pixel_offset = (start_pixel_offset + distance).max(px(0.)); if new_pixel_offset > start_pixel_offset { - cursor.seek_forward(&Height(new_pixel_offset), Bias::Right, &()); + cursor.seek_forward(&Height(new_pixel_offset), Bias::Right); } else { - cursor.seek(&Height(new_pixel_offset), Bias::Right, &()); + cursor.seek(&Height(new_pixel_offset), Bias::Right); } state.logical_scroll_top = Some(ListOffset { @@ -343,11 +343,11 @@ impl ListState { scroll_top.offset_in_item = px(0.); } else { let mut cursor = state.items.cursor::(&()); - cursor.seek(&Count(ix + 1), Bias::Right, &()); + cursor.seek(&Count(ix + 1), Bias::Right); let bottom = cursor.start().height + padding.top; let goal_top = px(0.).max(bottom - height + padding.bottom); - cursor.seek(&Height(goal_top), Bias::Left, &()); + cursor.seek(&Height(goal_top), Bias::Left); let start_ix = cursor.start().count; let start_item_top = cursor.start().height; @@ -372,11 +372,11 @@ impl ListState { } let mut cursor = state.items.cursor::<(Count, Height)>(&()); - cursor.seek(&Count(scroll_top.item_ix), Bias::Right, &()); + cursor.seek(&Count(scroll_top.item_ix), Bias::Right); let scroll_top = cursor.start().1.0 + scroll_top.offset_in_item; - cursor.seek_forward(&Count(ix), Bias::Right, &()); + cursor.seek_forward(&Count(ix), Bias::Right); if let Some(&ListItem::Measured { size, .. }) = cursor.item() { let &(Count(count), Height(top)) = cursor.start(); if count == ix { @@ -431,7 +431,7 @@ impl ListState { let mut cursor = state.items.cursor::(&()); let summary: ListItemSummary = - cursor.summary(&Count(logical_scroll_top.item_ix), Bias::Right, &()); + cursor.summary(&Count(logical_scroll_top.item_ix), Bias::Right); let content_height = state.items.summary().height; let drag_offset = // if dragging the scrollbar, we want to offset the point if the height changed @@ -450,9 +450,9 @@ impl ListState { impl StateInner { fn visible_range(&self, height: Pixels, scroll_top: &ListOffset) -> Range { let mut cursor = self.items.cursor::(&()); - cursor.seek(&Count(scroll_top.item_ix), Bias::Right, &()); + cursor.seek(&Count(scroll_top.item_ix), Bias::Right); let start_y = cursor.start().height + scroll_top.offset_in_item; - cursor.seek_forward(&Height(start_y + height), Bias::Left, &()); + cursor.seek_forward(&Height(start_y + height), Bias::Left); scroll_top.item_ix..cursor.start().count + 1 } @@ -482,7 +482,7 @@ impl StateInner { self.logical_scroll_top = None; } else { let mut cursor = self.items.cursor::(&()); - cursor.seek(&Height(new_scroll_top), Bias::Right, &()); + cursor.seek(&Height(new_scroll_top), Bias::Right); let item_ix = cursor.start().count; let offset_in_item = new_scroll_top - cursor.start().height; self.logical_scroll_top = Some(ListOffset { @@ -523,7 +523,7 @@ impl StateInner { fn scroll_top(&self, logical_scroll_top: &ListOffset) -> Pixels { let mut cursor = self.items.cursor::(&()); - cursor.seek(&Count(logical_scroll_top.item_ix), Bias::Right, &()); + cursor.seek(&Count(logical_scroll_top.item_ix), Bias::Right); cursor.start().height + logical_scroll_top.offset_in_item } @@ -553,7 +553,7 @@ impl StateInner { let mut cursor = old_items.cursor::(&()); // Render items after the scroll top, including 
those in the trailing overdraw - cursor.seek(&Count(scroll_top.item_ix), Bias::Right, &()); + cursor.seek(&Count(scroll_top.item_ix), Bias::Right); for (ix, item) in cursor.by_ref().enumerate() { let visible_height = rendered_height - scroll_top.offset_in_item; if visible_height >= available_height + self.overdraw { @@ -592,13 +592,13 @@ impl StateInner { rendered_height += padding.bottom; // Prepare to start walking upward from the item at the scroll top. - cursor.seek(&Count(scroll_top.item_ix), Bias::Right, &()); + cursor.seek(&Count(scroll_top.item_ix), Bias::Right); // If the rendered items do not fill the visible region, then adjust // the scroll top upward. if rendered_height - scroll_top.offset_in_item < available_height { while rendered_height < available_height { - cursor.prev(&()); + cursor.prev(); if let Some(item) = cursor.item() { let item_index = cursor.start().0; let mut element = (self.render_item)(item_index, window, cx); @@ -645,7 +645,7 @@ impl StateInner { // Measure items in the leading overdraw let mut leading_overdraw = scroll_top.offset_in_item; while leading_overdraw < self.overdraw { - cursor.prev(&()); + cursor.prev(); if let Some(item) = cursor.item() { let size = if let ListItem::Measured { size, .. } = item { *size @@ -666,10 +666,10 @@ impl StateInner { let measured_range = cursor.start().0..(cursor.start().0 + measured_items.len()); let mut cursor = old_items.cursor::(&()); - let mut new_items = cursor.slice(&Count(measured_range.start), Bias::Right, &()); + let mut new_items = cursor.slice(&Count(measured_range.start), Bias::Right); new_items.extend(measured_items, &()); - cursor.seek(&Count(measured_range.end), Bias::Right, &()); - new_items.append(cursor.suffix(&()), &()); + cursor.seek(&Count(measured_range.end), Bias::Right); + new_items.append(cursor.suffix(), &()); self.items = new_items; // If none of the visible items are focused, check if an off-screen item is focused @@ -679,7 +679,7 @@ impl StateInner { let mut cursor = self .items .filter::<_, Count>(&(), |summary| summary.has_focus_handles); - cursor.next(&()); + cursor.next(); while let Some(item) = cursor.item() { if item.contains_focused(window, cx) { let item_index = cursor.start().0; @@ -692,7 +692,7 @@ impl StateInner { }); break; } - cursor.next(&()); + cursor.next(); } } @@ -741,7 +741,7 @@ impl StateInner { }); } else if autoscroll_bounds.bottom() > bounds.bottom() { let mut cursor = self.items.cursor::(&()); - cursor.seek(&Count(item.index), Bias::Right, &()); + cursor.seek(&Count(item.index), Bias::Right); let mut height = bounds.size.height - padding.top - padding.bottom; // Account for the height of the element down until the autoscroll bottom. @@ -749,7 +749,7 @@ impl StateInner { // Keep decreasing the scroll top until we fill all the available space. 
while height > Pixels::ZERO { - cursor.prev(&()); + cursor.prev(); let Some(item) = cursor.item() else { break }; let size = item.size().unwrap_or_else(|| { @@ -806,7 +806,7 @@ impl StateInner { self.logical_scroll_top = None; } else { let mut cursor = self.items.cursor::(&()); - cursor.seek(&Height(new_scroll_top), Bias::Right, &()); + cursor.seek(&Height(new_scroll_top), Bias::Right); let item_ix = cursor.start().count; let offset_in_item = new_scroll_top - cursor.start().height; diff --git a/crates/language/src/diagnostic_set.rs b/crates/language/src/diagnostic_set.rs index 661e3ef217..613c445652 100644 --- a/crates/language/src/diagnostic_set.rs +++ b/crates/language/src/diagnostic_set.rs @@ -158,17 +158,17 @@ impl DiagnosticSet { }); if reversed { - cursor.prev(buffer); + cursor.prev(); } else { - cursor.next(buffer); + cursor.next(); } iter::from_fn({ move || { if let Some(diagnostic) = cursor.item() { if reversed { - cursor.prev(buffer); + cursor.prev(); } else { - cursor.next(buffer); + cursor.next(); } Some(diagnostic.resolve(buffer)) } else { diff --git a/crates/language/src/syntax_map.rs b/crates/language/src/syntax_map.rs index da05416e89..f441114a90 100644 --- a/crates/language/src/syntax_map.rs +++ b/crates/language/src/syntax_map.rs @@ -297,10 +297,10 @@ impl SyntaxSnapshot { let mut first_edit_ix_for_depth = 0; let mut prev_depth = 0; let mut cursor = self.layers.cursor::(text); - cursor.next(text); + cursor.next(); 'outer: loop { - let depth = cursor.end(text).max_depth; + let depth = cursor.end().max_depth; if depth > prev_depth { first_edit_ix_for_depth = 0; prev_depth = depth; @@ -313,7 +313,7 @@ impl SyntaxSnapshot { position: edit_range.start, }; if target.cmp(cursor.start(), text).is_gt() { - let slice = cursor.slice(&target, Bias::Left, text); + let slice = cursor.slice(&target, Bias::Left); layers.append(slice, text); } } @@ -327,7 +327,6 @@ impl SyntaxSnapshot { language: None, }, Bias::Left, - text, ); layers.append(slice, text); continue; @@ -394,10 +393,10 @@ impl SyntaxSnapshot { } layers.push(layer, text); - cursor.next(text); + cursor.next(); } - layers.append(cursor.suffix(text), text); + layers.append(cursor.suffix(), text); drop(cursor); self.layers = layers; } @@ -420,7 +419,7 @@ impl SyntaxSnapshot { let mut cursor = self .layers .filter::<_, ()>(text, |summary| summary.contains_unknown_injections); - cursor.next(text); + cursor.next(); while let Some(layer) = cursor.item() { let SyntaxLayerContent::Pending { language_name } = &layer.content else { unreachable!() @@ -436,7 +435,7 @@ impl SyntaxSnapshot { resolved_injection_ranges.push(range); } - cursor.next(text); + cursor.next(); } drop(cursor); @@ -469,7 +468,7 @@ impl SyntaxSnapshot { let max_depth = self.layers.summary().max_depth; let mut cursor = self.layers.cursor::(text); - cursor.next(text); + cursor.next(); let mut layers = SumTree::new(text); let mut changed_regions = ChangeRegionSet::default(); @@ -514,7 +513,7 @@ impl SyntaxSnapshot { }; let mut done = cursor.item().is_none(); - while !done && position.cmp(&cursor.end(text), text).is_gt() { + while !done && position.cmp(&cursor.end(), text).is_gt() { done = true; let bounded_position = SyntaxLayerPositionBeforeChange { @@ -522,16 +521,16 @@ impl SyntaxSnapshot { change: changed_regions.start_position(), }; if bounded_position.cmp(cursor.start(), text).is_gt() { - let slice = cursor.slice(&bounded_position, Bias::Left, text); + let slice = cursor.slice(&bounded_position, Bias::Left); if !slice.is_empty() { layers.append(slice, text); - if 
changed_regions.prune(cursor.end(text), text) { + if changed_regions.prune(cursor.end(), text) { done = false; } } } - while position.cmp(&cursor.end(text), text).is_gt() { + while position.cmp(&cursor.end(), text).is_gt() { let Some(layer) = cursor.item() else { break }; if changed_regions.intersects(layer, text) { @@ -555,8 +554,8 @@ impl SyntaxSnapshot { layers.push(layer.clone(), text); } - cursor.next(text); - if changed_regions.prune(cursor.end(text), text) { + cursor.next(); + if changed_regions.prune(cursor.end(), text) { done = false; } } @@ -572,7 +571,7 @@ impl SyntaxSnapshot { if layer.range.to_offset(text) == (step_start_byte..step_end_byte) && layer.content.language_id() == step.language.id() { - cursor.next(text); + cursor.next(); } else { old_layer = None; } @@ -918,7 +917,7 @@ impl SyntaxSnapshot { } }); - cursor.next(buffer); + cursor.next(); iter::from_fn(move || { while let Some(layer) = cursor.item() { let mut info = None; @@ -940,7 +939,7 @@ impl SyntaxSnapshot { }); } } - cursor.next(buffer); + cursor.next(); if info.is_some() { return info; } diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 2cc8ea59ab..f0913e30fb 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -1211,7 +1211,7 @@ impl MultiBuffer { let buffer = buffer_state.buffer.read(cx); for range in buffer.edited_ranges_for_transaction_id::(*buffer_transaction) { for excerpt_id in &buffer_state.excerpts { - cursor.seek(excerpt_id, Bias::Left, &()); + cursor.seek(excerpt_id, Bias::Left); if let Some(excerpt) = cursor.item() { if excerpt.locator == *excerpt_id { let excerpt_buffer_start = @@ -1322,7 +1322,7 @@ impl MultiBuffer { let start_locator = snapshot.excerpt_locator_for_id(selection.start.excerpt_id); let end_locator = snapshot.excerpt_locator_for_id(selection.end.excerpt_id); - cursor.seek(&Some(start_locator), Bias::Left, &()); + cursor.seek(&Some(start_locator), Bias::Left); while let Some(excerpt) = cursor.item() { if excerpt.locator > *end_locator { break; @@ -1347,7 +1347,7 @@ impl MultiBuffer { goal: selection.goal, }); - cursor.next(&()); + cursor.next(); } } @@ -1769,13 +1769,13 @@ impl MultiBuffer { let mut next_excerpt_id = move || ExcerptId(post_inc(&mut next_excerpt_id)); let mut excerpts_cursor = snapshot.excerpts.cursor::>(&()); - excerpts_cursor.next(&()); + excerpts_cursor.next(); loop { let new = new_iter.peek(); let existing = if let Some(existing_id) = existing_iter.peek() { let locator = snapshot.excerpt_locator_for_id(*existing_id); - excerpts_cursor.seek_forward(&Some(locator), Bias::Left, &()); + excerpts_cursor.seek_forward(&Some(locator), Bias::Left); if let Some(excerpt) = excerpts_cursor.item() { if excerpt.buffer_id != buffer_snapshot.remote_id() { to_remove.push(*existing_id); @@ -1970,7 +1970,7 @@ impl MultiBuffer { let mut prev_locator = snapshot.excerpt_locator_for_id(prev_excerpt_id).clone(); let mut new_excerpt_ids = mem::take(&mut snapshot.excerpt_ids); let mut cursor = snapshot.excerpts.cursor::>(&()); - let mut new_excerpts = cursor.slice(&prev_locator, Bias::Right, &()); + let mut new_excerpts = cursor.slice(&prev_locator, Bias::Right); prev_locator = cursor.start().unwrap_or(Locator::min_ref()).clone(); let edit_start = ExcerptOffset::new(new_excerpts.summary().text.len); @@ -2019,7 +2019,7 @@ impl MultiBuffer { let edit_end = ExcerptOffset::new(new_excerpts.summary().text.len); - let suffix = cursor.suffix(&()); + let suffix = cursor.suffix(); let 
changed_trailing_excerpt = suffix.is_empty(); new_excerpts.append(suffix, &()); drop(cursor); @@ -2104,7 +2104,7 @@ impl MultiBuffer { .into_iter() .flatten() { - cursor.seek_forward(&Some(locator), Bias::Left, &()); + cursor.seek_forward(&Some(locator), Bias::Left); if let Some(excerpt) = cursor.item() { if excerpt.locator == *locator { excerpts.push((excerpt.id, excerpt.range.clone())); @@ -2124,25 +2124,25 @@ impl MultiBuffer { let mut diff_transforms = snapshot .diff_transforms .cursor::<(ExcerptDimension, OutputDimension)>(&()); - diff_transforms.next(&()); + diff_transforms.next(); let locators = buffers .get(&buffer_id) .into_iter() .flat_map(|state| &state.excerpts); let mut result = Vec::new(); for locator in locators { - excerpts.seek_forward(&Some(locator), Bias::Left, &()); + excerpts.seek_forward(&Some(locator), Bias::Left); if let Some(excerpt) = excerpts.item() { if excerpt.locator == *locator { let excerpt_start = excerpts.start().1.clone(); let excerpt_end = ExcerptDimension(excerpt_start.0 + excerpt.text_summary.lines); - diff_transforms.seek_forward(&excerpt_start, Bias::Left, &()); + diff_transforms.seek_forward(&excerpt_start, Bias::Left); let overshoot = excerpt_start.0 - diff_transforms.start().0.0; let start = diff_transforms.start().1.0 + overshoot; - diff_transforms.seek_forward(&excerpt_end, Bias::Right, &()); + diff_transforms.seek_forward(&excerpt_end, Bias::Right); let overshoot = excerpt_end.0 - diff_transforms.start().0.0; let end = diff_transforms.start().1.0 + overshoot; @@ -2290,7 +2290,7 @@ impl MultiBuffer { self.paths_by_excerpt.remove(&excerpt_id); // Seek to the next excerpt to remove, preserving any preceding excerpts. let locator = snapshot.excerpt_locator_for_id(excerpt_id); - new_excerpts.append(cursor.slice(&Some(locator), Bias::Left, &()), &()); + new_excerpts.append(cursor.slice(&Some(locator), Bias::Left), &()); if let Some(mut excerpt) = cursor.item() { if excerpt.id != excerpt_id { @@ -2311,7 +2311,7 @@ impl MultiBuffer { removed_buffer_ids.push(excerpt.buffer_id); } } - cursor.next(&()); + cursor.next(); // Skip over any subsequent excerpts that are also removed. 
if let Some(&next_excerpt_id) = excerpt_ids.peek() { @@ -2344,7 +2344,7 @@ impl MultiBuffer { }); } } - let suffix = cursor.suffix(&()); + let suffix = cursor.suffix(); let changed_trailing_excerpt = suffix.is_empty(); new_excerpts.append(suffix, &()); drop(cursor); @@ -2493,7 +2493,7 @@ impl MultiBuffer { let mut cursor = snapshot .excerpts .cursor::<(Option<&Locator>, ExcerptOffset)>(&()); - cursor.seek_forward(&Some(locator), Bias::Left, &()); + cursor.seek_forward(&Some(locator), Bias::Left); if let Some(excerpt) = cursor.item() { if excerpt.locator == *locator { let excerpt_buffer_range = excerpt.range.context.to_offset(&excerpt.buffer); @@ -2724,7 +2724,7 @@ impl MultiBuffer { let snapshot = self.read(cx); let mut cursor = snapshot.diff_transforms.cursor::(&()); let offset_range = range.to_offset(&snapshot); - cursor.seek(&offset_range.start, Bias::Left, &()); + cursor.seek(&offset_range.start, Bias::Left); while let Some(item) = cursor.item() { if *cursor.start() >= offset_range.end && *cursor.start() > offset_range.start { break; @@ -2732,7 +2732,7 @@ impl MultiBuffer { if item.hunk_info().is_some() { return true; } - cursor.next(&()); + cursor.next(); } false } @@ -2746,7 +2746,7 @@ impl MultiBuffer { let end = snapshot.point_to_offset(Point::new(range.end.row + 1, 0)); let start = start.saturating_sub(1); let end = snapshot.len().min(end + 1); - cursor.seek(&start, Bias::Right, &()); + cursor.seek(&start, Bias::Right); while let Some(item) = cursor.item() { if *cursor.start() >= end { break; @@ -2754,7 +2754,7 @@ impl MultiBuffer { if item.hunk_info().is_some() { return true; } - cursor.next(&()); + cursor.next(); } } false @@ -2848,7 +2848,7 @@ impl MultiBuffer { .cursor::<(Option<&Locator>, ExcerptOffset)>(&()); let mut edits = Vec::>::new(); - let prefix = cursor.slice(&Some(locator), Bias::Left, &()); + let prefix = cursor.slice(&Some(locator), Bias::Left); new_excerpts.append(prefix, &()); let mut excerpt = cursor.item().unwrap().clone(); @@ -2883,9 +2883,9 @@ impl MultiBuffer { new_excerpts.push(excerpt, &()); - cursor.next(&()); + cursor.next(); - new_excerpts.append(cursor.suffix(&()), &()); + new_excerpts.append(cursor.suffix(), &()); drop(cursor); snapshot.excerpts = new_excerpts; @@ -2925,7 +2925,7 @@ impl MultiBuffer { let mut edits = Vec::>::new(); for locator in &locators { - let prefix = cursor.slice(&Some(locator), Bias::Left, &()); + let prefix = cursor.slice(&Some(locator), Bias::Left); new_excerpts.append(prefix, &()); let mut excerpt = cursor.item().unwrap().clone(); @@ -2987,10 +2987,10 @@ impl MultiBuffer { new_excerpts.push(excerpt, &()); - cursor.next(&()); + cursor.next(); } - new_excerpts.append(cursor.suffix(&()), &()); + new_excerpts.append(cursor.suffix(), &()); drop(cursor); snapshot.excerpts = new_excerpts; @@ -3070,7 +3070,7 @@ impl MultiBuffer { .cursor::<(Option<&Locator>, ExcerptOffset)>(&()); for (locator, buffer, buffer_edited) in excerpts_to_edit { - new_excerpts.append(cursor.slice(&Some(locator), Bias::Left, &()), &()); + new_excerpts.append(cursor.slice(&Some(locator), Bias::Left), &()); let old_excerpt = cursor.item().unwrap(); let buffer = buffer.read(cx); let buffer_id = buffer.remote_id(); @@ -3112,9 +3112,9 @@ impl MultiBuffer { } new_excerpts.push(new_excerpt, &()); - cursor.next(&()); + cursor.next(); } - new_excerpts.append(cursor.suffix(&()), &()); + new_excerpts.append(cursor.suffix(), &()); drop(cursor); snapshot.excerpts = new_excerpts; @@ -3145,23 +3145,22 @@ impl MultiBuffer { let mut excerpt_edits = 
excerpt_edits.into_iter().peekable(); while let Some(edit) = excerpt_edits.next() { - excerpts.seek_forward(&edit.new.start, Bias::Right, &()); + excerpts.seek_forward(&edit.new.start, Bias::Right); if excerpts.item().is_none() && *excerpts.start() == edit.new.start { - excerpts.prev(&()); + excerpts.prev(); } // Keep any transforms that are before the edit. if at_transform_boundary { at_transform_boundary = false; - let transforms_before_edit = - old_diff_transforms.slice(&edit.old.start, Bias::Left, &()); + let transforms_before_edit = old_diff_transforms.slice(&edit.old.start, Bias::Left); self.append_diff_transforms(&mut new_diff_transforms, transforms_before_edit); if let Some(transform) = old_diff_transforms.item() { - if old_diff_transforms.end(&()).0 == edit.old.start + if old_diff_transforms.end().0 == edit.old.start && old_diff_transforms.start().0 < edit.old.start { self.push_diff_transform(&mut new_diff_transforms, transform.clone()); - old_diff_transforms.next(&()); + old_diff_transforms.next(); } } } @@ -3203,7 +3202,7 @@ impl MultiBuffer { // then recreate the content up to the end of this transform, to prepare // for reusing additional slices of the old transforms. if excerpt_edits.peek().map_or(true, |next_edit| { - next_edit.old.start >= old_diff_transforms.end(&()).0 + next_edit.old.start >= old_diff_transforms.end().0 }) { let keep_next_old_transform = (old_diff_transforms.start().0 >= edit.old.end) && match old_diff_transforms.item() { @@ -3218,8 +3217,8 @@ impl MultiBuffer { let mut excerpt_offset = edit.new.end; if !keep_next_old_transform { - excerpt_offset += old_diff_transforms.end(&()).0 - edit.old.end; - old_diff_transforms.next(&()); + excerpt_offset += old_diff_transforms.end().0 - edit.old.end; + old_diff_transforms.next(); } old_expanded_hunks.clear(); @@ -3234,7 +3233,7 @@ impl MultiBuffer { } // Keep any transforms that are after the last edit. - self.append_diff_transforms(&mut new_diff_transforms, old_diff_transforms.suffix(&())); + self.append_diff_transforms(&mut new_diff_transforms, old_diff_transforms.suffix()); // Ensure there's always at least one buffer content transform. if new_diff_transforms.is_empty() { @@ -3283,10 +3282,10 @@ impl MultiBuffer { ); old_expanded_hunks.insert(hunk_info); } - if old_diff_transforms.end(&()).0 > edit.old.end { + if old_diff_transforms.end().0 > edit.old.end { break; } - old_diff_transforms.next(&()); + old_diff_transforms.next(); } // Avoid querying diff hunks if there's no possibility of hunks being expanded. 
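// Note (illustrative sketch, not part of the patch): the hunks in this change apply one
// mechanical rewrite throughout. SumTree cursor calls such as `seek`, `seek_forward`,
// `next`, `prev`, `end`, `slice`, `suffix`, and `summary` no longer take the tree's
// context argument (`&()`, `buffer`, `text`, `snapshot`, ...) on every call; as the
// call sites above suggest, the context is still supplied once when the cursor is
// constructed (e.g. `self.transforms.cursor::<...>(&())`). The toy below only sketches
// that pattern; the type and method names are simplified stand-ins, not the real
// `sum_tree` API.

struct Cursor<'a, Cx> {
    cx: &'a Cx,      // context captured when the cursor is constructed
    position: usize, // current position in the (elided) tree
}

impl<'a, Cx> Cursor<'a, Cx> {
    fn new(cx: &'a Cx) -> Self {
        Self { cx, position: 0 }
    }

    // Before this change, callers wrote `cursor.next(cx)`; with the context stored on
    // the cursor, the call site shrinks to `cursor.next()`.
    fn next(&mut self) {
        let _ = self.cx; // the context remains available internally
        self.position += 1;
    }
}

fn main() {
    let cx = ();
    let mut cursor = Cursor::new(&cx);
    cursor.next(); // call sites no longer thread the context through every call
    println!("position = {}", cursor.position);
}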
@@ -3413,8 +3412,8 @@ impl MultiBuffer { } } - if excerpts.end(&()) <= edit.new.end { - excerpts.next(&()); + if excerpts.end() <= edit.new.end { + excerpts.next(); } else { break; } @@ -3439,9 +3438,9 @@ impl MultiBuffer { *summary, ) { let mut cursor = subtree.cursor::<()>(&()); - cursor.next(&()); - cursor.next(&()); - new_transforms.append(cursor.suffix(&()), &()); + cursor.next(); + cursor.next(); + new_transforms.append(cursor.suffix(), &()); return; } } @@ -4715,14 +4714,14 @@ impl MultiBufferSnapshot { { let range = range.start.to_offset(self)..range.end.to_offset(self); let mut cursor = self.diff_transforms.cursor::<(usize, ExcerptOffset)>(&()); - cursor.seek(&range.start, Bias::Right, &()); + cursor.seek(&range.start, Bias::Right); let Some(first_transform) = cursor.item() else { return D::from_text_summary(&TextSummary::default()); }; let diff_transform_start = cursor.start().0; - let diff_transform_end = cursor.end(&()).0; + let diff_transform_end = cursor.end().0; let diff_start = range.start; let start_overshoot = diff_start - diff_transform_start; let end_overshoot = std::cmp::min(range.end, diff_transform_end) - diff_transform_start; @@ -4765,12 +4764,10 @@ impl MultiBufferSnapshot { return result; } - cursor.next(&()); - result.add_assign(&D::from_text_summary(&cursor.summary( - &range.end, - Bias::Right, - &(), - ))); + cursor.next(); + result.add_assign(&D::from_text_summary( + &cursor.summary(&range.end, Bias::Right), + )); let Some(last_transform) = cursor.item() else { return result; @@ -4813,9 +4810,9 @@ impl MultiBufferSnapshot { // let mut range = range.start..range.end; let mut summary = D::zero(&()); let mut cursor = self.excerpts.cursor::(&()); - cursor.seek(&range.start, Bias::Right, &()); + cursor.seek(&range.start, Bias::Right); if let Some(excerpt) = cursor.item() { - let mut end_before_newline = cursor.end(&()); + let mut end_before_newline = cursor.end(); if excerpt.has_trailing_newline { end_before_newline -= ExcerptOffset::new(1); } @@ -4834,13 +4831,13 @@ impl MultiBufferSnapshot { summary.add_assign(&D::from_text_summary(&TextSummary::from("\n"))); } - cursor.next(&()); + cursor.next(); } if range.end > *cursor.start() { summary.add_assign( &cursor - .summary::<_, ExcerptDimension>(&range.end, Bias::Right, &()) + .summary::<_, ExcerptDimension>(&range.end, Bias::Right) .0, ); if let Some(excerpt) = cursor.item() { @@ -4876,11 +4873,11 @@ impl MultiBufferSnapshot { D: TextDimension + Ord + Sub, { loop { - let transform_end_position = diff_transforms.end(&()).0.0; + let transform_end_position = diff_transforms.end().0.0; let at_transform_end = excerpt_position == transform_end_position && diff_transforms.item().is_some(); if at_transform_end && anchor.text_anchor.bias == Bias::Right { - diff_transforms.next(&()); + diff_transforms.next(); continue; } @@ -4906,7 +4903,7 @@ impl MultiBufferSnapshot { ); position.add_assign(&position_in_hunk); } else if at_transform_end { - diff_transforms.next(&()); + diff_transforms.next(); continue; } } @@ -4915,7 +4912,7 @@ impl MultiBufferSnapshot { } _ => { if at_transform_end && anchor.diff_base_anchor.is_some() { - diff_transforms.next(&()); + diff_transforms.next(); continue; } let overshoot = excerpt_position - diff_transforms.start().0.0; @@ -4933,9 +4930,9 @@ impl MultiBufferSnapshot { .cursor::<(Option<&Locator>, ExcerptOffset)>(&()); let locator = self.excerpt_locator_for_id(anchor.excerpt_id); - cursor.seek(&Some(locator), Bias::Left, &()); + cursor.seek(&Some(locator), Bias::Left); if cursor.item().is_none() 
{ - cursor.next(&()); + cursor.next(); } let mut position = cursor.start().1; @@ -4975,7 +4972,7 @@ impl MultiBufferSnapshot { let mut diff_transforms_cursor = self .diff_transforms .cursor::<(ExcerptDimension, OutputDimension)>(&()); - diff_transforms_cursor.next(&()); + diff_transforms_cursor.next(); let mut summaries = Vec::new(); while let Some(anchor) = anchors.peek() { @@ -4990,9 +4987,9 @@ impl MultiBufferSnapshot { }); let locator = self.excerpt_locator_for_id(excerpt_id); - cursor.seek_forward(locator, Bias::Left, &()); + cursor.seek_forward(locator, Bias::Left); if cursor.item().is_none() { - cursor.next(&()); + cursor.next(); } let excerpt_start_position = D::from_text_summary(&cursor.start().text); @@ -5022,11 +5019,8 @@ impl MultiBufferSnapshot { } if position > diff_transforms_cursor.start().0.0 { - diff_transforms_cursor.seek_forward( - &ExcerptDimension(position), - Bias::Left, - &(), - ); + diff_transforms_cursor + .seek_forward(&ExcerptDimension(position), Bias::Left); } summaries.push(self.resolve_summary_for_anchor( @@ -5036,11 +5030,8 @@ impl MultiBufferSnapshot { )); } } else { - diff_transforms_cursor.seek_forward( - &ExcerptDimension(excerpt_start_position), - Bias::Left, - &(), - ); + diff_transforms_cursor + .seek_forward(&ExcerptDimension(excerpt_start_position), Bias::Left); let position = self.resolve_summary_for_anchor( &Anchor::max(), excerpt_start_position, @@ -5099,7 +5090,7 @@ impl MultiBufferSnapshot { { let mut anchors = anchors.into_iter().enumerate().peekable(); let mut cursor = self.excerpts.cursor::>(&()); - cursor.next(&()); + cursor.next(); let mut result = Vec::new(); @@ -5108,10 +5099,10 @@ impl MultiBufferSnapshot { // Find the location where this anchor's excerpt should be. let old_locator = self.excerpt_locator_for_id(old_excerpt_id); - cursor.seek_forward(&Some(old_locator), Bias::Left, &()); + cursor.seek_forward(&Some(old_locator), Bias::Left); if cursor.item().is_none() { - cursor.next(&()); + cursor.next(); } let next_excerpt = cursor.item(); @@ -5211,13 +5202,13 @@ impl MultiBufferSnapshot { // Find the given position in the diff transforms. Determine the corresponding // offset in the excerpts, and whether the position is within a deleted hunk. let mut diff_transforms = self.diff_transforms.cursor::<(usize, ExcerptOffset)>(&()); - diff_transforms.seek(&offset, Bias::Right, &()); + diff_transforms.seek(&offset, Bias::Right); if offset == diff_transforms.start().0 && bias == Bias::Left { if let Some(prev_item) = diff_transforms.prev_item() { match prev_item { DiffTransform::DeletedHunk { .. 
} => { - diff_transforms.prev(&()); + diff_transforms.prev(); } _ => {} } @@ -5260,13 +5251,13 @@ impl MultiBufferSnapshot { let mut excerpts = self .excerpts .cursor::<(ExcerptOffset, Option)>(&()); - excerpts.seek(&excerpt_offset, Bias::Right, &()); + excerpts.seek(&excerpt_offset, Bias::Right); if excerpts.item().is_none() && excerpt_offset == excerpts.start().0 && bias == Bias::Left { - excerpts.prev(&()); + excerpts.prev(); } if let Some(excerpt) = excerpts.item() { let mut overshoot = excerpt_offset.saturating_sub(excerpts.start().0).value; - if excerpt.has_trailing_newline && excerpt_offset == excerpts.end(&()).0 { + if excerpt.has_trailing_newline && excerpt_offset == excerpts.end().0 { overshoot -= 1; bias = Bias::Right; } @@ -5297,7 +5288,7 @@ impl MultiBufferSnapshot { let excerpt_id = self.latest_excerpt_id(excerpt_id); let locator = self.excerpt_locator_for_id(excerpt_id); let mut cursor = self.excerpts.cursor::>(&()); - cursor.seek(locator, Bias::Left, &()); + cursor.seek(locator, Bias::Left); if let Some(excerpt) = cursor.item() { if excerpt.id == excerpt_id { let text_anchor = excerpt.clip_anchor(text_anchor); @@ -5351,13 +5342,13 @@ impl MultiBufferSnapshot { let mut excerpts = self .excerpts .cursor::<(Option<&Locator>, ExcerptDimension)>(&()); - excerpts.seek(&Some(start_locator), Bias::Left, &()); - excerpts.prev(&()); + excerpts.seek(&Some(start_locator), Bias::Left); + excerpts.prev(); let mut diff_transforms = self.diff_transforms.cursor::>(&()); - diff_transforms.seek(&excerpts.start().1, Bias::Left, &()); - if diff_transforms.end(&()).excerpt_dimension < excerpts.start().1 { - diff_transforms.next(&()); + diff_transforms.seek(&excerpts.start().1, Bias::Left); + if diff_transforms.end().excerpt_dimension < excerpts.start().1 { + diff_transforms.next(); } let excerpt = excerpts.item()?; @@ -6193,7 +6184,7 @@ impl MultiBufferSnapshot { Locator::max_ref() } else { let mut cursor = self.excerpt_ids.cursor::(&()); - cursor.seek(&id, Bias::Left, &()); + cursor.seek(&id, Bias::Left); if let Some(entry) = cursor.item() { if entry.id == id { return &entry.locator; @@ -6229,7 +6220,7 @@ impl MultiBufferSnapshot { let mut cursor = self.excerpt_ids.cursor::(&()); for id in sorted_ids { - if cursor.seek_forward(&id, Bias::Left, &()) { + if cursor.seek_forward(&id, Bias::Left) { locators.push(cursor.item().unwrap().locator.clone()); } else { panic!("invalid excerpt id {:?}", id); @@ -6253,16 +6244,16 @@ impl MultiBufferSnapshot { .excerpts .cursor::<(Option<&Locator>, ExcerptDimension)>(&()); let locator = self.excerpt_locator_for_id(excerpt_id); - if cursor.seek(&Some(locator), Bias::Left, &()) { + if cursor.seek(&Some(locator), Bias::Left) { let start = cursor.start().1.clone(); - let end = cursor.end(&()).1; + let end = cursor.end().1; let mut diff_transforms = self .diff_transforms .cursor::<(ExcerptDimension, OutputDimension)>(&()); - diff_transforms.seek(&start, Bias::Left, &()); + diff_transforms.seek(&start, Bias::Left); let overshoot = start.0 - diff_transforms.start().0.0; let start = diff_transforms.start().1.0 + overshoot; - diff_transforms.seek(&end, Bias::Right, &()); + diff_transforms.seek(&end, Bias::Right); let overshoot = end.0 - diff_transforms.start().0.0; let end = diff_transforms.start().1.0 + overshoot; Some(start..end) @@ -6274,7 +6265,7 @@ impl MultiBufferSnapshot { pub fn buffer_range_for_excerpt(&self, excerpt_id: ExcerptId) -> Option> { let mut cursor = self.excerpts.cursor::>(&()); let locator = self.excerpt_locator_for_id(excerpt_id); - if 
cursor.seek(&Some(locator), Bias::Left, &()) { + if cursor.seek(&Some(locator), Bias::Left) { if let Some(excerpt) = cursor.item() { return Some(excerpt.range.context.clone()); } @@ -6285,7 +6276,7 @@ impl MultiBufferSnapshot { fn excerpt(&self, excerpt_id: ExcerptId) -> Option<&Excerpt> { let mut cursor = self.excerpts.cursor::>(&()); let locator = self.excerpt_locator_for_id(excerpt_id); - cursor.seek(&Some(locator), Bias::Left, &()); + cursor.seek(&Some(locator), Bias::Left); if let Some(excerpt) = cursor.item() { if excerpt.id == excerpt_id { return Some(excerpt); @@ -6333,7 +6324,7 @@ impl MultiBufferSnapshot { let mut cursor = self.excerpts.cursor::(&()); let start_locator = self.excerpt_locator_for_id(range.start.excerpt_id); let end_locator = self.excerpt_locator_for_id(range.end.excerpt_id); - cursor.seek(start_locator, Bias::Left, &()); + cursor.seek(start_locator, Bias::Left); cursor .take_while(move |excerpt| excerpt.locator <= *end_locator) .flat_map(move |excerpt| { @@ -6472,11 +6463,11 @@ where fn seek(&mut self, position: &D) { self.cached_region.take(); self.diff_transforms - .seek(&OutputDimension(*position), Bias::Right, &()); + .seek(&OutputDimension(*position), Bias::Right); if self.diff_transforms.item().is_none() && *position == self.diff_transforms.start().output_dimension.0 { - self.diff_transforms.prev(&()); + self.diff_transforms.prev(); } let mut excerpt_position = self.diff_transforms.start().excerpt_dimension.0; @@ -6486,20 +6477,20 @@ where } self.excerpts - .seek(&ExcerptDimension(excerpt_position), Bias::Right, &()); + .seek(&ExcerptDimension(excerpt_position), Bias::Right); if self.excerpts.item().is_none() && excerpt_position == self.excerpts.start().0 { - self.excerpts.prev(&()); + self.excerpts.prev(); } } fn seek_forward(&mut self, position: &D) { self.cached_region.take(); self.diff_transforms - .seek_forward(&OutputDimension(*position), Bias::Right, &()); + .seek_forward(&OutputDimension(*position), Bias::Right); if self.diff_transforms.item().is_none() && *position == self.diff_transforms.start().output_dimension.0 { - self.diff_transforms.prev(&()); + self.diff_transforms.prev(); } let overshoot = *position - self.diff_transforms.start().output_dimension.0; @@ -6509,31 +6500,30 @@ where } self.excerpts - .seek_forward(&ExcerptDimension(excerpt_position), Bias::Right, &()); + .seek_forward(&ExcerptDimension(excerpt_position), Bias::Right); if self.excerpts.item().is_none() && excerpt_position == self.excerpts.start().0 { - self.excerpts.prev(&()); + self.excerpts.prev(); } } fn next_excerpt(&mut self) { - self.excerpts.next(&()); + self.excerpts.next(); self.seek_to_start_of_current_excerpt(); } fn prev_excerpt(&mut self) { - self.excerpts.prev(&()); + self.excerpts.prev(); self.seek_to_start_of_current_excerpt(); } fn seek_to_start_of_current_excerpt(&mut self) { self.cached_region.take(); - self.diff_transforms - .seek(self.excerpts.start(), Bias::Left, &()); - if self.diff_transforms.end(&()).excerpt_dimension == *self.excerpts.start() + self.diff_transforms.seek(self.excerpts.start(), Bias::Left); + if self.diff_transforms.end().excerpt_dimension == *self.excerpts.start() && self.diff_transforms.start().excerpt_dimension < *self.excerpts.start() && self.diff_transforms.next_item().is_some() { - self.diff_transforms.next(&()); + self.diff_transforms.next(); } } @@ -6541,18 +6531,18 @@ where self.cached_region.take(); match self .diff_transforms - .end(&()) + .end() .excerpt_dimension - .cmp(&self.excerpts.end(&())) + .cmp(&self.excerpts.end()) { 
- cmp::Ordering::Less => self.diff_transforms.next(&()), - cmp::Ordering::Greater => self.excerpts.next(&()), + cmp::Ordering::Less => self.diff_transforms.next(), + cmp::Ordering::Greater => self.excerpts.next(), cmp::Ordering::Equal => { - self.diff_transforms.next(&()); - if self.diff_transforms.end(&()).excerpt_dimension > self.excerpts.end(&()) + self.diff_transforms.next(); + if self.diff_transforms.end().excerpt_dimension > self.excerpts.end() || self.diff_transforms.item().is_none() { - self.excerpts.next(&()); + self.excerpts.next(); } else if let Some(DiffTransform::DeletedHunk { hunk_info, .. }) = self.diff_transforms.item() { @@ -6561,7 +6551,7 @@ where .item() .map_or(false, |excerpt| excerpt.id != hunk_info.excerpt_id) { - self.excerpts.next(&()); + self.excerpts.next(); } } } @@ -6576,14 +6566,14 @@ where .excerpt_dimension .cmp(self.excerpts.start()) { - cmp::Ordering::Less => self.excerpts.prev(&()), - cmp::Ordering::Greater => self.diff_transforms.prev(&()), + cmp::Ordering::Less => self.excerpts.prev(), + cmp::Ordering::Greater => self.diff_transforms.prev(), cmp::Ordering::Equal => { - self.diff_transforms.prev(&()); + self.diff_transforms.prev(); if self.diff_transforms.start().excerpt_dimension < *self.excerpts.start() || self.diff_transforms.item().is_none() { - self.excerpts.prev(&()); + self.excerpts.prev(); } } } @@ -6603,9 +6593,9 @@ where return true; } - self.diff_transforms.prev(&()); + self.diff_transforms.prev(); let prev_transform = self.diff_transforms.item(); - self.diff_transforms.next(&()); + self.diff_transforms.next(); prev_transform.map_or(true, |next_transform| { matches!(next_transform, DiffTransform::BufferContent { .. }) @@ -6613,9 +6603,9 @@ where } fn is_at_end_of_excerpt(&mut self) -> bool { - if self.diff_transforms.end(&()).excerpt_dimension < self.excerpts.end(&()) { + if self.diff_transforms.end().excerpt_dimension < self.excerpts.end() { return false; - } else if self.diff_transforms.end(&()).excerpt_dimension > self.excerpts.end(&()) + } else if self.diff_transforms.end().excerpt_dimension > self.excerpts.end() || self.diff_transforms.item().is_none() { return true; @@ -6636,7 +6626,7 @@ where let buffer = &excerpt.buffer; let buffer_context_start = excerpt.range.context.start.summary::(buffer); let mut buffer_start = buffer_context_start; - let overshoot = self.diff_transforms.end(&()).excerpt_dimension.0 - self.excerpts.start().0; + let overshoot = self.diff_transforms.end().excerpt_dimension.0 - self.excerpts.start().0; buffer_start.add_assign(&overshoot); Some(buffer_start) } @@ -6659,7 +6649,7 @@ where let mut buffer_end = buffer_start; buffer_end.add_assign(&buffer_range_len); let start = self.diff_transforms.start().output_dimension.0; - let end = self.diff_transforms.end(&()).output_dimension.0; + let end = self.diff_transforms.end().output_dimension.0; return Some(MultiBufferRegion { buffer, excerpt, @@ -6693,16 +6683,16 @@ where let mut end; let mut buffer_end; let has_trailing_newline; - if self.diff_transforms.end(&()).excerpt_dimension.0 < self.excerpts.end(&()).0 { + if self.diff_transforms.end().excerpt_dimension.0 < self.excerpts.end().0 { let overshoot = - self.diff_transforms.end(&()).excerpt_dimension.0 - self.excerpts.start().0; - end = self.diff_transforms.end(&()).output_dimension.0; + self.diff_transforms.end().excerpt_dimension.0 - self.excerpts.start().0; + end = self.diff_transforms.end().output_dimension.0; buffer_end = buffer_context_start; buffer_end.add_assign(&overshoot); has_trailing_newline = false; } 
else { let overshoot = - self.excerpts.end(&()).0 - self.diff_transforms.start().excerpt_dimension.0; + self.excerpts.end().0 - self.diff_transforms.start().excerpt_dimension.0; end = self.diff_transforms.start().output_dimension.0; end.add_assign(&overshoot); buffer_end = excerpt.range.context.end.summary::(buffer); @@ -7086,11 +7076,11 @@ impl<'a> MultiBufferExcerpt<'a> { /// Maps a range within the [`MultiBuffer`] to a range within the [`Buffer`] pub fn map_range_to_buffer(&mut self, range: Range) -> Range { self.diff_transforms - .seek(&OutputDimension(range.start), Bias::Right, &()); + .seek(&OutputDimension(range.start), Bias::Right); let start = self.map_offset_to_buffer_internal(range.start); let end = if range.end > range.start { self.diff_transforms - .seek_forward(&OutputDimension(range.end), Bias::Right, &()); + .seek_forward(&OutputDimension(range.end), Bias::Right); self.map_offset_to_buffer_internal(range.end) } else { start @@ -7123,7 +7113,7 @@ impl<'a> MultiBufferExcerpt<'a> { } let overshoot = buffer_range.start - self.buffer_offset; let excerpt_offset = ExcerptDimension(self.excerpt_offset.0 + overshoot); - self.diff_transforms.seek(&excerpt_offset, Bias::Right, &()); + self.diff_transforms.seek(&excerpt_offset, Bias::Right); if excerpt_offset.0 < self.diff_transforms.start().excerpt_dimension.0 { log::warn!( "Attempting to map a range from a buffer offset that starts before the current buffer offset" @@ -7137,7 +7127,7 @@ impl<'a> MultiBufferExcerpt<'a> { let overshoot = buffer_range.end - self.buffer_offset; let excerpt_offset = ExcerptDimension(self.excerpt_offset.0 + overshoot); self.diff_transforms - .seek_forward(&excerpt_offset, Bias::Right, &()); + .seek_forward(&excerpt_offset, Bias::Right); let overshoot = excerpt_offset.0 - self.diff_transforms.start().excerpt_dimension.0; self.diff_transforms.start().output_dimension.0 + overshoot } else { @@ -7509,7 +7499,7 @@ impl Iterator for MultiBufferRows<'_> { if let Some(next_region) = self.cursor.region() { region = next_region; } else { - if self.point == self.cursor.diff_transforms.end(&()).output_dimension.0 { + if self.point == self.cursor.diff_transforms.end().output_dimension.0 { let multibuffer_row = MultiBufferRow(self.point.row); let last_excerpt = self .cursor @@ -7615,14 +7605,14 @@ impl<'a> MultiBufferChunks<'a> { } pub fn seek(&mut self, range: Range) { - self.diff_transforms.seek(&range.end, Bias::Right, &()); + self.diff_transforms.seek(&range.end, Bias::Right); let mut excerpt_end = self.diff_transforms.start().1; if let Some(DiffTransform::BufferContent { .. }) = self.diff_transforms.item() { let overshoot = range.end - self.diff_transforms.start().0; excerpt_end.value += overshoot; } - self.diff_transforms.seek(&range.start, Bias::Right, &()); + self.diff_transforms.seek(&range.start, Bias::Right); let mut excerpt_start = self.diff_transforms.start().1; if let Some(DiffTransform::BufferContent { .. 
}) = self.diff_transforms.item() { let overshoot = range.start - self.diff_transforms.start().0; @@ -7636,7 +7626,7 @@ impl<'a> MultiBufferChunks<'a> { fn seek_to_excerpt_offset_range(&mut self, new_range: Range) { self.excerpt_offset_range = new_range.clone(); - self.excerpts.seek(&new_range.start, Bias::Right, &()); + self.excerpts.seek(&new_range.start, Bias::Right); if let Some(excerpt) = self.excerpts.item() { let excerpt_start = *self.excerpts.start(); if let Some(excerpt_chunks) = self @@ -7669,7 +7659,7 @@ impl<'a> MultiBufferChunks<'a> { self.excerpt_offset_range.start.value += chunk.text.len(); return Some(chunk); } else { - self.excerpts.next(&()); + self.excerpts.next(); let excerpt = self.excerpts.item()?; self.excerpt_chunks = Some(excerpt.chunks_in_range( 0..(self.excerpt_offset_range.end - *self.excerpts.start()).value, @@ -7712,12 +7702,12 @@ impl<'a> Iterator for MultiBufferChunks<'a> { if self.range.start >= self.range.end { return None; } - if self.range.start == self.diff_transforms.end(&()).0 { - self.diff_transforms.next(&()); + if self.range.start == self.diff_transforms.end().0 { + self.diff_transforms.next(); } let diff_transform_start = self.diff_transforms.start().0; - let diff_transform_end = self.diff_transforms.end(&()).0; + let diff_transform_end = self.diff_transforms.end().0; debug_assert!(self.range.start < diff_transform_end); let diff_transform = self.diff_transforms.item()?; diff --git a/crates/notifications/src/notification_store.rs b/crates/notifications/src/notification_store.rs index c2f18e5700..0329a53cc7 100644 --- a/crates/notifications/src/notification_store.rs +++ b/crates/notifications/src/notification_store.rs @@ -132,12 +132,12 @@ impl NotificationStore { } let ix = count - 1 - ix; let mut cursor = self.notifications.cursor::(&()); - cursor.seek(&Count(ix), Bias::Right, &()); + cursor.seek(&Count(ix), Bias::Right); cursor.item() } pub fn notification_for_id(&self, id: u64) -> Option<&NotificationEntry> { let mut cursor = self.notifications.cursor::(&()); - cursor.seek(&NotificationId(id), Bias::Left, &()); + cursor.seek(&NotificationId(id), Bias::Left); if let Some(item) = cursor.item() { if item.id == id { return Some(item); @@ -365,7 +365,7 @@ impl NotificationStore { let mut old_range = 0..0; for (i, (id, new_notification)) in notifications.into_iter().enumerate() { - new_notifications.append(cursor.slice(&NotificationId(id), Bias::Left, &()), &()); + new_notifications.append(cursor.slice(&NotificationId(id), Bias::Left), &()); if i == 0 { old_range.start = cursor.start().1.0; @@ -374,7 +374,7 @@ impl NotificationStore { let old_notification = cursor.item(); if let Some(old_notification) = old_notification { if old_notification.id == id { - cursor.next(&()); + cursor.next(); if let Some(new_notification) = &new_notification { if new_notification.is_read { @@ -403,7 +403,7 @@ impl NotificationStore { old_range.end = cursor.start().1.0; let new_count = new_notifications.summary().count - old_range.start; - new_notifications.append(cursor.suffix(&()), &()); + new_notifications.append(cursor.suffix(), &()); drop(cursor); self.notifications = new_notifications; diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 6e3d27deff..eb16446daf 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -4279,7 +4279,7 @@ impl Repository { for (repo_path, status) in &*statuses.entries { changed_paths.remove(repo_path); - if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left, &()) { + 
if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left) { if cursor.item().is_some_and(|entry| entry.status == *status) { continue; } @@ -4292,7 +4292,7 @@ impl Repository { } let mut cursor = prev_statuses.cursor::(&()); for path in changed_paths.into_iter() { - if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left, &()) { + if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) { changed_path_statuses.push(Edit::Remove(PathKey(path.0))); } } diff --git a/crates/project/src/git_store/git_traversal.rs b/crates/project/src/git_store/git_traversal.rs index 68ed03cfe9..cd173d5714 100644 --- a/crates/project/src/git_store/git_traversal.rs +++ b/crates/project/src/git_store/git_traversal.rs @@ -72,14 +72,13 @@ impl<'a> GitTraversal<'a> { if entry.is_dir() { let mut statuses = statuses.clone(); - statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left, &()); - let summary = - statuses.summary(&PathTarget::Successor(repo_path.as_ref()), Bias::Left, &()); + statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left); + let summary = statuses.summary(&PathTarget::Successor(repo_path.as_ref()), Bias::Left); self.current_entry_summary = Some(summary); } else if entry.is_file() { // For a file entry, park the cursor on the corresponding status - if statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left, &()) { + if statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left) { // TODO: Investigate statuses.item() being None here. self.current_entry_summary = statuses.item().map(|item| item.status.into()); } else { diff --git a/crates/rope/src/rope.rs b/crates/rope/src/rope.rs index 535b863b7d..515cd71331 100644 --- a/crates/rope/src/rope.rs +++ b/crates/rope/src/rope.rs @@ -41,9 +41,9 @@ impl Rope { self.push_chunk(chunk.as_slice()); let mut chunks = rope.chunks.cursor::<()>(&()); - chunks.next(&()); - chunks.next(&()); - self.chunks.append(chunks.suffix(&()), &()); + chunks.next(); + chunks.next(); + self.chunks.append(chunks.suffix(), &()); self.check_invariants(); return; } @@ -283,7 +283,7 @@ impl Rope { return self.summary().len_utf16; } let mut cursor = self.chunks.cursor::<(usize, OffsetUtf16)>(&()); - cursor.seek(&offset, Bias::Left, &()); + cursor.seek(&offset, Bias::Left); let overshoot = offset - cursor.start().0; cursor.start().1 + cursor.item().map_or(Default::default(), |chunk| { @@ -296,7 +296,7 @@ impl Rope { return self.summary().len; } let mut cursor = self.chunks.cursor::<(OffsetUtf16, usize)>(&()); - cursor.seek(&offset, Bias::Left, &()); + cursor.seek(&offset, Bias::Left); let overshoot = offset - cursor.start().0; cursor.start().1 + cursor.item().map_or(Default::default(), |chunk| { @@ -309,7 +309,7 @@ impl Rope { return self.summary().lines; } let mut cursor = self.chunks.cursor::<(usize, Point)>(&()); - cursor.seek(&offset, Bias::Left, &()); + cursor.seek(&offset, Bias::Left); let overshoot = offset - cursor.start().0; cursor.start().1 + cursor.item().map_or(Point::zero(), |chunk| { @@ -322,7 +322,7 @@ impl Rope { return self.summary().lines_utf16(); } let mut cursor = self.chunks.cursor::<(usize, PointUtf16)>(&()); - cursor.seek(&offset, Bias::Left, &()); + cursor.seek(&offset, Bias::Left); let overshoot = offset - cursor.start().0; cursor.start().1 + cursor.item().map_or(PointUtf16::zero(), |chunk| { @@ -335,7 +335,7 @@ impl Rope { return self.summary().lines_utf16(); } let mut cursor = self.chunks.cursor::<(Point, PointUtf16)>(&()); - cursor.seek(&point, Bias::Left, &()); + cursor.seek(&point, 
Bias::Left); let overshoot = point - cursor.start().0; cursor.start().1 + cursor.item().map_or(PointUtf16::zero(), |chunk| { @@ -348,7 +348,7 @@ impl Rope { return self.summary().len; } let mut cursor = self.chunks.cursor::<(Point, usize)>(&()); - cursor.seek(&point, Bias::Left, &()); + cursor.seek(&point, Bias::Left); let overshoot = point - cursor.start().0; cursor.start().1 + cursor @@ -369,7 +369,7 @@ impl Rope { return self.summary().len; } let mut cursor = self.chunks.cursor::<(PointUtf16, usize)>(&()); - cursor.seek(&point, Bias::Left, &()); + cursor.seek(&point, Bias::Left); let overshoot = point - cursor.start().0; cursor.start().1 + cursor.item().map_or(0, |chunk| { @@ -382,7 +382,7 @@ impl Rope { return self.summary().lines; } let mut cursor = self.chunks.cursor::<(PointUtf16, Point)>(&()); - cursor.seek(&point.0, Bias::Left, &()); + cursor.seek(&point.0, Bias::Left); let overshoot = Unclipped(point.0 - cursor.start().0); cursor.start().1 + cursor.item().map_or(Point::zero(), |chunk| { @@ -392,7 +392,7 @@ impl Rope { pub fn clip_offset(&self, mut offset: usize, bias: Bias) -> usize { let mut cursor = self.chunks.cursor::(&()); - cursor.seek(&offset, Bias::Left, &()); + cursor.seek(&offset, Bias::Left); if let Some(chunk) = cursor.item() { let mut ix = offset - cursor.start(); while !chunk.text.is_char_boundary(ix) { @@ -415,7 +415,7 @@ impl Rope { pub fn clip_offset_utf16(&self, offset: OffsetUtf16, bias: Bias) -> OffsetUtf16 { let mut cursor = self.chunks.cursor::(&()); - cursor.seek(&offset, Bias::Right, &()); + cursor.seek(&offset, Bias::Right); if let Some(chunk) = cursor.item() { let overshoot = offset - cursor.start(); *cursor.start() + chunk.as_slice().clip_offset_utf16(overshoot, bias) @@ -426,7 +426,7 @@ impl Rope { pub fn clip_point(&self, point: Point, bias: Bias) -> Point { let mut cursor = self.chunks.cursor::(&()); - cursor.seek(&point, Bias::Right, &()); + cursor.seek(&point, Bias::Right); if let Some(chunk) = cursor.item() { let overshoot = point - cursor.start(); *cursor.start() + chunk.as_slice().clip_point(overshoot, bias) @@ -437,7 +437,7 @@ impl Rope { pub fn clip_point_utf16(&self, point: Unclipped, bias: Bias) -> PointUtf16 { let mut cursor = self.chunks.cursor::(&()); - cursor.seek(&point.0, Bias::Right, &()); + cursor.seek(&point.0, Bias::Right); if let Some(chunk) = cursor.item() { let overshoot = Unclipped(point.0 - cursor.start()); *cursor.start() + chunk.as_slice().clip_point_utf16(overshoot, bias) @@ -450,10 +450,6 @@ impl Rope { self.clip_point(Point::new(row, u32::MAX), Bias::Left) .column } - - pub fn ptr_eq(&self, other: &Self) -> bool { - self.chunks.ptr_eq(&other.chunks) - } } impl<'a> From<&'a str> for Rope { @@ -514,7 +510,7 @@ pub struct Cursor<'a> { impl<'a> Cursor<'a> { pub fn new(rope: &'a Rope, offset: usize) -> Self { let mut chunks = rope.chunks.cursor(&()); - chunks.seek(&offset, Bias::Right, &()); + chunks.seek(&offset, Bias::Right); Self { rope, chunks, @@ -525,7 +521,7 @@ impl<'a> Cursor<'a> { pub fn seek_forward(&mut self, end_offset: usize) { debug_assert!(end_offset >= self.offset); - self.chunks.seek_forward(&end_offset, Bias::Right, &()); + self.chunks.seek_forward(&end_offset, Bias::Right); self.offset = end_offset; } @@ -540,14 +536,14 @@ impl<'a> Cursor<'a> { let mut slice = Rope::new(); if let Some(start_chunk) = self.chunks.item() { let start_ix = self.offset - self.chunks.start(); - let end_ix = cmp::min(end_offset, self.chunks.end(&())) - self.chunks.start(); + let end_ix = cmp::min(end_offset, self.chunks.end()) - 
self.chunks.start(); slice.push_chunk(start_chunk.slice(start_ix..end_ix)); } - if end_offset > self.chunks.end(&()) { - self.chunks.next(&()); + if end_offset > self.chunks.end() { + self.chunks.next(); slice.append(Rope { - chunks: self.chunks.slice(&end_offset, Bias::Right, &()), + chunks: self.chunks.slice(&end_offset, Bias::Right), }); if let Some(end_chunk) = self.chunks.item() { let end_ix = end_offset - self.chunks.start(); @@ -565,13 +561,13 @@ impl<'a> Cursor<'a> { let mut summary = D::zero(&()); if let Some(start_chunk) = self.chunks.item() { let start_ix = self.offset - self.chunks.start(); - let end_ix = cmp::min(end_offset, self.chunks.end(&())) - self.chunks.start(); + let end_ix = cmp::min(end_offset, self.chunks.end()) - self.chunks.start(); summary.add_assign(&D::from_chunk(start_chunk.slice(start_ix..end_ix))); } - if end_offset > self.chunks.end(&()) { - self.chunks.next(&()); - summary.add_assign(&self.chunks.summary(&end_offset, Bias::Right, &())); + if end_offset > self.chunks.end() { + self.chunks.next(); + summary.add_assign(&self.chunks.summary(&end_offset, Bias::Right)); if let Some(end_chunk) = self.chunks.item() { let end_ix = end_offset - self.chunks.start(); summary.add_assign(&D::from_chunk(end_chunk.slice(0..end_ix))); @@ -603,10 +599,10 @@ impl<'a> Chunks<'a> { pub fn new(rope: &'a Rope, range: Range, reversed: bool) -> Self { let mut chunks = rope.chunks.cursor(&()); let offset = if reversed { - chunks.seek(&range.end, Bias::Left, &()); + chunks.seek(&range.end, Bias::Left); range.end } else { - chunks.seek(&range.start, Bias::Right, &()); + chunks.seek(&range.start, Bias::Right); range.start }; Self { @@ -642,10 +638,10 @@ impl<'a> Chunks<'a> { Bias::Right }; - if offset >= self.chunks.end(&()) { - self.chunks.seek_forward(&offset, bias, &()); + if offset >= self.chunks.end() { + self.chunks.seek_forward(&offset, bias); } else { - self.chunks.seek(&offset, bias, &()); + self.chunks.seek(&offset, bias); } self.offset = offset; @@ -674,25 +670,25 @@ impl<'a> Chunks<'a> { found = self.offset <= self.range.end; } else { self.chunks - .search_forward(|summary| summary.text.lines.row > 0, &()); + .search_forward(|summary| summary.text.lines.row > 0); self.offset = *self.chunks.start(); if let Some(newline_ix) = self.peek().and_then(|chunk| chunk.find('\n')) { self.offset += newline_ix + 1; found = self.offset <= self.range.end; } else { - self.offset = self.chunks.end(&()); + self.offset = self.chunks.end(); } } - if self.offset == self.chunks.end(&()) { + if self.offset == self.chunks.end() { self.next(); } } if self.offset > self.range.end { self.offset = cmp::min(self.offset, self.range.end); - self.chunks.seek(&self.offset, Bias::Right, &()); + self.chunks.seek(&self.offset, Bias::Right); } found @@ -711,7 +707,7 @@ impl<'a> Chunks<'a> { let initial_offset = self.offset; if self.offset == *self.chunks.start() { - self.chunks.prev(&()); + self.chunks.prev(); } if let Some(chunk) = self.chunks.item() { @@ -729,14 +725,14 @@ impl<'a> Chunks<'a> { } self.chunks - .search_backward(|summary| summary.text.lines.row > 0, &()); + .search_backward(|summary| summary.text.lines.row > 0); self.offset = *self.chunks.start(); if let Some(chunk) = self.chunks.item() { if let Some(newline_ix) = chunk.text.rfind('\n') { self.offset += newline_ix + 1; if self.offset_is_valid() { - if self.offset == self.chunks.end(&()) { - self.chunks.next(&()); + if self.offset == self.chunks.end() { + self.chunks.next(); } return true; @@ -746,7 +742,7 @@ impl<'a> Chunks<'a> { if 
!self.offset_is_valid() || self.chunks.item().is_none() { self.offset = self.range.start; - self.chunks.seek(&self.offset, Bias::Right, &()); + self.chunks.seek(&self.offset, Bias::Right); } self.offset < initial_offset && self.offset == 0 @@ -765,7 +761,7 @@ impl<'a> Chunks<'a> { slice_start..slice_end } else { let slice_start = self.offset - chunk_start; - let slice_end = cmp::min(self.chunks.end(&()), self.range.end) - chunk_start; + let slice_end = cmp::min(self.chunks.end(), self.range.end) - chunk_start; slice_start..slice_end }; @@ -825,12 +821,12 @@ impl<'a> Iterator for Chunks<'a> { if self.reversed { self.offset -= chunk.len(); if self.offset <= *self.chunks.start() { - self.chunks.prev(&()); + self.chunks.prev(); } } else { self.offset += chunk.len(); - if self.offset >= self.chunks.end(&()) { - self.chunks.next(&()); + if self.offset >= self.chunks.end() { + self.chunks.next(); } } @@ -848,9 +844,9 @@ impl<'a> Bytes<'a> { pub fn new(rope: &'a Rope, range: Range, reversed: bool) -> Self { let mut chunks = rope.chunks.cursor(&()); if reversed { - chunks.seek(&range.end, Bias::Left, &()); + chunks.seek(&range.end, Bias::Left); } else { - chunks.seek(&range.start, Bias::Right, &()); + chunks.seek(&range.start, Bias::Right); } Self { chunks, @@ -861,7 +857,7 @@ impl<'a> Bytes<'a> { pub fn peek(&self) -> Option<&'a [u8]> { let chunk = self.chunks.item()?; - if self.reversed && self.range.start >= self.chunks.end(&()) { + if self.reversed && self.range.start >= self.chunks.end() { return None; } let chunk_start = *self.chunks.start(); @@ -881,9 +877,9 @@ impl<'a> Iterator for Bytes<'a> { let result = self.peek(); if result.is_some() { if self.reversed { - self.chunks.prev(&()); + self.chunks.prev(); } else { - self.chunks.next(&()); + self.chunks.next(); } } result @@ -905,9 +901,9 @@ impl io::Read for Bytes<'_> { if len == chunk.len() { if self.reversed { - self.chunks.prev(&()); + self.chunks.prev(); } else { - self.chunks.next(&()); + self.chunks.next(); } } Ok(len) diff --git a/crates/sum_tree/src/cursor.rs b/crates/sum_tree/src/cursor.rs index 8edd04afce..50a556a6d2 100644 --- a/crates/sum_tree/src/cursor.rs +++ b/crates/sum_tree/src/cursor.rs @@ -25,6 +25,7 @@ pub struct Cursor<'a, T: Item, D> { position: D, did_seek: bool, at_end: bool, + cx: &'a ::Context, } impl fmt::Debug for Cursor<'_, T, D> @@ -52,21 +53,22 @@ where T: Item, D: Dimension<'a, T::Summary>, { - pub fn new(tree: &'a SumTree, cx: &::Context) -> Self { + pub fn new(tree: &'a SumTree, cx: &'a ::Context) -> Self { Self { tree, stack: ArrayVec::new(), position: D::zero(cx), did_seek: false, at_end: tree.is_empty(), + cx, } } - fn reset(&mut self, cx: &::Context) { + fn reset(&mut self) { self.did_seek = false; self.at_end = self.tree.is_empty(); self.stack.truncate(0); - self.position = D::zero(cx); + self.position = D::zero(self.cx); } pub fn start(&self) -> &D { @@ -74,10 +76,10 @@ where } #[track_caller] - pub fn end(&self, cx: &::Context) -> D { + pub fn end(&self) -> D { if let Some(item_summary) = self.item_summary() { let mut end = self.start().clone(); - end.add_summary(item_summary, cx); + end.add_summary(item_summary, self.cx); end } else { self.start().clone() @@ -202,12 +204,12 @@ where } #[track_caller] - pub fn prev(&mut self, cx: &::Context) { - self.search_backward(|_| true, cx) + pub fn prev(&mut self) { + self.search_backward(|_| true) } #[track_caller] - pub fn search_backward(&mut self, mut filter_node: F, cx: &::Context) + pub fn search_backward(&mut self, mut filter_node: F) where F: 
FnMut(&T::Summary) -> bool, { @@ -217,13 +219,13 @@ where } if self.at_end { - self.position = D::zero(cx); + self.position = D::zero(self.cx); self.at_end = self.tree.is_empty(); if !self.tree.is_empty() { self.stack.push(StackEntry { tree: self.tree, index: self.tree.0.child_summaries().len(), - position: D::from_summary(self.tree.summary(), cx), + position: D::from_summary(self.tree.summary(), self.cx), }); } } @@ -233,7 +235,7 @@ where if let Some(StackEntry { position, .. }) = self.stack.iter().rev().nth(1) { self.position = position.clone(); } else { - self.position = D::zero(cx); + self.position = D::zero(self.cx); } let entry = self.stack.last_mut().unwrap(); @@ -247,7 +249,7 @@ where } for summary in &entry.tree.0.child_summaries()[..entry.index] { - self.position.add_summary(summary, cx); + self.position.add_summary(summary, self.cx); } entry.position = self.position.clone(); @@ -257,7 +259,7 @@ where if descending { let tree = &child_trees[entry.index]; self.stack.push(StackEntry { - position: D::zero(cx), + position: D::zero(self.cx), tree, index: tree.0.child_summaries().len() - 1, }) @@ -273,12 +275,12 @@ where } #[track_caller] - pub fn next(&mut self, cx: &::Context) { - self.search_forward(|_| true, cx) + pub fn next(&mut self) { + self.search_forward(|_| true) } #[track_caller] - pub fn search_forward(&mut self, mut filter_node: F, cx: &::Context) + pub fn search_forward(&mut self, mut filter_node: F) where F: FnMut(&T::Summary) -> bool, { @@ -289,7 +291,7 @@ where self.stack.push(StackEntry { tree: self.tree, index: 0, - position: D::zero(cx), + position: D::zero(self.cx), }); descend = true; } @@ -316,8 +318,8 @@ where break; } else { entry.index += 1; - entry.position.add_summary(next_summary, cx); - self.position.add_summary(next_summary, cx); + entry.position.add_summary(next_summary, self.cx); + self.position.add_summary(next_summary, self.cx); } } @@ -327,8 +329,8 @@ where if !descend { let item_summary = &item_summaries[entry.index]; entry.index += 1; - entry.position.add_summary(item_summary, cx); - self.position.add_summary(item_summary, cx); + entry.position.add_summary(item_summary, self.cx); + self.position.add_summary(item_summary, self.cx); } loop { @@ -337,8 +339,8 @@ where return; } else { entry.index += 1; - entry.position.add_summary(next_item_summary, cx); - self.position.add_summary(next_item_summary, cx); + entry.position.add_summary(next_item_summary, self.cx); + self.position.add_summary(next_item_summary, self.cx); } } else { break None; @@ -380,71 +382,51 @@ where D: Dimension<'a, T::Summary>, { #[track_caller] - pub fn seek( - &mut self, - pos: &Target, - bias: Bias, - cx: &::Context, - ) -> bool + pub fn seek(&mut self, pos: &Target, bias: Bias) -> bool where Target: SeekTarget<'a, T::Summary, D>, { - self.reset(cx); - self.seek_internal(pos, bias, &mut (), cx) + self.reset(); + self.seek_internal(pos, bias, &mut ()) } #[track_caller] - pub fn seek_forward( - &mut self, - pos: &Target, - bias: Bias, - cx: &::Context, - ) -> bool + pub fn seek_forward(&mut self, pos: &Target, bias: Bias) -> bool where Target: SeekTarget<'a, T::Summary, D>, { - self.seek_internal(pos, bias, &mut (), cx) + self.seek_internal(pos, bias, &mut ()) } /// Advances the cursor and returns traversed items as a tree. 
#[track_caller] - pub fn slice( - &mut self, - end: &Target, - bias: Bias, - cx: &::Context, - ) -> SumTree + pub fn slice(&mut self, end: &Target, bias: Bias) -> SumTree where Target: SeekTarget<'a, T::Summary, D>, { let mut slice = SliceSeekAggregate { - tree: SumTree::new(cx), + tree: SumTree::new(self.cx), leaf_items: ArrayVec::new(), leaf_item_summaries: ArrayVec::new(), - leaf_summary: ::zero(cx), + leaf_summary: ::zero(self.cx), }; - self.seek_internal(end, bias, &mut slice, cx); + self.seek_internal(end, bias, &mut slice); slice.tree } #[track_caller] - pub fn suffix(&mut self, cx: &::Context) -> SumTree { - self.slice(&End::new(), Bias::Right, cx) + pub fn suffix(&mut self) -> SumTree { + self.slice(&End::new(), Bias::Right) } #[track_caller] - pub fn summary( - &mut self, - end: &Target, - bias: Bias, - cx: &::Context, - ) -> Output + pub fn summary(&mut self, end: &Target, bias: Bias) -> Output where Target: SeekTarget<'a, T::Summary, D>, Output: Dimension<'a, T::Summary>, { - let mut summary = SummarySeekAggregate(Output::zero(cx)); - self.seek_internal(end, bias, &mut summary, cx); + let mut summary = SummarySeekAggregate(Output::zero(self.cx)); + self.seek_internal(end, bias, &mut summary); summary.0 } @@ -455,10 +437,9 @@ where target: &dyn SeekTarget<'a, T::Summary, D>, bias: Bias, aggregate: &mut dyn SeekAggregate<'a, T>, - cx: &::Context, ) -> bool { assert!( - target.cmp(&self.position, cx) >= Ordering::Equal, + target.cmp(&self.position, self.cx) >= Ordering::Equal, "cannot seek backward", ); @@ -467,7 +448,7 @@ where self.stack.push(StackEntry { tree: self.tree, index: 0, - position: D::zero(cx), + position: D::zero(self.cx), }); } @@ -489,14 +470,14 @@ where .zip(&child_summaries[entry.index..]) { let mut child_end = self.position.clone(); - child_end.add_summary(child_summary, cx); + child_end.add_summary(child_summary, self.cx); - let comparison = target.cmp(&child_end, cx); + let comparison = target.cmp(&child_end, self.cx); if comparison == Ordering::Greater || (comparison == Ordering::Equal && bias == Bias::Right) { self.position = child_end; - aggregate.push_tree(child_tree, child_summary, cx); + aggregate.push_tree(child_tree, child_summary, self.cx); entry.index += 1; entry.position = self.position.clone(); } else { @@ -522,22 +503,22 @@ where .zip(&item_summaries[entry.index..]) { let mut child_end = self.position.clone(); - child_end.add_summary(item_summary, cx); + child_end.add_summary(item_summary, self.cx); - let comparison = target.cmp(&child_end, cx); + let comparison = target.cmp(&child_end, self.cx); if comparison == Ordering::Greater || (comparison == Ordering::Equal && bias == Bias::Right) { self.position = child_end; - aggregate.push_item(item, item_summary, cx); + aggregate.push_item(item, item_summary, self.cx); entry.index += 1; } else { - aggregate.end_leaf(cx); + aggregate.end_leaf(self.cx); break 'outer; } } - aggregate.end_leaf(cx); + aggregate.end_leaf(self.cx); } } @@ -551,11 +532,11 @@ where let mut end = self.position.clone(); if bias == Bias::Left { if let Some(summary) = self.item_summary() { - end.add_summary(summary, cx); + end.add_summary(summary, self.cx); } } - target.cmp(&end, cx) == Ordering::Equal + target.cmp(&end, self.cx) == Ordering::Equal } } @@ -624,21 +605,19 @@ impl<'a, T: Item> Iterator for Iter<'a, T> { } } -impl<'a, T, S, D> Iterator for Cursor<'a, T, D> +impl<'a, T: Item, D> Iterator for Cursor<'a, T, D> where - T: Item, - S: Summary, D: Dimension<'a, T::Summary>, { type Item = &'a T; fn next(&mut self) -> Option { 
if !self.did_seek { - self.next(&()); + self.next(); } if let Some(item) = self.item() { - self.next(&()); + self.next(); Some(item) } else { None @@ -651,7 +630,7 @@ pub struct FilterCursor<'a, F, T: Item, D> { filter_node: F, } -impl<'a, F, T, D> FilterCursor<'a, F, T, D> +impl<'a, F, T: Item, D> FilterCursor<'a, F, T, D> where F: FnMut(&T::Summary) -> bool, T: Item, @@ -659,7 +638,7 @@ where { pub fn new( tree: &'a SumTree, - cx: &::Context, + cx: &'a ::Context, filter_node: F, ) -> Self { let cursor = tree.cursor::(cx); @@ -673,8 +652,8 @@ where self.cursor.start() } - pub fn end(&self, cx: &::Context) -> D { - self.cursor.end(cx) + pub fn end(&self) -> D { + self.cursor.end() } pub fn item(&self) -> Option<&'a T> { @@ -685,31 +664,29 @@ where self.cursor.item_summary() } - pub fn next(&mut self, cx: &::Context) { - self.cursor.search_forward(&mut self.filter_node, cx); + pub fn next(&mut self) { + self.cursor.search_forward(&mut self.filter_node); } - pub fn prev(&mut self, cx: &::Context) { - self.cursor.search_backward(&mut self.filter_node, cx); + pub fn prev(&mut self) { + self.cursor.search_backward(&mut self.filter_node); } } -impl<'a, F, T, S, U> Iterator for FilterCursor<'a, F, T, U> +impl<'a, F, T: Item, U> Iterator for FilterCursor<'a, F, T, U> where F: FnMut(&T::Summary) -> bool, - T: Item, - S: Summary, //Context for the summary must be unit type, as .next() doesn't take arguments U: Dimension<'a, T::Summary>, { type Item = &'a T; fn next(&mut self) -> Option { if !self.cursor.did_seek { - self.next(&()); + self.next(); } if let Some(item) = self.item() { - self.cursor.search_forward(&mut self.filter_node, &()); + self.cursor.search_forward(&mut self.filter_node); Some(item) } else { None @@ -795,3 +772,23 @@ where self.0.add_summary(summary, cx); } } + +struct End(PhantomData); + +impl End { + fn new() -> Self { + Self(PhantomData) + } +} + +impl<'a, S: Summary, D: Dimension<'a, S>> SeekTarget<'a, S, D> for End { + fn cmp(&self, _: &D, _: &S::Context) -> Ordering { + Ordering::Greater + } +} + +impl fmt::Debug for End { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("End").finish() + } +} diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs index 82022d6685..4f9e01ce20 100644 --- a/crates/sum_tree/src/sum_tree.rs +++ b/crates/sum_tree/src/sum_tree.rs @@ -38,7 +38,6 @@ pub trait Summary: Clone { type Context; fn zero(cx: &Self::Context) -> Self; - fn add_summary(&mut self, summary: &Self, cx: &Self::Context); } @@ -138,26 +137,6 @@ where } } -struct End(PhantomData); - -impl End { - fn new() -> Self { - Self(PhantomData) - } -} - -impl<'a, S: Summary, D: Dimension<'a, S>> SeekTarget<'a, S, D> for End { - fn cmp(&self, _: &D, _: &S::Context) -> Ordering { - Ordering::Greater - } -} - -impl fmt::Debug for End { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_tuple("End").finish() - } -} - /// Bias is used to settle ambiguities when determining positions in an ordered sequence. 
/// /// The primary use case is for text, where Bias influences @@ -372,10 +351,10 @@ impl SumTree { pub fn items(&self, cx: &::Context) -> Vec { let mut items = Vec::new(); let mut cursor = self.cursor::<()>(cx); - cursor.next(cx); + cursor.next(); while let Some(item) = cursor.item() { items.push(item.clone()); - cursor.next(cx); + cursor.next(); } items } @@ -384,7 +363,7 @@ impl SumTree { Iter::new(self) } - pub fn cursor<'a, S>(&'a self, cx: &::Context) -> Cursor<'a, T, S> + pub fn cursor<'a, S>(&'a self, cx: &'a ::Context) -> Cursor<'a, T, S> where S: Dimension<'a, T::Summary>, { @@ -395,7 +374,7 @@ impl SumTree { /// that is returned cannot be used with Rust's iterators. pub fn filter<'a, F, U>( &'a self, - cx: &::Context, + cx: &'a ::Context, filter_node: F, ) -> FilterCursor<'a, F, T, U> where @@ -525,10 +504,6 @@ impl SumTree { } } - pub fn ptr_eq(&self, other: &Self) -> bool { - Arc::ptr_eq(&self.0, &other.0) - } - fn push_tree_recursive( &mut self, other: SumTree, @@ -686,11 +661,6 @@ impl SumTree { } => child_trees.last().unwrap().rightmost_leaf(), } } - - #[cfg(debug_assertions)] - pub fn _debug_entries(&self) -> Vec<&T> { - self.iter().collect::>() - } } impl PartialEq for SumTree { @@ -710,15 +680,15 @@ impl SumTree { let mut replaced = None; *self = { let mut cursor = self.cursor::(cx); - let mut new_tree = cursor.slice(&item.key(), Bias::Left, cx); + let mut new_tree = cursor.slice(&item.key(), Bias::Left); if let Some(cursor_item) = cursor.item() { if cursor_item.key() == item.key() { replaced = Some(cursor_item.clone()); - cursor.next(cx); + cursor.next(); } } new_tree.push(item, cx); - new_tree.append(cursor.suffix(cx), cx); + new_tree.append(cursor.suffix(), cx); new_tree }; replaced @@ -728,14 +698,14 @@ impl SumTree { let mut removed = None; *self = { let mut cursor = self.cursor::(cx); - let mut new_tree = cursor.slice(key, Bias::Left, cx); + let mut new_tree = cursor.slice(key, Bias::Left); if let Some(item) = cursor.item() { if item.key() == *key { removed = Some(item.clone()); - cursor.next(cx); + cursor.next(); } } - new_tree.append(cursor.suffix(cx), cx); + new_tree.append(cursor.suffix(), cx); new_tree }; removed @@ -758,7 +728,7 @@ impl SumTree { let mut new_tree = SumTree::new(cx); let mut buffered_items = Vec::new(); - cursor.seek(&T::Key::zero(cx), Bias::Left, cx); + cursor.seek(&T::Key::zero(cx), Bias::Left); for edit in edits { let new_key = edit.key(); let mut old_item = cursor.item(); @@ -768,7 +738,7 @@ impl SumTree { .map_or(false, |old_item| old_item.key() < new_key) { new_tree.extend(buffered_items.drain(..), cx); - let slice = cursor.slice(&new_key, Bias::Left, cx); + let slice = cursor.slice(&new_key, Bias::Left); new_tree.append(slice, cx); old_item = cursor.item(); } @@ -776,7 +746,7 @@ impl SumTree { if let Some(old_item) = old_item { if old_item.key() == new_key { removed.push(old_item.clone()); - cursor.next(cx); + cursor.next(); } } @@ -789,70 +759,25 @@ impl SumTree { } new_tree.extend(buffered_items, cx); - new_tree.append(cursor.suffix(cx), cx); + new_tree.append(cursor.suffix(), cx); new_tree }; removed } - pub fn get(&self, key: &T::Key, cx: &::Context) -> Option<&T> { + pub fn get<'a>( + &'a self, + key: &T::Key, + cx: &'a ::Context, + ) -> Option<&'a T> { let mut cursor = self.cursor::(cx); - if cursor.seek(key, Bias::Left, cx) { + if cursor.seek(key, Bias::Left) { cursor.item() } else { None } } - - #[inline] - pub fn contains(&self, key: &T::Key, cx: &::Context) -> bool { - self.get(key, cx).is_some() - } - - pub fn update( - &mut 
self, - key: &T::Key, - cx: &::Context, - f: F, - ) -> Option - where - F: FnOnce(&mut T) -> R, - { - let mut cursor = self.cursor::(cx); - let mut new_tree = cursor.slice(key, Bias::Left, cx); - let mut result = None; - if Ord::cmp(key, &cursor.end(cx)) == Ordering::Equal { - let mut updated = cursor.item().unwrap().clone(); - result = Some(f(&mut updated)); - new_tree.push(updated, cx); - cursor.next(cx); - } - new_tree.append(cursor.suffix(cx), cx); - drop(cursor); - *self = new_tree; - result - } - - pub fn retain bool>( - &mut self, - cx: &::Context, - mut predicate: F, - ) { - let mut new_map = SumTree::new(cx); - - let mut cursor = self.cursor::(cx); - cursor.next(cx); - while let Some(item) = cursor.item() { - if predicate(&item) { - new_map.push(item.clone(), cx); - } - cursor.next(cx); - } - drop(cursor); - - *self = new_map; - } } impl Default for SumTree @@ -1061,14 +986,14 @@ mod tests { tree = { let mut cursor = tree.cursor::(&()); - let mut new_tree = cursor.slice(&Count(splice_start), Bias::Right, &()); + let mut new_tree = cursor.slice(&Count(splice_start), Bias::Right); if rng.r#gen() { new_tree.extend(new_items, &()); } else { new_tree.par_extend(new_items, &()); } - cursor.seek(&Count(splice_end), Bias::Right, &()); - new_tree.append(cursor.slice(&tree_end, Bias::Right, &()), &()); + cursor.seek(&Count(splice_end), Bias::Right); + new_tree.append(cursor.slice(&tree_end, Bias::Right), &()); new_tree }; @@ -1090,10 +1015,10 @@ mod tests { .collect::>(); let mut item_ix = if rng.r#gen() { - filter_cursor.next(&()); + filter_cursor.next(); 0 } else { - filter_cursor.prev(&()); + filter_cursor.prev(); expected_filtered_items.len().saturating_sub(1) }; while item_ix < expected_filtered_items.len() { @@ -1103,19 +1028,19 @@ mod tests { assert_eq!(actual_item, &reference_item); assert_eq!(filter_cursor.start().0, reference_index); log::info!("next"); - filter_cursor.next(&()); + filter_cursor.next(); item_ix += 1; while item_ix > 0 && rng.gen_bool(0.2) { log::info!("prev"); - filter_cursor.prev(&()); + filter_cursor.prev(); item_ix -= 1; if item_ix == 0 && rng.gen_bool(0.2) { - filter_cursor.prev(&()); + filter_cursor.prev(); assert_eq!(filter_cursor.item(), None); assert_eq!(filter_cursor.start().0, 0); - filter_cursor.next(&()); + filter_cursor.next(); } } } @@ -1124,9 +1049,9 @@ mod tests { let mut before_start = false; let mut cursor = tree.cursor::(&()); let start_pos = rng.gen_range(0..=reference_items.len()); - cursor.seek(&Count(start_pos), Bias::Right, &()); + cursor.seek(&Count(start_pos), Bias::Right); let mut pos = rng.gen_range(start_pos..=reference_items.len()); - cursor.seek_forward(&Count(pos), Bias::Right, &()); + cursor.seek_forward(&Count(pos), Bias::Right); for i in 0..10 { assert_eq!(cursor.start().0, pos); @@ -1152,13 +1077,13 @@ mod tests { } if i < 5 { - cursor.next(&()); + cursor.next(); if pos < reference_items.len() { pos += 1; before_start = false; } } else { - cursor.prev(&()); + cursor.prev(); if pos == 0 { before_start = true; } @@ -1174,11 +1099,11 @@ mod tests { let end_bias = if rng.r#gen() { Bias::Left } else { Bias::Right }; let mut cursor = tree.cursor::(&()); - cursor.seek(&Count(start), start_bias, &()); - let slice = cursor.slice(&Count(end), end_bias, &()); + cursor.seek(&Count(start), start_bias); + let slice = cursor.slice(&Count(end), end_bias); - cursor.seek(&Count(start), start_bias, &()); - let summary = cursor.summary::<_, Sum>(&Count(end), end_bias, &()); + cursor.seek(&Count(start), start_bias); + let summary = 
cursor.summary::<_, Sum>(&Count(end), end_bias); assert_eq!(summary.0, slice.summary().sum); } @@ -1191,19 +1116,19 @@ mod tests { let tree = SumTree::::default(); let mut cursor = tree.cursor::(&()); assert_eq!( - cursor.slice(&Count(0), Bias::Right, &()).items(&()), + cursor.slice(&Count(0), Bias::Right).items(&()), Vec::::new() ); assert_eq!(cursor.item(), None); assert_eq!(cursor.prev_item(), None); assert_eq!(cursor.next_item(), None); assert_eq!(cursor.start().sum, 0); - cursor.prev(&()); + cursor.prev(); assert_eq!(cursor.item(), None); assert_eq!(cursor.prev_item(), None); assert_eq!(cursor.next_item(), None); assert_eq!(cursor.start().sum, 0); - cursor.next(&()); + cursor.next(); assert_eq!(cursor.item(), None); assert_eq!(cursor.prev_item(), None); assert_eq!(cursor.next_item(), None); @@ -1214,7 +1139,7 @@ mod tests { tree.extend(vec![1], &()); let mut cursor = tree.cursor::(&()); assert_eq!( - cursor.slice(&Count(0), Bias::Right, &()).items(&()), + cursor.slice(&Count(0), Bias::Right).items(&()), Vec::::new() ); assert_eq!(cursor.item(), Some(&1)); @@ -1222,29 +1147,29 @@ mod tests { assert_eq!(cursor.next_item(), None); assert_eq!(cursor.start().sum, 0); - cursor.next(&()); + cursor.next(); assert_eq!(cursor.item(), None); assert_eq!(cursor.prev_item(), Some(&1)); assert_eq!(cursor.next_item(), None); assert_eq!(cursor.start().sum, 1); - cursor.prev(&()); + cursor.prev(); assert_eq!(cursor.item(), Some(&1)); assert_eq!(cursor.prev_item(), None); assert_eq!(cursor.next_item(), None); assert_eq!(cursor.start().sum, 0); let mut cursor = tree.cursor::(&()); - assert_eq!(cursor.slice(&Count(1), Bias::Right, &()).items(&()), [1]); + assert_eq!(cursor.slice(&Count(1), Bias::Right).items(&()), [1]); assert_eq!(cursor.item(), None); assert_eq!(cursor.prev_item(), Some(&1)); assert_eq!(cursor.next_item(), None); assert_eq!(cursor.start().sum, 1); - cursor.seek(&Count(0), Bias::Right, &()); + cursor.seek(&Count(0), Bias::Right); assert_eq!( cursor - .slice(&tree.extent::(&()), Bias::Right, &()) + .slice(&tree.extent::(&()), Bias::Right) .items(&()), [1] ); @@ -1258,80 +1183,80 @@ mod tests { tree.extend(vec![1, 2, 3, 4, 5, 6], &()); let mut cursor = tree.cursor::(&()); - assert_eq!(cursor.slice(&Count(2), Bias::Right, &()).items(&()), [1, 2]); + assert_eq!(cursor.slice(&Count(2), Bias::Right).items(&()), [1, 2]); assert_eq!(cursor.item(), Some(&3)); assert_eq!(cursor.prev_item(), Some(&2)); assert_eq!(cursor.next_item(), Some(&4)); assert_eq!(cursor.start().sum, 3); - cursor.next(&()); + cursor.next(); assert_eq!(cursor.item(), Some(&4)); assert_eq!(cursor.prev_item(), Some(&3)); assert_eq!(cursor.next_item(), Some(&5)); assert_eq!(cursor.start().sum, 6); - cursor.next(&()); + cursor.next(); assert_eq!(cursor.item(), Some(&5)); assert_eq!(cursor.prev_item(), Some(&4)); assert_eq!(cursor.next_item(), Some(&6)); assert_eq!(cursor.start().sum, 10); - cursor.next(&()); + cursor.next(); assert_eq!(cursor.item(), Some(&6)); assert_eq!(cursor.prev_item(), Some(&5)); assert_eq!(cursor.next_item(), None); assert_eq!(cursor.start().sum, 15); - cursor.next(&()); - cursor.next(&()); + cursor.next(); + cursor.next(); assert_eq!(cursor.item(), None); assert_eq!(cursor.prev_item(), Some(&6)); assert_eq!(cursor.next_item(), None); assert_eq!(cursor.start().sum, 21); - cursor.prev(&()); + cursor.prev(); assert_eq!(cursor.item(), Some(&6)); assert_eq!(cursor.prev_item(), Some(&5)); assert_eq!(cursor.next_item(), None); assert_eq!(cursor.start().sum, 15); - cursor.prev(&()); + cursor.prev(); 
assert_eq!(cursor.item(), Some(&5)); assert_eq!(cursor.prev_item(), Some(&4)); assert_eq!(cursor.next_item(), Some(&6)); assert_eq!(cursor.start().sum, 10); - cursor.prev(&()); + cursor.prev(); assert_eq!(cursor.item(), Some(&4)); assert_eq!(cursor.prev_item(), Some(&3)); assert_eq!(cursor.next_item(), Some(&5)); assert_eq!(cursor.start().sum, 6); - cursor.prev(&()); + cursor.prev(); assert_eq!(cursor.item(), Some(&3)); assert_eq!(cursor.prev_item(), Some(&2)); assert_eq!(cursor.next_item(), Some(&4)); assert_eq!(cursor.start().sum, 3); - cursor.prev(&()); + cursor.prev(); assert_eq!(cursor.item(), Some(&2)); assert_eq!(cursor.prev_item(), Some(&1)); assert_eq!(cursor.next_item(), Some(&3)); assert_eq!(cursor.start().sum, 1); - cursor.prev(&()); + cursor.prev(); assert_eq!(cursor.item(), Some(&1)); assert_eq!(cursor.prev_item(), None); assert_eq!(cursor.next_item(), Some(&2)); assert_eq!(cursor.start().sum, 0); - cursor.prev(&()); + cursor.prev(); assert_eq!(cursor.item(), None); assert_eq!(cursor.prev_item(), None); assert_eq!(cursor.next_item(), Some(&1)); assert_eq!(cursor.start().sum, 0); - cursor.next(&()); + cursor.next(); assert_eq!(cursor.item(), Some(&1)); assert_eq!(cursor.prev_item(), None); assert_eq!(cursor.next_item(), Some(&2)); @@ -1340,7 +1265,7 @@ mod tests { let mut cursor = tree.cursor::(&()); assert_eq!( cursor - .slice(&tree.extent::(&()), Bias::Right, &()) + .slice(&tree.extent::(&()), Bias::Right) .items(&()), tree.items(&()) ); @@ -1349,10 +1274,10 @@ mod tests { assert_eq!(cursor.next_item(), None); assert_eq!(cursor.start().sum, 21); - cursor.seek(&Count(3), Bias::Right, &()); + cursor.seek(&Count(3), Bias::Right); assert_eq!( cursor - .slice(&tree.extent::(&()), Bias::Right, &()) + .slice(&tree.extent::(&()), Bias::Right) .items(&()), [4, 5, 6] ); @@ -1362,25 +1287,16 @@ mod tests { assert_eq!(cursor.start().sum, 21); // Seeking can bias left or right - cursor.seek(&Count(1), Bias::Left, &()); + cursor.seek(&Count(1), Bias::Left); assert_eq!(cursor.item(), Some(&1)); - cursor.seek(&Count(1), Bias::Right, &()); + cursor.seek(&Count(1), Bias::Right); assert_eq!(cursor.item(), Some(&2)); // Slicing without resetting starts from where the cursor is parked at. 
- cursor.seek(&Count(1), Bias::Right, &()); - assert_eq!( - cursor.slice(&Count(3), Bias::Right, &()).items(&()), - vec![2, 3] - ); - assert_eq!( - cursor.slice(&Count(6), Bias::Left, &()).items(&()), - vec![4, 5] - ); - assert_eq!( - cursor.slice(&Count(6), Bias::Right, &()).items(&()), - vec![6] - ); + cursor.seek(&Count(1), Bias::Right); + assert_eq!(cursor.slice(&Count(3), Bias::Right).items(&()), vec![2, 3]); + assert_eq!(cursor.slice(&Count(6), Bias::Left).items(&()), vec![4, 5]); + assert_eq!(cursor.slice(&Count(6), Bias::Right).items(&()), vec![6]); } #[test] diff --git a/crates/sum_tree/src/tree_map.rs b/crates/sum_tree/src/tree_map.rs index 884042b722..0397f16182 100644 --- a/crates/sum_tree/src/tree_map.rs +++ b/crates/sum_tree/src/tree_map.rs @@ -54,7 +54,7 @@ impl TreeMap { pub fn get(&self, key: &K) -> Option<&V> { let mut cursor = self.0.cursor::>(&()); - cursor.seek(&MapKeyRef(Some(key)), Bias::Left, &()); + cursor.seek(&MapKeyRef(Some(key)), Bias::Left); if let Some(item) = cursor.item() { if Some(key) == item.key().0.as_ref() { Some(&item.value) @@ -86,12 +86,12 @@ impl TreeMap { let mut removed = None; let mut cursor = self.0.cursor::>(&()); let key = MapKeyRef(Some(key)); - let mut new_tree = cursor.slice(&key, Bias::Left, &()); - if key.cmp(&cursor.end(&()), &()) == Ordering::Equal { + let mut new_tree = cursor.slice(&key, Bias::Left); + if key.cmp(&cursor.end(), &()) == Ordering::Equal { removed = Some(cursor.item().unwrap().value.clone()); - cursor.next(&()); + cursor.next(); } - new_tree.append(cursor.suffix(&()), &()); + new_tree.append(cursor.suffix(), &()); drop(cursor); self.0 = new_tree; removed @@ -101,9 +101,9 @@ impl TreeMap { let start = MapSeekTargetAdaptor(start); let end = MapSeekTargetAdaptor(end); let mut cursor = self.0.cursor::>(&()); - let mut new_tree = cursor.slice(&start, Bias::Left, &()); - cursor.seek(&end, Bias::Left, &()); - new_tree.append(cursor.suffix(&()), &()); + let mut new_tree = cursor.slice(&start, Bias::Left); + cursor.seek(&end, Bias::Left); + new_tree.append(cursor.suffix(), &()); drop(cursor); self.0 = new_tree; } @@ -112,15 +112,15 @@ impl TreeMap { pub fn closest(&self, key: &K) -> Option<(&K, &V)> { let mut cursor = self.0.cursor::>(&()); let key = MapKeyRef(Some(key)); - cursor.seek(&key, Bias::Right, &()); - cursor.prev(&()); + cursor.seek(&key, Bias::Right); + cursor.prev(); cursor.item().map(|item| (&item.key, &item.value)) } pub fn iter_from<'a>(&'a self, from: &K) -> impl Iterator + 'a { let mut cursor = self.0.cursor::>(&()); let from_key = MapKeyRef(Some(from)); - cursor.seek(&from_key, Bias::Left, &()); + cursor.seek(&from_key, Bias::Left); cursor.map(|map_entry| (&map_entry.key, &map_entry.value)) } @@ -131,15 +131,15 @@ impl TreeMap { { let mut cursor = self.0.cursor::>(&()); let key = MapKeyRef(Some(key)); - let mut new_tree = cursor.slice(&key, Bias::Left, &()); + let mut new_tree = cursor.slice(&key, Bias::Left); let mut result = None; - if key.cmp(&cursor.end(&()), &()) == Ordering::Equal { + if key.cmp(&cursor.end(), &()) == Ordering::Equal { let mut updated = cursor.item().unwrap().clone(); result = Some(f(&mut updated.value)); new_tree.push(updated, &()); - cursor.next(&()); + cursor.next(); } - new_tree.append(cursor.suffix(&()), &()); + new_tree.append(cursor.suffix(), &()); drop(cursor); self.0 = new_tree; result @@ -149,12 +149,12 @@ impl TreeMap { let mut new_map = SumTree::>::default(); let mut cursor = self.0.cursor::>(&()); - cursor.next(&()); + cursor.next(); while let Some(item) = cursor.item() { if 
predicate(&item.key, &item.value) { new_map.push(item.clone(), &()); } - cursor.next(&()); + cursor.next(); } drop(cursor); diff --git a/crates/text/src/anchor.rs b/crates/text/src/anchor.rs index 83a4fc8429..5807d3aae0 100644 --- a/crates/text/src/anchor.rs +++ b/crates/text/src/anchor.rs @@ -101,7 +101,7 @@ impl Anchor { } else { let fragment_id = buffer.fragment_id_for_anchor(self); let mut fragment_cursor = buffer.fragments.cursor::<(Option<&Locator>, usize)>(&None); - fragment_cursor.seek(&Some(fragment_id), Bias::Left, &None); + fragment_cursor.seek(&Some(fragment_id), Bias::Left); fragment_cursor .item() .map_or(false, |fragment| fragment.visible) diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index aa9682029e..c1da0649da 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -856,14 +856,13 @@ impl Buffer { let mut new_ropes = RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); let mut old_fragments = self.fragments.cursor::(&None); - let mut new_fragments = - old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right, &None); + let mut new_fragments = old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right); new_ropes.append(new_fragments.summary().text); let mut fragment_start = old_fragments.start().visible; for (range, new_text) in edits { let new_text = LineEnding::normalize_arc(new_text.into()); - let fragment_end = old_fragments.end(&None).visible; + let fragment_end = old_fragments.end().visible; // If the current fragment ends before this range, then jump ahead to the first fragment // that extends past the start of this range, reusing any intervening fragments. @@ -879,10 +878,10 @@ impl Buffer { new_ropes.push_fragment(&suffix, suffix.visible); new_fragments.push(suffix, &None); } - old_fragments.next(&None); + old_fragments.next(); } - let slice = old_fragments.slice(&range.start, Bias::Right, &None); + let slice = old_fragments.slice(&range.start, Bias::Right); new_ropes.append(slice.summary().text); new_fragments.append(slice, &None); fragment_start = old_fragments.start().visible; @@ -935,7 +934,7 @@ impl Buffer { // portions as deleted. while fragment_start < range.end { let fragment = old_fragments.item().unwrap(); - let fragment_end = old_fragments.end(&None).visible; + let fragment_end = old_fragments.end().visible; let mut intersection = fragment.clone(); let intersection_end = cmp::min(range.end, fragment_end); if fragment.visible { @@ -962,7 +961,7 @@ impl Buffer { fragment_start = intersection_end; } if fragment_end <= range.end { - old_fragments.next(&None); + old_fragments.next(); } } @@ -974,7 +973,7 @@ impl Buffer { // If the current fragment has been partially consumed, then consume the rest of it // and advance to the next fragment before slicing. 
if fragment_start > old_fragments.start().visible { - let fragment_end = old_fragments.end(&None).visible; + let fragment_end = old_fragments.end().visible; if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); suffix.len = fragment_end - fragment_start; @@ -983,10 +982,10 @@ impl Buffer { new_ropes.push_fragment(&suffix, suffix.visible); new_fragments.push(suffix, &None); } - old_fragments.next(&None); + old_fragments.next(); } - let suffix = old_fragments.suffix(&None); + let suffix = old_fragments.suffix(); new_ropes.append(suffix.summary().text); new_fragments.append(suffix, &None); let (visible_text, deleted_text) = new_ropes.finish(); @@ -1073,16 +1072,13 @@ impl Buffer { let mut new_ropes = RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); let mut old_fragments = self.fragments.cursor::<(VersionedFullOffset, usize)>(&cx); - let mut new_fragments = old_fragments.slice( - &VersionedFullOffset::Offset(ranges[0].start), - Bias::Left, - &cx, - ); + let mut new_fragments = + old_fragments.slice(&VersionedFullOffset::Offset(ranges[0].start), Bias::Left); new_ropes.append(new_fragments.summary().text); let mut fragment_start = old_fragments.start().0.full_offset(); for (range, new_text) in edits { - let fragment_end = old_fragments.end(&cx).0.full_offset(); + let fragment_end = old_fragments.end().0.full_offset(); // If the current fragment ends before this range, then jump ahead to the first fragment // that extends past the start of this range, reusing any intervening fragments. @@ -1099,18 +1095,18 @@ impl Buffer { new_ropes.push_fragment(&suffix, suffix.visible); new_fragments.push(suffix, &None); } - old_fragments.next(&cx); + old_fragments.next(); } let slice = - old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left, &cx); + old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left); new_ropes.append(slice.summary().text); new_fragments.append(slice, &None); fragment_start = old_fragments.start().0.full_offset(); } // If we are at the end of a non-concurrent fragment, advance to the next one. - let fragment_end = old_fragments.end(&cx).0.full_offset(); + let fragment_end = old_fragments.end().0.full_offset(); if fragment_end == range.start && fragment_end > fragment_start { let mut fragment = old_fragments.item().unwrap().clone(); fragment.len = fragment_end.0 - fragment_start.0; @@ -1118,7 +1114,7 @@ impl Buffer { new_insertions.push(InsertionFragment::insert_new(&fragment)); new_ropes.push_fragment(&fragment, fragment.visible); new_fragments.push(fragment, &None); - old_fragments.next(&cx); + old_fragments.next(); fragment_start = old_fragments.start().0.full_offset(); } @@ -1128,7 +1124,7 @@ impl Buffer { if fragment_start == range.start && fragment.timestamp > timestamp { new_ropes.push_fragment(fragment, fragment.visible); new_fragments.push(fragment.clone(), &None); - old_fragments.next(&cx); + old_fragments.next(); debug_assert_eq!(fragment_start, range.start); } else { break; @@ -1184,7 +1180,7 @@ impl Buffer { // portions as deleted. 
while fragment_start < range.end { let fragment = old_fragments.item().unwrap(); - let fragment_end = old_fragments.end(&cx).0.full_offset(); + let fragment_end = old_fragments.end().0.full_offset(); let mut intersection = fragment.clone(); let intersection_end = cmp::min(range.end, fragment_end); if fragment.was_visible(version, &self.undo_map) { @@ -1213,7 +1209,7 @@ impl Buffer { fragment_start = intersection_end; } if fragment_end <= range.end { - old_fragments.next(&cx); + old_fragments.next(); } } } @@ -1221,7 +1217,7 @@ impl Buffer { // If the current fragment has been partially consumed, then consume the rest of it // and advance to the next fragment before slicing. if fragment_start > old_fragments.start().0.full_offset() { - let fragment_end = old_fragments.end(&cx).0.full_offset(); + let fragment_end = old_fragments.end().0.full_offset(); if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); suffix.len = fragment_end.0 - fragment_start.0; @@ -1230,10 +1226,10 @@ impl Buffer { new_ropes.push_fragment(&suffix, suffix.visible); new_fragments.push(suffix, &None); } - old_fragments.next(&cx); + old_fragments.next(); } - let suffix = old_fragments.suffix(&cx); + let suffix = old_fragments.suffix(); new_ropes.append(suffix.summary().text); new_fragments.append(suffix, &None); let (visible_text, deleted_text) = new_ropes.finish(); @@ -1282,7 +1278,6 @@ impl Buffer { split_offset: insertion_slice.range.start, }, Bias::Left, - &(), ); } while let Some(item) = insertions_cursor.item() { @@ -1292,7 +1287,7 @@ impl Buffer { break; } fragment_ids.push(&item.fragment_id); - insertions_cursor.next(&()); + insertions_cursor.next(); } } fragment_ids.sort_unstable(); @@ -1309,7 +1304,7 @@ impl Buffer { RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); for fragment_id in self.fragment_ids_for_edits(undo.counts.keys()) { - let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left, &None); + let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left); new_ropes.append(preceding_fragments.summary().text); new_fragments.append(preceding_fragments, &None); @@ -1336,11 +1331,11 @@ impl Buffer { new_ropes.push_fragment(&fragment, fragment_was_visible); new_fragments.push(fragment, &None); - old_fragments.next(&None); + old_fragments.next(); } } - let suffix = old_fragments.suffix(&None); + let suffix = old_fragments.suffix(); new_ropes.append(suffix.summary().text); new_fragments.append(suffix, &None); @@ -1571,7 +1566,7 @@ impl Buffer { .fragment_ids_for_edits(edit_ids.into_iter()) .into_iter() .filter_map(move |fragment_id| { - cursor.seek_forward(&Some(fragment_id), Bias::Left, &None); + cursor.seek_forward(&Some(fragment_id), Bias::Left); let fragment = cursor.item()?; let start_offset = cursor.start().1; let end_offset = start_offset + if fragment.visible { fragment.len } else { 0 }; @@ -1793,7 +1788,7 @@ impl Buffer { let mut cursor = self.snapshot.fragments.cursor::>(&None); for insertion_fragment in self.snapshot.insertions.cursor::<()>(&()) { - cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left, &None); + cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left); let fragment = cursor.item().unwrap(); assert_eq!(insertion_fragment.fragment_id, fragment.id); assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset); @@ -1912,7 +1907,7 @@ impl BufferSnapshot { .filter::<_, FragmentTextSummary>(&None, move |summary| { !version.observed_all(&summary.max_version) }); - 
cursor.next(&None); + cursor.next(); let mut visible_cursor = self.visible_text.cursor(0); let mut deleted_cursor = self.deleted_text.cursor(0); @@ -1925,18 +1920,18 @@ impl BufferSnapshot { if fragment.was_visible(version, &self.undo_map) { if fragment.visible { - let text = visible_cursor.slice(cursor.end(&None).visible); + let text = visible_cursor.slice(cursor.end().visible); rope.append(text); } else { deleted_cursor.seek_forward(cursor.start().deleted); - let text = deleted_cursor.slice(cursor.end(&None).deleted); + let text = deleted_cursor.slice(cursor.end().deleted); rope.append(text); } } else if fragment.visible { - visible_cursor.seek_forward(cursor.end(&None).visible); + visible_cursor.seek_forward(cursor.end().visible); } - cursor.next(&None); + cursor.next(); } if cursor.start().visible > visible_cursor.offset() { @@ -2252,7 +2247,7 @@ impl BufferSnapshot { timestamp: anchor.timestamp, split_offset: anchor.offset, }; - insertion_cursor.seek(&anchor_key, anchor.bias, &()); + insertion_cursor.seek(&anchor_key, anchor.bias); if let Some(insertion) = insertion_cursor.item() { let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key); if comparison == Ordering::Greater @@ -2260,15 +2255,15 @@ impl BufferSnapshot { && comparison == Ordering::Equal && anchor.offset > 0) { - insertion_cursor.prev(&()); + insertion_cursor.prev(); } } else { - insertion_cursor.prev(&()); + insertion_cursor.prev(); } let insertion = insertion_cursor.item().expect("invalid insertion"); assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion"); - fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left, &None); + fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left); let fragment = fragment_cursor.item().unwrap(); let mut fragment_offset = fragment_cursor.start().1; if fragment.visible { @@ -2299,7 +2294,7 @@ impl BufferSnapshot { split_offset: anchor.offset, }; let mut insertion_cursor = self.insertions.cursor::(&()); - insertion_cursor.seek(&anchor_key, anchor.bias, &()); + insertion_cursor.seek(&anchor_key, anchor.bias); if let Some(insertion) = insertion_cursor.item() { let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key); if comparison == Ordering::Greater @@ -2307,10 +2302,10 @@ impl BufferSnapshot { && comparison == Ordering::Equal && anchor.offset > 0) { - insertion_cursor.prev(&()); + insertion_cursor.prev(); } } else { - insertion_cursor.prev(&()); + insertion_cursor.prev(); } let Some(insertion) = insertion_cursor @@ -2324,7 +2319,7 @@ impl BufferSnapshot { }; let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, usize)>(&None); - fragment_cursor.seek(&Some(&insertion.fragment_id), Bias::Left, &None); + fragment_cursor.seek(&Some(&insertion.fragment_id), Bias::Left); let fragment = fragment_cursor.item().unwrap(); let mut fragment_offset = fragment_cursor.start().1; if fragment.visible { @@ -2345,7 +2340,7 @@ impl BufferSnapshot { split_offset: anchor.offset, }; let mut insertion_cursor = self.insertions.cursor::(&()); - insertion_cursor.seek(&anchor_key, anchor.bias, &()); + insertion_cursor.seek(&anchor_key, anchor.bias); if let Some(insertion) = insertion_cursor.item() { let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key); if comparison == Ordering::Greater @@ -2353,10 +2348,10 @@ impl BufferSnapshot { && comparison == Ordering::Equal && anchor.offset > 0) { - insertion_cursor.prev(&()); + insertion_cursor.prev(); } } else { - insertion_cursor.prev(&()); + insertion_cursor.prev(); } 
let Some(insertion) = insertion_cursor.item().filter(|insertion| { @@ -2395,7 +2390,7 @@ impl BufferSnapshot { Anchor::MAX } else { let mut fragment_cursor = self.fragments.cursor::(&None); - fragment_cursor.seek(&offset, bias, &None); + fragment_cursor.seek(&offset, bias); let fragment = fragment_cursor.item().unwrap(); let overshoot = offset - *fragment_cursor.start(); Anchor { @@ -2475,7 +2470,7 @@ impl BufferSnapshot { let mut cursor = self.fragments.filter(&None, move |summary| { !since.observed_all(&summary.max_version) }); - cursor.next(&None); + cursor.next(); Some(cursor) }; let mut cursor = self @@ -2483,7 +2478,7 @@ impl BufferSnapshot { .cursor::<(Option<&Locator>, FragmentTextSummary)>(&None); let start_fragment_id = self.fragment_id_for_anchor(&range.start); - cursor.seek(&Some(start_fragment_id), Bias::Left, &None); + cursor.seek(&Some(start_fragment_id), Bias::Left); let mut visible_start = cursor.start().1.visible; let mut deleted_start = cursor.start().1.deleted; if let Some(fragment) = cursor.item() { @@ -2516,7 +2511,7 @@ impl BufferSnapshot { let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| { !since.observed_all(&summary.max_version) }); - cursor.next(&None); + cursor.next(); while let Some(fragment) = cursor.item() { if fragment.id > *end_fragment_id { break; @@ -2528,7 +2523,7 @@ impl BufferSnapshot { return true; } } - cursor.next(&None); + cursor.next(); } } false @@ -2539,14 +2534,14 @@ impl BufferSnapshot { let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| { !since.observed_all(&summary.max_version) }); - cursor.next(&None); + cursor.next(); while let Some(fragment) = cursor.item() { let was_visible = fragment.was_visible(since, &self.undo_map); let is_visible = fragment.visible; if was_visible != is_visible { return true; } - cursor.next(&None); + cursor.next(); } } false @@ -2651,7 +2646,7 @@ impl bool> Iterator for Ed while let Some(fragment) = cursor.item() { if fragment.id < *self.range.start.0 { - cursor.next(&None); + cursor.next(); continue; } else if fragment.id > *self.range.end.0 { break; @@ -2684,7 +2679,7 @@ impl bool> Iterator for Ed }; if !fragment.was_visible(self.since, self.undos) && fragment.visible { - let mut visible_end = cursor.end(&None).visible; + let mut visible_end = cursor.end().visible; if fragment.id == *self.range.end.0 { visible_end = cmp::min( visible_end, @@ -2710,7 +2705,7 @@ impl bool> Iterator for Ed self.new_end = new_end; } else if fragment.was_visible(self.since, self.undos) && !fragment.visible { - let mut deleted_end = cursor.end(&None).deleted; + let mut deleted_end = cursor.end().deleted; if fragment.id == *self.range.end.0 { deleted_end = cmp::min( deleted_end, @@ -2740,7 +2735,7 @@ impl bool> Iterator for Ed self.old_end = old_end; } - cursor.next(&None); + cursor.next(); } pending_edit diff --git a/crates/text/src/undo_map.rs b/crates/text/src/undo_map.rs index ed363cfc6b..6a409189fa 100644 --- a/crates/text/src/undo_map.rs +++ b/crates/text/src/undo_map.rs @@ -74,7 +74,6 @@ impl UndoMap { undo_id: Default::default(), }, Bias::Left, - &(), ); let mut undo_count = 0; @@ -99,7 +98,6 @@ impl UndoMap { undo_id: Default::default(), }, Bias::Left, - &(), ); let mut undo_count = 0; diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 8c407fdd3e..4fc6b91abb 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -2454,16 +2454,16 @@ impl Snapshot { self.entries_by_path = { let mut cursor = 
self.entries_by_path.cursor::(&()); let mut new_entries_by_path = - cursor.slice(&TraversalTarget::path(&removed_entry.path), Bias::Left, &()); + cursor.slice(&TraversalTarget::path(&removed_entry.path), Bias::Left); while let Some(entry) = cursor.item() { if entry.path.starts_with(&removed_entry.path) { self.entries_by_id.remove(&entry.id, &()); - cursor.next(&()); + cursor.next(); } else { break; } } - new_entries_by_path.append(cursor.suffix(&()), &()); + new_entries_by_path.append(cursor.suffix(), &()); new_entries_by_path }; @@ -2576,7 +2576,6 @@ impl Snapshot { include_ignored, }, Bias::Right, - &(), ); Traversal { snapshot: self, @@ -2632,7 +2631,7 @@ impl Snapshot { options: ChildEntriesOptions, ) -> ChildEntriesIter<'a> { let mut cursor = self.entries_by_path.cursor(&()); - cursor.seek(&TraversalTarget::path(parent_path), Bias::Right, &()); + cursor.seek(&TraversalTarget::path(parent_path), Bias::Right); let traversal = Traversal { snapshot: self, cursor, @@ -3056,9 +3055,9 @@ impl BackgroundScannerState { .snapshot .entries_by_path .cursor::(&()); - new_entries = cursor.slice(&TraversalTarget::path(path), Bias::Left, &()); - removed_entries = cursor.slice(&TraversalTarget::successor(path), Bias::Left, &()); - new_entries.append(cursor.suffix(&()), &()); + new_entries = cursor.slice(&TraversalTarget::path(path), Bias::Left); + removed_entries = cursor.slice(&TraversalTarget::successor(path), Bias::Left); + new_entries.append(cursor.suffix(), &()); } self.snapshot.entries_by_path = new_entries; @@ -4925,15 +4924,15 @@ fn build_diff( let mut old_paths = old_snapshot.entries_by_path.cursor::(&()); let mut new_paths = new_snapshot.entries_by_path.cursor::(&()); let mut last_newly_loaded_dir_path = None; - old_paths.next(&()); - new_paths.next(&()); + old_paths.next(); + new_paths.next(); for path in event_paths { let path = PathKey(path.clone()); if old_paths.item().map_or(false, |e| e.path < path.0) { - old_paths.seek_forward(&path, Bias::Left, &()); + old_paths.seek_forward(&path, Bias::Left); } if new_paths.item().map_or(false, |e| e.path < path.0) { - new_paths.seek_forward(&path, Bias::Left, &()); + new_paths.seek_forward(&path, Bias::Left); } loop { match (old_paths.item(), new_paths.item()) { @@ -4949,7 +4948,7 @@ fn build_diff( match Ord::cmp(&old_entry.path, &new_entry.path) { Ordering::Less => { changes.push((old_entry.path.clone(), old_entry.id, Removed)); - old_paths.next(&()); + old_paths.next(); } Ordering::Equal => { if phase == EventsReceivedDuringInitialScan { @@ -4975,8 +4974,8 @@ fn build_diff( changes.push((new_entry.path.clone(), new_entry.id, Updated)); } } - old_paths.next(&()); - new_paths.next(&()); + old_paths.next(); + new_paths.next(); } Ordering::Greater => { let is_newly_loaded = phase == InitialScan @@ -4988,13 +4987,13 @@ fn build_diff( new_entry.id, if is_newly_loaded { Loaded } else { Added }, )); - new_paths.next(&()); + new_paths.next(); } } } (Some(old_entry), None) => { changes.push((old_entry.path.clone(), old_entry.id, Removed)); - old_paths.next(&()); + old_paths.next(); } (None, Some(new_entry)) => { let is_newly_loaded = phase == InitialScan @@ -5006,7 +5005,7 @@ fn build_diff( new_entry.id, if is_newly_loaded { Loaded } else { Added }, )); - new_paths.next(&()); + new_paths.next(); } (None, None) => break, } @@ -5255,7 +5254,7 @@ impl<'a> Traversal<'a> { start_path: &Path, ) -> Self { let mut cursor = snapshot.entries_by_path.cursor(&()); - cursor.seek(&TraversalTarget::path(start_path), Bias::Left, &()); + 
cursor.seek(&TraversalTarget::path(start_path), Bias::Left); let mut traversal = Self { snapshot, cursor, @@ -5282,14 +5281,13 @@ impl<'a> Traversal<'a> { include_ignored: self.include_ignored, }, Bias::Left, - &(), ) } pub fn advance_to_sibling(&mut self) -> bool { while let Some(entry) = self.cursor.item() { self.cursor - .seek_forward(&TraversalTarget::successor(&entry.path), Bias::Left, &()); + .seek_forward(&TraversalTarget::successor(&entry.path), Bias::Left); if let Some(entry) = self.cursor.item() { if (self.include_files || !entry.is_file()) && (self.include_dirs || !entry.is_dir()) @@ -5307,7 +5305,7 @@ impl<'a> Traversal<'a> { return false; }; self.cursor - .seek(&TraversalTarget::path(parent_path), Bias::Left, &()) + .seek(&TraversalTarget::path(parent_path), Bias::Left) } pub fn entry(&self) -> Option<&'a Entry> { @@ -5326,7 +5324,7 @@ impl<'a> Traversal<'a> { pub fn end_offset(&self) -> usize { self.cursor - .end(&()) + .end() .count(self.include_files, self.include_dirs, self.include_ignored) } } From caa520c4999a06907e3cdc6a81ecb634421f0c6a Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Tue, 22 Jul 2025 18:52:17 +0200 Subject: [PATCH 14/25] workspace: Clean up empty panes left over from file opening failures (#34908) Closes #34583 Release Notes: - Fixed empty pane being left after a binary file is dropped into a new pane.s --- crates/workspace/src/pane.rs | 41 ++++++++++++++++++++++-------------- 1 file changed, 25 insertions(+), 16 deletions(-) diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index e57b103c61..c7a2562a1b 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -3239,28 +3239,37 @@ impl Pane { split_direction = None; } - if let Ok(open_task) = workspace.update_in(cx, |workspace, window, cx| { - if let Some(split_direction) = split_direction { - to_pane = workspace.split_pane(to_pane, split_direction, window, cx); - } - workspace.open_paths( - paths, - OpenOptions { - visible: Some(OpenVisible::OnlyDirectories), - ..Default::default() - }, - Some(to_pane.downgrade()), - window, - cx, - ) - }) { + if let Ok((open_task, to_pane)) = + workspace.update_in(cx, |workspace, window, cx| { + if let Some(split_direction) = split_direction { + to_pane = + workspace.split_pane(to_pane, split_direction, window, cx); + } + ( + workspace.open_paths( + paths, + OpenOptions { + visible: Some(OpenVisible::OnlyDirectories), + ..Default::default() + }, + Some(to_pane.downgrade()), + window, + cx, + ), + to_pane, + ) + }) + { let opened_items: Vec<_> = open_task.await; - _ = workspace.update(cx, |workspace, cx| { + _ = workspace.update_in(cx, |workspace, window, cx| { for item in opened_items.into_iter().flatten() { if let Err(e) = item { workspace.show_error(&e, cx); } } + if to_pane.read(cx).items_len() == 0 { + workspace.remove_pane(to_pane, None, window, cx); + } }); } }) From 14cea06f0fb591129df0faac2163dcaf74444a83 Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Tue, 22 Jul 2025 12:18:59 -0500 Subject: [PATCH 15/25] keymap_ui: Fix panic in clear keystrokes (#34909) Closes #ISSUE Release Notes: - N/A *or* Added/Fixed/Improved ... 
--- crates/settings_ui/src/keybindings.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/crates/settings_ui/src/keybindings.rs b/crates/settings_ui/src/keybindings.rs index 5f940e8a25..9e885f69f6 100644 --- a/crates/settings_ui/src/keybindings.rs +++ b/crates/settings_ui/src/keybindings.rs @@ -2784,6 +2784,7 @@ impl KeystrokeInput { else { log::trace!("No keybinding to stop recording keystrokes in keystroke input"); self.close_keystrokes.take(); + self.close_keystrokes_start.take(); return CloseKeystrokeResult::None; }; let action_keystrokes = keybind_for_close_action.keystrokes(); @@ -2976,7 +2977,9 @@ impl KeystrokeInput { return; } window.focus(&self.outer_focus_handle); - if let Some(close_keystrokes_start) = self.close_keystrokes_start.take() { + if let Some(close_keystrokes_start) = self.close_keystrokes_start.take() + && close_keystrokes_start < self.keystrokes.len() + { self.keystrokes.drain(close_keystrokes_start..); } self.close_keystrokes.take(); From d81a8178e916f92c3277098f1f199b05b0d20bde Mon Sep 17 00:00:00 2001 From: Michael Sloan Date: Tue, 22 Jul 2025 11:35:58 -0600 Subject: [PATCH 16/25] Bind "j k" to `NormalBefore` in initial keymap examples (#34912) It looks like typically vim configurations bind "j k" to be the same as escape, which has the "NormalBefore" behavior positioning the block cursor on the character before the insertion cursor. The [vim mode docs](https://zed.dev/docs/vim#useful-contexts-for-vim-mode-key-bindings) also use NormalBefore here. Thanks to @omniwrench for mentioning this in https://github.com/zed-industries/zed/discussions/6661#discussioncomment-13848043 . This was a mistake in #31163. Release Notes: - N/A --- assets/keymaps/initial.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/assets/keymaps/initial.json b/assets/keymaps/initial.json index 0cfd28f0e5..ff6069a816 100644 --- a/assets/keymaps/initial.json +++ b/assets/keymaps/initial.json @@ -15,7 +15,7 @@ { "context": "Editor && vim_mode == insert && !menu", "bindings": { - // "j k": "vim::SwitchToNormalMode" + // "j k": "vim::NormalBefore" } } ] From 9e280d09057ec1d8481f64b48b24869d325a5574 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 22 Jul 2025 13:42:07 -0400 Subject: [PATCH 17/25] collab: Remove unneeded caching of Stripe price IDs by meter ID (#34915) This PR removes the caching of Stripe price IDs by meter ID on the `StripeBilling` object, as we weren't actually reading them anywhere. 
Release Notes: - N/A --- crates/collab/src/stripe_billing.rs | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/crates/collab/src/stripe_billing.rs b/crates/collab/src/stripe_billing.rs index 707928d5cd..50accf9557 100644 --- a/crates/collab/src/stripe_billing.rs +++ b/crates/collab/src/stripe_billing.rs @@ -30,7 +30,6 @@ pub struct StripeBilling { #[derive(Default)] struct StripeBillingState { - price_ids_by_meter_id: HashMap, prices_by_lookup_key: HashMap, } @@ -63,13 +62,7 @@ impl StripeBilling { for price in prices { if let Some(lookup_key) = price.lookup_key.clone() { - state.prices_by_lookup_key.insert(lookup_key, price.clone()); - } - - if let Some(recurring) = price.recurring { - if let Some(meter) = recurring.meter { - state.price_ids_by_meter_id.insert(meter, price.id); - } + state.prices_by_lookup_key.insert(lookup_key, price); } } From 99466f4aebbf20dc00077dcce55f3b4ce1d2b1d3 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 22 Jul 2025 13:57:36 -0400 Subject: [PATCH 18/25] Make zooming from menus not persist (#34910) Closes: https://github.com/zed-industries/zed/issues/34479 Follow-up to: https://github.com/zed-industries/zed/issues/23505 View->Zoom In / Zoom Out / Reset Zoom were not reverted to match when the default keybindings were reverted. Release Notes: - N/A --- crates/zed/src/zed/app_menus.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/zed/src/zed/app_menus.rs b/crates/zed/src/zed/app_menus.rs index c4131dbee9..78532b10b4 100644 --- a/crates/zed/src/zed/app_menus.rs +++ b/crates/zed/src/zed/app_menus.rs @@ -145,15 +145,15 @@ pub fn app_menus() -> Vec { items: vec![ MenuItem::action( "Zoom In", - zed_actions::IncreaseBufferFontSize { persist: true }, + zed_actions::IncreaseBufferFontSize { persist: false }, ), MenuItem::action( "Zoom Out", - zed_actions::DecreaseBufferFontSize { persist: true }, + zed_actions::DecreaseBufferFontSize { persist: false }, ), MenuItem::action( "Reset Zoom", - zed_actions::ResetBufferFontSize { persist: true }, + zed_actions::ResetBufferFontSize { persist: false }, ), MenuItem::separator(), MenuItem::action("Toggle Left Dock", workspace::ToggleLeftDock), From 4272c1508e22a96a5822b422ee5a0b3dd1621512 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Tue, 22 Jul 2025 15:41:12 -0300 Subject: [PATCH 19/25] ai onboarding: Copyedit the whole flow (#34916) Release Notes: - N/A Co-authored-by: Katie Geer --- .../src/agent_api_keys_onboarding.rs | 4 +- crates/ai_onboarding/src/ai_onboarding.rs | 41 ++++++++----------- .../ai_onboarding/src/young_account_banner.rs | 2 +- 3 files changed, 19 insertions(+), 28 deletions(-) diff --git a/crates/ai_onboarding/src/agent_api_keys_onboarding.rs b/crates/ai_onboarding/src/agent_api_keys_onboarding.rs index 883317e566..5f56e4d26e 100644 --- a/crates/ai_onboarding/src/agent_api_keys_onboarding.rs +++ b/crates/ai_onboarding/src/agent_api_keys_onboarding.rs @@ -93,7 +93,7 @@ impl Render for ApiKeysWithProviders { div() .w_full() .child( - Label::new("Or start now using API keys from your environment for the following providers:") + Label::new("Start now using API keys from your environment for the following providers:") .color(Color::Muted) ) ) @@ -129,7 +129,7 @@ impl RenderOnce for ApiKeysWithoutProviders { .child(Divider::horizontal()), ) .child(List::new().child(BulletItem::new( - "You can also use AI in Zed by bringing your own API keys", + "Add your own keys to use AI without signing in.", ))) 
.child( Button::new("configure-providers", "Configure Providers") diff --git a/crates/ai_onboarding/src/ai_onboarding.rs b/crates/ai_onboarding/src/ai_onboarding.rs index 88c962c1ba..e8ce22ff4e 100644 --- a/crates/ai_onboarding/src/ai_onboarding.rs +++ b/crates/ai_onboarding/src/ai_onboarding.rs @@ -141,22 +141,18 @@ impl ZedAiOnboarding { ) .child( List::new() + .child(BulletItem::new("50 prompts per month with Claude models")) .child(BulletItem::new( - "50 prompts per month with the Claude models", - )) - .child(BulletItem::new( - "2000 accepted edit predictions using our open-source Zeta model", + "2,000 accepted edit predictions with Zeta, our open-source model", )), ) } fn pro_trial_definition(&self) -> impl IntoElement { List::new() + .child(BulletItem::new("150 prompts with Claude models")) .child(BulletItem::new( - "150 prompts per month with the Claude models", - )) - .child(BulletItem::new( - "Unlimited accepted edit predictions using our open-source Zeta model", + "Unlimited accepted edit predictions with Zeta, our open-source model", )) } @@ -178,12 +174,12 @@ impl ZedAiOnboarding { List::new() .child(BulletItem::new("500 prompts per month with Claude models")) .child(BulletItem::new( - "Unlimited accepted edit predictions using our open-source Zeta model", + "Unlimited accepted edit predictions with Zeta, our open-source model", )) - .child(BulletItem::new("USD $20 per month")), + .child(BulletItem::new("$20 USD per month")), ) .child( - Button::new("pro", "Start with Pro") + Button::new("pro", "Get Started") .full_width() .style(ButtonStyle::Tinted(ui::TintColor::Accent)) .on_click(move |_, _window, cx| { @@ -206,11 +202,11 @@ impl ZedAiOnboarding { List::new() .child(self.pro_trial_definition()) .child(BulletItem::new( - "Try it out for 14 days with no charge and no credit card required", + "Try it out for 14 days for free, no credit card required", )), ) .child( - Button::new("pro", "Start Pro Trial") + Button::new("pro", "Start Free Trial") .full_width() .style(ButtonStyle::Tinted(ui::TintColor::Accent)) .on_click(move |_, _window, cx| { @@ -225,14 +221,14 @@ impl ZedAiOnboarding { v_flex() .gap_1() .w_full() - .child(Headline::new("Before starting…")) + .child(Headline::new("Accept Terms of Service")) .child( - Label::new("Make sure you have read and accepted Zed AI's terms of service.") + Label::new("We don’t sell your data, track you across the web, or compromise your privacy.") .color(Color::Muted) .mb_2(), ) .child( - Button::new("terms_of_service", "View and Read the Terms of Service") + Button::new("terms_of_service", "Review Terms of Service") .full_width() .style(ButtonStyle::Outlined) .icon(IconName::ArrowUpRight) @@ -241,7 +237,7 @@ impl ZedAiOnboarding { .on_click(move |_, _window, cx| cx.open_url(&zed_urls::terms_of_service(cx))), ) .child( - Button::new("accept_terms", "I've read it and accept it") + Button::new("accept_terms", "Accept") .full_width() .style(ButtonStyle::Tinted(TintColor::Accent)) .on_click({ @@ -259,13 +255,13 @@ impl ZedAiOnboarding { .gap_1() .child(Headline::new("Welcome to Zed AI")) .child( - Label::new("Sign in to start using AI in Zed with a free trial of the Pro plan, which includes:") + Label::new("Sign in to try Zed Pro for 14 days, no credit card required.") .color(Color::Muted) .mb_2(), ) .child(self.pro_trial_definition()) .child( - Button::new("sign_in", "Sign in to Start Trial") + Button::new("sign_in", "Try Zed Pro for Free") .disabled(signing_in) .full_width() .style(ButtonStyle::Tinted(ui::TintColor::Accent)) @@ -284,11 
+280,6 @@ impl ZedAiOnboarding { .relative() .gap_1() .child(Headline::new("Welcome to Zed AI")) - .child( - Label::new("Choose how you want to start.") - .color(Color::Muted) - .mb_2(), - ) .map(|this| { if self.account_too_young { this.child(young_account_banner) @@ -318,7 +309,7 @@ impl ZedAiOnboarding { v_flex() .relative() .gap_1() - .child(Headline::new("Welcome to the Zed Pro free trial")) + .child(Headline::new("Welcome to the Zed Pro Trial")) .child( Label::new("Here's what you get for the next 14 days:") .color(Color::Muted) diff --git a/crates/ai_onboarding/src/young_account_banner.rs b/crates/ai_onboarding/src/young_account_banner.rs index 1e1ed3a865..a43625a60e 100644 --- a/crates/ai_onboarding/src/young_account_banner.rs +++ b/crates/ai_onboarding/src/young_account_banner.rs @@ -6,7 +6,7 @@ pub struct YoungAccountBanner; impl RenderOnce for YoungAccountBanner { fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { - const YOUNG_ACCOUNT_DISCLAIMER: &str = "To prevent abuse of our service, we cannot offer plans to GitHub accounts created fewer than 30 days ago. To request an exception, reach out to billing@zed.dev."; + const YOUNG_ACCOUNT_DISCLAIMER: &str = "To prevent abuse of our service, we cannot offer plans to GitHub accounts created fewer than 30 days ago. To request an exception, reach out to billing-support@zed.dev."; let label = div() .w_full() From 708c2645d104e39d01122ada2ebc0771d4f1623b Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Tue, 22 Jul 2025 20:53:57 +0200 Subject: [PATCH 20/25] collab: Tweak screen selector appearance (#34919) Co-authored-by: Danilo Leal Release Notes: - N/A Co-authored-by: Danilo Leal --- crates/git_ui/src/git_ui.rs | 2 +- crates/title_bar/src/collab.rs | 21 +++++++--- .../ui/src/components/button/split_button.rs | 40 ++++++++++++++----- crates/ui/src/components/list/list.rs | 4 +- 4 files changed, 48 insertions(+), 19 deletions(-) diff --git a/crates/git_ui/src/git_ui.rs b/crates/git_ui/src/git_ui.rs index a9ccaf7160..02b9c243fb 100644 --- a/crates/git_ui/src/git_ui.rs +++ b/crates/git_ui/src/git_ui.rs @@ -501,7 +501,7 @@ mod remote_button { ) .into_any_element(); - SplitButton { left, right } + SplitButton::new(left, right) } } diff --git a/crates/title_bar/src/collab.rs b/crates/title_bar/src/collab.rs index 1eebc0de0c..056c981ccf 100644 --- a/crates/title_bar/src/collab.rs +++ b/crates/title_bar/src/collab.rs @@ -11,8 +11,8 @@ use gpui::{App, Task, Window, actions}; use rpc::proto::{self}; use theme::ActiveTheme; use ui::{ - Avatar, AvatarAudioStatusIndicator, ContextMenu, ContextMenuItem, Facepile, PopoverMenu, - SplitButton, TintColor, Tooltip, prelude::*, + Avatar, AvatarAudioStatusIndicator, ContextMenu, ContextMenuItem, Divider, Facepile, + PopoverMenu, SplitButton, SplitButtonStyle, TintColor, Tooltip, prelude::*, }; use util::maybe; use workspace::notifications::DetachAndPromptErr; @@ -383,6 +383,7 @@ impl TitleBar { .detach_and_log_err(cx); }), ) + .child(Divider::vertical()) .into_any_element(), ); @@ -497,6 +498,7 @@ impl TitleBar { trigger.render(window, cx), self.render_screen_list().into_any_element(), ) + .style(SplitButtonStyle::Outlined) .into_any_element(), ); } @@ -547,10 +549,17 @@ impl TitleBar { entry_render: Box::new(move |_, _| { h_flex() .gap_2() - .child(Icon::new(IconName::Screen).when( - active_screenshare_id == Some(meta.id), - |this| this.color(Color::Accent), - )) + .child( + Icon::new(IconName::Screen) + .size(IconSize::XSmall) + 
.map(|this| { + if active_screenshare_id == Some(meta.id) { + this.color(Color::Accent) + } else { + this.color(Color::Muted) + } + }), + ) .child(Label::new(label.clone())) .child( Label::new(resolution.clone()) diff --git a/crates/ui/src/components/button/split_button.rs b/crates/ui/src/components/button/split_button.rs index c0811ecbab..a7fa2106d1 100644 --- a/crates/ui/src/components/button/split_button.rs +++ b/crates/ui/src/components/button/split_button.rs @@ -1,6 +1,6 @@ use gpui::{ AnyElement, App, BoxShadow, IntoElement, ParentElement, RenderOnce, Styled, Window, div, hsla, - point, px, + point, prelude::FluentBuilder, px, }; use theme::ActiveTheme; @@ -8,6 +8,12 @@ use crate::{ElevationIndex, h_flex}; use super::ButtonLike; +#[derive(Clone, Copy, PartialEq)] +pub enum SplitButtonStyle { + Filled, + Outlined, +} + /// /// A button with two parts: a primary action on the left and a secondary action on the right. /// /// The left side is a [`ButtonLike`] with the main action, while the right side can contain @@ -18,11 +24,21 @@ use super::ButtonLike; pub struct SplitButton { pub left: ButtonLike, pub right: AnyElement, + style: SplitButtonStyle, } impl SplitButton { pub fn new(left: ButtonLike, right: AnyElement) -> Self { - Self { left, right } + Self { + left, + right, + style: SplitButtonStyle::Filled, + } + } + + pub fn style(mut self, style: SplitButtonStyle) -> Self { + self.style = style; + self } } @@ -31,21 +47,23 @@ impl RenderOnce for SplitButton { h_flex() .rounded_sm() .border_1() - .border_color(cx.theme().colors().text_muted.alpha(0.12)) + .border_color(cx.theme().colors().border.opacity(0.5)) .child(div().flex_grow().child(self.left)) .child( div() .h_full() .w_px() - .bg(cx.theme().colors().text_muted.alpha(0.16)), + .bg(cx.theme().colors().border.opacity(0.5)), ) .child(self.right) - .bg(ElevationIndex::Surface.on_elevation_bg(cx)) - .shadow(vec![BoxShadow { - color: hsla(0.0, 0.0, 0.0, 0.16), - offset: point(px(0.), px(1.)), - blur_radius: px(0.), - spread_radius: px(0.), - }]) + .when(self.style == SplitButtonStyle::Filled, |this| { + this.bg(ElevationIndex::Surface.on_elevation_bg(cx)) + .shadow(vec![BoxShadow { + color: hsla(0.0, 0.0, 0.0, 0.16), + offset: point(px(0.), px(1.)), + blur_radius: px(0.), + spread_radius: px(0.), + }]) + }) } } diff --git a/crates/ui/src/components/list/list.rs b/crates/ui/src/components/list/list.rs index 1402b5d3d3..b6950f06a4 100644 --- a/crates/ui/src/components/list/list.rs +++ b/crates/ui/src/components/list/list.rs @@ -84,7 +84,9 @@ impl RenderOnce for List { (false, _) => this.children(self.children), (true, Some(false)) => this, (true, _) => match self.empty_message { - EmptyMessage::Text(text) => this.child(Label::new(text).color(Color::Muted)), + EmptyMessage::Text(text) => { + this.px_2().child(Label::new(text).color(Color::Muted)) + } EmptyMessage::Element(element) => this.child(element), }, }) From c0f75e1a175f0bf7a0931ecad9fb9e803034a3b0 Mon Sep 17 00:00:00 2001 From: Remco Smits Date: Tue, 22 Jul 2025 21:39:52 +0200 Subject: [PATCH 21/25] debugger: Fix built-in JavaScript debug tasks were not working due missing `type` field value (#34894) Release Notes: - Debugger: Fix built-in JavaScript debug tasks were not working due missing `type` field value --- assets/settings/initial_debug_tasks.json | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/assets/settings/initial_debug_tasks.json b/assets/settings/initial_debug_tasks.json index 78fc1fc5f0..af4512bd51 100644 --- 
a/assets/settings/initial_debug_tasks.json
+++ b/assets/settings/initial_debug_tasks.json
@@ -15,13 +15,15 @@
     "adapter": "JavaScript",
     "program": "$ZED_FILE",
     "request": "launch",
-    "cwd": "$ZED_WORKTREE_ROOT"
+    "cwd": "$ZED_WORKTREE_ROOT",
+    "type": "pwa-node"
   },
   {
     "label": "JavaScript debug terminal",
     "adapter": "JavaScript",
     "request": "launch",
     "cwd": "$ZED_WORKTREE_ROOT",
-    "console": "integratedTerminal"
+    "console": "integratedTerminal",
+    "type": "pwa-node"
   }
 ]

From 446d333515fc6d6fca0a8c5ef3e929506c49396f Mon Sep 17 00:00:00 2001
From: Remco Smits
Date: Tue, 22 Jul 2025 21:40:11 +0200
Subject: [PATCH 22/25] debugger: Fix debug console history persistence when
 reusing a previous item (#34893)

Closes #34887

Release Notes:

- Debugger: Fix debug console history persistence when reusing a previous item

---
 crates/project/src/search_history.rs | 14 ++++++++------
 1 file changed, 8 insertions(+), 6 deletions(-)

diff --git a/crates/project/src/search_history.rs b/crates/project/src/search_history.rs
index b84c2e0982..90b169bb0c 100644
--- a/crates/project/src/search_history.rs
+++ b/crates/project/src/search_history.rs
@@ -45,12 +45,6 @@ impl SearchHistory {
     }
 
     pub fn add(&mut self, cursor: &mut SearchHistoryCursor, search_string: String) {
-        if let Some(selected_ix) = cursor.selection {
-            if self.history.get(selected_ix) == Some(&search_string) {
-                return;
-            }
-        }
-
         if self.insertion_behavior == QueryInsertionBehavior::ReplacePreviousIfContains {
             if let Some(previously_searched) = self.history.back_mut() {
                 if search_string.contains(previously_searched.as_str()) {
@@ -144,6 +138,14 @@ mod tests {
         );
         assert_eq!(search_history.current(&cursor), Some("rustlang"));
 
+        // add item when it equals to current item if it's not the last one
+        search_history.add(&mut cursor, "php".to_string());
+        search_history.previous(&mut cursor);
+        assert_eq!(search_history.current(&cursor), Some("rustlang"));
+        search_history.add(&mut cursor, "rustlang".to_string());
+        assert_eq!(search_history.history.len(), 3, "Should add item");
+        assert_eq!(search_history.current(&cursor), Some("rustlang"));
+
         // push enough items to test SEARCH_HISTORY_LIMIT
         for i in 0..MAX_HISTORY_LEN * 2 {
             search_history.add(&mut cursor, format!("item{i}"));

From f3c332d839a2fa4d3b8ba6906a497171f5de9a74 Mon Sep 17 00:00:00 2001
From: Umesh Yadav <23421535+imumesh18@users.noreply.github.com>
Date: Wed, 23 Jul 2025 01:16:25 +0530
Subject: [PATCH 23/25] Fix new crate license symlink (#34922)

The license file was not properly linked to the actual license. This was
caused by the new-crate script linking the license to the wrong file.
Fixed both of them.

Reference logs: ``` 2025-07-22T17:16:19+05:30 ERROR [worktree] error reading target of symlink "/Users/umesh/code/zed/crates/onboarding/LICENSE-GPL": canonicalizing ``` Release Notes: - N/A --- crates/onboarding/LICENSE-GPL | 2 +- script/new-crate | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/onboarding/LICENSE-GPL b/crates/onboarding/LICENSE-GPL index dd648cce4f..89e542f750 120000 --- a/crates/onboarding/LICENSE-GPL +++ b/crates/onboarding/LICENSE-GPL @@ -1 +1 @@ -../../../LICENSE-GPL \ No newline at end of file +../../LICENSE-GPL \ No newline at end of file diff --git a/script/new-crate b/script/new-crate index df574981e7..52ee900b30 100755 --- a/script/new-crate +++ b/script/new-crate @@ -39,7 +39,7 @@ CRATE_PATH="crates/$CRATE_NAME" mkdir -p "$CRATE_PATH/src" # Symlink the license -ln -sf "../../../$LICENSE_FILE" "$CRATE_PATH/$LICENSE_FILE" +ln -sf "../../$LICENSE_FILE" "$CRATE_PATH/$LICENSE_FILE" CARGO_TOML_TEMPLATE=$(cat << 'EOF' [package] From 7f70325a93b63d47cc08ce9e040ba0dcb27f306c Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 22 Jul 2025 16:04:08 -0400 Subject: [PATCH 24/25] language_models: Rename `handler` to `handle` in Bedrock provider (#34923) This PR renames the `handler` field to `handle` on the `BedrockLanguageModelProvider` and `BedrockModel` structs. Release Notes: - N/A --- crates/language_models/src/provider/bedrock.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/crates/language_models/src/provider/bedrock.rs b/crates/language_models/src/provider/bedrock.rs index a022511b11..a86b3e78f5 100644 --- a/crates/language_models/src/provider/bedrock.rs +++ b/crates/language_models/src/provider/bedrock.rs @@ -243,7 +243,7 @@ impl State { pub struct BedrockLanguageModelProvider { http_client: AwsHttpClient, - handler: tokio::runtime::Handle, + handle: tokio::runtime::Handle, state: gpui::Entity, } @@ -260,7 +260,7 @@ impl BedrockLanguageModelProvider { Self { http_client: AwsHttpClient::new(http_client.clone()), - handler: Tokio::handle(cx), + handle: Tokio::handle(cx), state, } } @@ -270,7 +270,7 @@ impl BedrockLanguageModelProvider { id: LanguageModelId::from(model.id().to_string()), model, http_client: self.http_client.clone(), - handler: self.handler.clone(), + handle: self.handle.clone(), state: self.state.clone(), client: OnceCell::new(), request_limiter: RateLimiter::new(4), @@ -371,7 +371,7 @@ struct BedrockModel { id: LanguageModelId, model: Model, http_client: AwsHttpClient, - handler: tokio::runtime::Handle, + handle: tokio::runtime::Handle, client: OnceCell, state: gpui::Entity, request_limiter: RateLimiter, @@ -443,7 +443,7 @@ impl BedrockModel { } } - let config = self.handler.block_on(config_builder.load()); + let config = self.handle.block_on(config_builder.load()); anyhow::Ok(BedrockClient::new(&config)) }) .context("initializing Bedrock client")?; From 5d985fa1d8684aa85e1bbde944c4f1b42591fedd Mon Sep 17 00:00:00 2001 From: Agus Zubiaga Date: Tue, 22 Jul 2025 19:14:34 -0300 Subject: [PATCH 25/25] Improve MCP server responses (#34927) Release Notes: - N/A --- crates/agent_servers/src/claude/mcp_server.rs | 26 ++++++++----------- crates/agent_servers/src/claude/tools.rs | 10 ------- 2 files changed, 11 insertions(+), 25 deletions(-) diff --git a/crates/agent_servers/src/claude/mcp_server.rs b/crates/agent_servers/src/claude/mcp_server.rs index 468027c4c3..2405603550 100644 --- a/crates/agent_servers/src/claude/mcp_server.rs +++ b/crates/agent_servers/src/claude/mcp_server.rs @@ -19,7 +19,7 
@@ use util::debug_panic; use crate::claude::{ McpServerConfig, - tools::{ClaudeTool, EditToolParams, EditToolResponse, ReadToolParams, ReadToolResponse}, + tools::{ClaudeTool, EditToolParams, ReadToolParams}, }; pub struct ClaudeMcpServer { @@ -179,11 +179,9 @@ impl ClaudeMcpServer { let input = serde_json::from_value(request.arguments.context("Arguments required")?)?; - let result = Self::handle_read_tool_call(input, delegate, cx).await?; + let content = Self::handle_read_tool_call(input, delegate, cx).await?; Ok(CallToolResponse { - content: vec![ToolResponseContent::Text { - text: serde_json::to_string(&result)?, - }], + content, is_error: None, meta: None, }) @@ -191,11 +189,9 @@ impl ClaudeMcpServer { let input = serde_json::from_value(request.arguments.context("Arguments required")?)?; - let result = Self::handle_edit_tool_call(input, delegate, cx).await?; + Self::handle_edit_tool_call(input, delegate, cx).await?; Ok(CallToolResponse { - content: vec![ToolResponseContent::Text { - text: serde_json::to_string(&result)?, - }], + content: vec![], is_error: None, meta: None, }) @@ -209,7 +205,7 @@ impl ClaudeMcpServer { params: ReadToolParams, delegate: AcpClientDelegate, cx: &AsyncApp, - ) -> Task> { + ) -> Task>> { cx.foreground_executor().spawn(async move { let response = delegate .read_text_file(ReadTextFileParams { @@ -219,9 +215,9 @@ impl ClaudeMcpServer { }) .await?; - Ok(ReadToolResponse { - content: response.content, - }) + Ok(vec![ToolResponseContent::Text { + text: response.content, + }]) }) } @@ -229,7 +225,7 @@ impl ClaudeMcpServer { params: EditToolParams, delegate: AcpClientDelegate, cx: &AsyncApp, - ) -> Task> { + ) -> Task> { cx.foreground_executor().spawn(async move { let response = delegate .read_text_file_reusing_snapshot(ReadTextFileParams { @@ -251,7 +247,7 @@ impl ClaudeMcpServer { }) .await?; - Ok(EditToolResponse) + Ok(()) }) } diff --git a/crates/agent_servers/src/claude/tools.rs b/crates/agent_servers/src/claude/tools.rs index 9c82139a07..75a26ee230 100644 --- a/crates/agent_servers/src/claude/tools.rs +++ b/crates/agent_servers/src/claude/tools.rs @@ -434,10 +434,6 @@ pub struct EditToolParams { pub new_text: String, } -#[derive(Serialize)] -#[serde(rename_all = "camelCase")] -pub struct EditToolResponse; - #[derive(Deserialize, JsonSchema, Debug)] pub struct ReadToolParams { /// The absolute path to the file to read. @@ -450,12 +446,6 @@ pub struct ReadToolParams { pub limit: Option, } -#[derive(Serialize)] -#[serde(rename_all = "camelCase")] -pub struct ReadToolResponse { - pub content: String, -} - #[derive(Deserialize, JsonSchema, Debug)] pub struct WriteToolParams { /// Absolute path for new file