Merge branch 'main' into mcp-codex

This commit is contained in:
Agus Zubiaga 2025-07-22 19:24:10 -03:00
commit 1e5625c4b4
97 changed files with 3234 additions and 1599 deletions

18
Cargo.lock generated
View file

@ -211,6 +211,7 @@ dependencies = [
"chrono", "chrono",
"client", "client",
"collections", "collections",
"command_palette_hooks",
"component", "component",
"context_server", "context_server",
"db", "db",
@ -232,6 +233,7 @@ dependencies = [
"jsonschema", "jsonschema",
"language", "language",
"language_model", "language_model",
"language_models",
"languages", "languages",
"log", "log",
"lsp", "lsp",
@ -270,6 +272,7 @@ dependencies = [
"time_format", "time_format",
"tree-sitter-md", "tree-sitter-md",
"ui", "ui",
"ui_input",
"unindent", "unindent",
"urlencoding", "urlencoding",
"util", "util",
@ -1870,9 +1873,7 @@ version = "0.1.0"
dependencies = [ dependencies = [
"aws-smithy-runtime-api", "aws-smithy-runtime-api",
"aws-smithy-types", "aws-smithy-types",
"futures 0.3.31",
"http_client", "http_client",
"tokio",
"workspace-hack", "workspace-hack",
] ]
@ -6359,6 +6360,7 @@ dependencies = [
"buffer_diff", "buffer_diff",
"call", "call",
"chrono", "chrono",
"client",
"collections", "collections",
"command_palette_hooks", "command_palette_hooks",
"component", "component",
@ -7400,9 +7402,9 @@ dependencies = [
[[package]] [[package]]
name = "grid" name = "grid"
version = "0.14.0" version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be136d9dacc2a13cc70bb6c8f902b414fb2641f8db1314637c6b7933411a8f82" checksum = "71b01d27060ad58be4663b9e4ac9e2d4806918e8876af8912afbddd1a91d5eaa"
[[package]] [[package]]
name = "group" name = "group"
@ -7854,6 +7856,7 @@ dependencies = [
"derive_more 0.99.19", "derive_more 0.99.19",
"futures 0.3.31", "futures 0.3.31",
"http 1.3.1", "http 1.3.1",
"http-body 1.0.1",
"log", "log",
"serde", "serde",
"serde_json", "serde_json",
@ -9098,11 +9101,11 @@ dependencies = [
"client", "client",
"collections", "collections",
"component", "component",
"convert_case 0.8.0",
"copilot", "copilot",
"credentials_provider", "credentials_provider",
"deepseek", "deepseek",
"editor", "editor",
"fs",
"futures 0.3.31", "futures 0.3.31",
"google_ai", "google_ai",
"gpui", "gpui",
@ -15956,13 +15959,12 @@ dependencies = [
[[package]] [[package]]
name = "taffy" name = "taffy"
version = "0.5.1" version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8b61630cba2afd2c851821add2e1bb1b7851a2436e839ab73b56558b009035e" checksum = "7aaef0ac998e6527d6d0d5582f7e43953bb17221ac75bb8eb2fcc2db3396db1c"
dependencies = [ dependencies = [
"arrayvec", "arrayvec",
"grid", "grid",
"num-traits",
"serde", "serde",
"slotmap", "slotmap",
] ]

View file

@ -482,6 +482,7 @@ heed = { version = "0.21.0", features = ["read-txn-no-tls"] }
hex = "0.4.3" hex = "0.4.3"
html5ever = "0.27.0" html5ever = "0.27.0"
http = "1.1" http = "1.1"
http-body = "1.0"
hyper = "0.14" hyper = "0.14"
ignore = "0.4.22" ignore = "0.4.22"
image = "0.25.1" image = "0.25.1"

View file

@ -0,0 +1,4 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M7.25669 0.999943C8.27509 0.993825 9.24655 1.42125 9.9227 2.17279C11.4427 1.85079 12.9991 2.53518 13.7733 3.86518C14.159 4.5149 14.3171 5.26409 14.2372 5.99994H13.2967C13.3789 5.42185 13.265 4.8321 12.9686 4.32514C12.2353 3.06961 10.6088 2.63919 9.33676 3.36322L6.48032 4.98822C6.46926 4.99697 6.46284 5.01135 6.46372 5.02533V6.38568L9.91294 4.42084C10.0565 4.33818 10.2336 4.33823 10.3768 4.42084L13.1502 5.99994H11.2948L9.88364 5.19623C9.87034 5.19054 9.85459 5.19128 9.84262 5.19916L8.64926 5.87983L8.8602 5.99994H7.99985C6.89539 6.00004 5.99988 6.89547 5.99985 7.99994V9.34955L3.90219 8.15522C3.75815 8.07431 3.66897 7.92228 3.66977 7.75873V4.53803C3.66977 4.50828 3.67172 4.4654 3.67172 4.44135C3.08836 4.65262 2.59832 5.0599 2.28794 5.59174C1.55635 6.84647 1.99122 8.44936 3.26059 9.17475L5.99985 10.7363V11.6162C5.87564 11.6568 5.73827 11.6456 5.6229 11.579L2.7977 9.96869C2.77156 9.95382 2.73449 9.9311 2.71372 9.91889C2.60687 10.5231 2.7194 11.1466 3.0311 11.6777C3.6435 12.7209 4.87159 13.1902 5.99985 12.9023V13.8398C4.50443 14.1233 2.98758 13.4424 2.22641 12.1347C1.71174 11.2677 1.60096 10.2237 1.9227 9.27045C0.880739 8.13295 0.703328 6.46023 1.48325 5.13373C1.98739 4.26024 2.84863 3.64401 3.84653 3.44233C4.3245 1.9837 5.70306 0.996447 7.25669 0.999943ZM7.25766 1.91498C5.78932 1.9143 4.59839 3.08914 4.59751 4.53803V7.79193C4.59926 7.80578 4.60735 7.81796 4.61997 7.82416L5.8143 8.50483L5.81626 4.57611C5.81537 4.41216 5.90431 4.2606 6.04868 4.17963L8.87387 2.56928C8.89868 2.55441 8.93612 2.53379 8.95786 2.5224C8.48035 2.13046 7.8788 1.91498 7.25766 1.91498Z" fill="black"/>
<path d="M13.5 6C14.6046 6 15.5 6.89543 15.5 8V13.5C15.5 14.6046 14.6046 15.5 13.5 15.5H8C6.89543 15.5 6 14.6046 6 13.5V8C6 6.89543 6.89543 6 8 6H13.5ZM10.8916 8.02539C10.0563 8.02539 9.33453 8.27982 8.81934 8.76562C8.30213 9.25335 8.02547 9.94371 8.02539 10.748C8.02539 11.557 8.29852 12.2492 8.81543 12.7373C9.33013 13.2232 10.0521 13.4746 10.8916 13.4746C11.9865 13.4745 12.8545 13.1022 13.3076 12.3525C13.3894 12.2176 13.4521 12.0693 13.4521 11.8857C13.4521 11.4795 13.0933 11.2773 12.7842 11.2773C12.6604 11.2774 12.5292 11.3025 12.4072 11.3779C12.2862 11.4529 12.2058 11.5586 12.1494 11.666L12.1475 11.6689C11.9677 12.0213 11.5535 12.246 10.8955 12.2461C10.4219 12.2461 10.0667 12.0932 9.83008 11.8506C9.59255 11.607 9.44141 11.2389 9.44141 10.748C9.44148 10.264 9.59319 9.89628 9.83203 9.65137C10.0702 9.40725 10.4255 9.25391 10.8916 9.25391C11.4912 9.25399 11.9415 9.50614 12.1289 9.8916V9.89062C12.1888 10.0157 12.276 10.1311 12.4023 10.2129C12.5303 10.2956 12.6724 10.3271 12.8115 10.3271C12.9661 10.3271 13.1303 10.2857 13.2627 10.1758C13.4018 10.0603 13.4746 9.89383 13.4746 9.71582C13.4746 9.61857 13.4542 9.52036 13.4199 9.42773L13.3818 9.33691C12.9749 8.49175 11.9927 8.02548 10.8916 8.02539ZM10.3203 8.97852L10.1494 9.03516C10.2095 9.01178 10.2716 8.99089 10.3359 8.97363C10.3307 8.97505 10.3256 8.97706 10.3203 8.97852ZM10.4814 8.94141C10.4969 8.9385 10.5126 8.93616 10.5283 8.93359C10.5126 8.93617 10.4969 8.9385 10.4814 8.94141ZM10.6709 8.91504C10.6819 8.91399 10.693 8.913 10.7041 8.91211C10.693 8.913 10.6819 8.91399 10.6709 8.91504Z" fill="black" fill-opacity="0.95"/>
</svg>

After

Width:  |  Height:  |  Size: 3.2 KiB

View file

@ -15,7 +15,7 @@
{ {
"context": "Editor && vim_mode == insert && !menu", "context": "Editor && vim_mode == insert && !menu",
"bindings": { "bindings": {
// "j k": "vim::SwitchToNormalMode" // "j k": "vim::NormalBefore"
} }
} }
] ]

View file

@ -6,7 +6,7 @@
} }
}, },
{ {
"context": "Editor", "context": "Editor && mode == full",
"bindings": { "bindings": {
"cmd-l": "go_to_line::Toggle", "cmd-l": "go_to_line::Toggle",
"ctrl-shift-d": "editor::DuplicateLineDown", "ctrl-shift-d": "editor::DuplicateLineDown",
@ -15,7 +15,12 @@
"cmd-enter": "editor::NewlineBelow", "cmd-enter": "editor::NewlineBelow",
"cmd-alt-enter": "editor::NewlineAbove", "cmd-alt-enter": "editor::NewlineAbove",
"cmd-shift-l": "editor::SelectLine", "cmd-shift-l": "editor::SelectLine",
"cmd-shift-t": "outline::Toggle", "cmd-shift-t": "outline::Toggle"
}
},
{
"context": "Editor",
"bindings": {
"alt-backspace": "editor::DeleteToPreviousWordStart", "alt-backspace": "editor::DeleteToPreviousWordStart",
"alt-shift-backspace": "editor::DeleteToNextWordEnd", "alt-shift-backspace": "editor::DeleteToNextWordEnd",
"alt-delete": "editor::DeleteToNextWordEnd", "alt-delete": "editor::DeleteToNextWordEnd",
@ -39,10 +44,6 @@
"ctrl-_": "editor::ConvertToSnakeCase" "ctrl-_": "editor::ConvertToSnakeCase"
} }
}, },
{
"context": "Editor && mode == full",
"bindings": {}
},
{ {
"context": "BufferSearchBar", "context": "BufferSearchBar",
"bindings": { "bindings": {

View file

@ -1076,6 +1076,10 @@
// Send anonymized usage data like what languages you're using Zed with. // Send anonymized usage data like what languages you're using Zed with.
"metrics": true "metrics": true
}, },
// Whether to disable all AI features in Zed.
//
// Default: false
"disable_ai": false,
// Automatically update Zed. This setting may be ignored on Linux if // Automatically update Zed. This setting may be ignored on Linux if
// installed through a package manager. // installed through a package manager.
"auto_update": true, "auto_update": true,
@ -1712,6 +1716,7 @@
"openai": { "openai": {
"api_url": "https://api.openai.com/v1" "api_url": "https://api.openai.com/v1"
}, },
"openai_compatible": {},
"open_router": { "open_router": {
"api_url": "https://openrouter.ai/api/v1" "api_url": "https://openrouter.ai/api/v1"
}, },

View file

@ -15,13 +15,15 @@
"adapter": "JavaScript", "adapter": "JavaScript",
"program": "$ZED_FILE", "program": "$ZED_FILE",
"request": "launch", "request": "launch",
"cwd": "$ZED_WORKTREE_ROOT" "cwd": "$ZED_WORKTREE_ROOT",
"type": "pwa-node"
}, },
{ {
"label": "JavaScript debug terminal", "label": "JavaScript debug terminal",
"adapter": "JavaScript", "adapter": "JavaScript",
"request": "launch", "request": "launch",
"cwd": "$ZED_WORKTREE_ROOT", "cwd": "$ZED_WORKTREE_ROOT",
"console": "integratedTerminal" "console": "integratedTerminal",
"type": "pwa-node"
} }
] ]

View file

@ -47,7 +47,7 @@ use std::{
time::{Duration, Instant}, time::{Duration, Instant},
}; };
use thiserror::Error; use thiserror::Error;
use util::{ResultExt as _, debug_panic, post_inc}; use util::{ResultExt as _, post_inc};
use uuid::Uuid; use uuid::Uuid;
use zed_llm_client::{CompletionIntent, CompletionRequestStatus, UsageLimit}; use zed_llm_client::{CompletionIntent, CompletionRequestStatus, UsageLimit};
@ -1582,20 +1582,18 @@ impl Thread {
model: Arc<dyn LanguageModel>, model: Arc<dyn LanguageModel>,
cx: &mut App, cx: &mut App,
) -> Option<PendingToolUse> { ) -> Option<PendingToolUse> {
let action_log = self.action_log.read(cx); // Represent notification as a simulated `project_notifications` tool call
let tool_name = Arc::from("project_notifications");
let tool = self.tools.read(cx).tool(&tool_name, cx)?;
if !action_log.has_unnotified_user_edits() { if !self.profile.is_tool_enabled(tool.source(), tool.name(), cx) {
return None; return None;
} }
// Represent notification as a simulated `project_notifications` tool call if self
let tool_name = Arc::from("project_notifications"); .action_log
let Some(tool) = self.tools.read(cx).tool(&tool_name, cx) else { .update(cx, |log, cx| log.unnotified_user_edits(cx).is_none())
debug_panic!("`project_notifications` tool not found"); {
return None;
};
if !self.profile.is_tool_enabled(tool.source(), tool.name(), cx) {
return None; return None;
} }
@ -5492,7 +5490,7 @@ fn main() {{
let thread = thread_store.update(cx, |store, cx| store.create_thread(cx)); let thread = thread_store.update(cx, |store, cx| store.create_thread(cx));
let context_store = cx.new(|_cx| ContextStore::new(project.downgrade(), None)); let context_store = cx.new(|_cx| ContextStore::new(project.downgrade(), None));
let provider = Arc::new(FakeLanguageModelProvider); let provider = Arc::new(FakeLanguageModelProvider::default());
let model = provider.test_model(); let model = provider.test_model();
let model: Arc<dyn LanguageModel> = Arc::new(model); let model: Arc<dyn LanguageModel> = Arc::new(model);

View file

@ -434,10 +434,6 @@ pub struct EditToolParams {
pub new_text: String, pub new_text: String,
} }
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct EditToolResponse;
#[derive(Deserialize, JsonSchema, Debug)] #[derive(Deserialize, JsonSchema, Debug)]
pub struct ReadToolParams { pub struct ReadToolParams {
/// The absolute path to the file to read. /// The absolute path to the file to read.
@ -450,12 +446,6 @@ pub struct ReadToolParams {
pub limit: Option<u32>, pub limit: Option<u32>,
} }
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ReadToolResponse {
pub content: String,
}
#[derive(Deserialize, JsonSchema, Debug)] #[derive(Deserialize, JsonSchema, Debug)]
pub struct WriteToolParams { pub struct WriteToolParams {
/// Absolute path for new file /// Absolute path for new file

View file

@ -14,11 +14,7 @@ use schemars::JsonSchema;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use util::debug_panic; use util::debug_panic;
// todo! use shared tool inference? use crate::claude::tools::{ClaudeTool, EditToolParams, ReadToolParams};
use crate::{
claude::tools::{ClaudeTool, EditToolParams, ReadToolParams},
tools::{EditToolResponse, ReadToolResponse},
};
pub struct ZedMcpServer { pub struct ZedMcpServer {
server: context_server::listener::McpServer, server: context_server::listener::McpServer,
@ -196,11 +192,9 @@ impl ZedMcpServer {
let input = let input =
serde_json::from_value(request.arguments.context("Arguments required")?)?; serde_json::from_value(request.arguments.context("Arguments required")?)?;
let result = Self::handle_read_tool_call(input, delegate, cx).await?; let content = Self::handle_read_tool_call(input, delegate, cx).await?;
Ok(CallToolResponse { Ok(CallToolResponse {
content: vec![ToolResponseContent::Text { content,
text: serde_json::to_string(&result)?,
}],
is_error: None, is_error: None,
meta: None, meta: None,
}) })
@ -208,11 +202,9 @@ impl ZedMcpServer {
let input = let input =
serde_json::from_value(request.arguments.context("Arguments required")?)?; serde_json::from_value(request.arguments.context("Arguments required")?)?;
let result = Self::handle_edit_tool_call(input, delegate, cx).await?; Self::handle_edit_tool_call(input, delegate, cx).await?;
Ok(CallToolResponse { Ok(CallToolResponse {
content: vec![ToolResponseContent::Text { content: vec![],
text: serde_json::to_string(&result)?,
}],
is_error: None, is_error: None,
meta: None, meta: None,
}) })
@ -226,7 +218,7 @@ impl ZedMcpServer {
params: ReadToolParams, params: ReadToolParams,
delegate: AcpClientDelegate, delegate: AcpClientDelegate,
cx: &AsyncApp, cx: &AsyncApp,
) -> Task<Result<ReadToolResponse>> { ) -> Task<Result<Vec<ToolResponseContent>>> {
cx.foreground_executor().spawn(async move { cx.foreground_executor().spawn(async move {
let response = delegate let response = delegate
.read_text_file(ReadTextFileParams { .read_text_file(ReadTextFileParams {
@ -236,9 +228,9 @@ impl ZedMcpServer {
}) })
.await?; .await?;
Ok(ReadToolResponse { Ok(vec![ToolResponseContent::Text {
content: response.content, text: response.content,
}) }])
}) })
} }
@ -246,7 +238,7 @@ impl ZedMcpServer {
params: EditToolParams, params: EditToolParams,
delegate: AcpClientDelegate, delegate: AcpClientDelegate,
cx: &AsyncApp, cx: &AsyncApp,
) -> Task<Result<EditToolResponse>> { ) -> Task<Result<()>> {
cx.foreground_executor().spawn(async move { cx.foreground_executor().spawn(async move {
let response = delegate let response = delegate
.read_text_file_reusing_snapshot(ReadTextFileParams { .read_text_file_reusing_snapshot(ReadTextFileParams {
@ -268,7 +260,7 @@ impl ZedMcpServer {
}) })
.await?; .await?;
Ok(EditToolResponse) Ok(())
}) })
} }

View file

@ -32,6 +32,7 @@ buffer_diff.workspace = true
chrono.workspace = true chrono.workspace = true
client.workspace = true client.workspace = true
collections.workspace = true collections.workspace = true
command_palette_hooks.workspace = true
component.workspace = true component.workspace = true
context_server.workspace = true context_server.workspace = true
db.workspace = true db.workspace = true
@ -53,6 +54,7 @@ itertools.workspace = true
jsonschema.workspace = true jsonschema.workspace = true
language.workspace = true language.workspace = true
language_model.workspace = true language_model.workspace = true
language_models.workspace = true
log.workspace = true log.workspace = true
lsp.workspace = true lsp.workspace = true
markdown.workspace = true markdown.workspace = true
@ -87,6 +89,7 @@ theme.workspace = true
time.workspace = true time.workspace = true
time_format.workspace = true time_format.workspace = true
ui.workspace = true ui.workspace = true
ui_input.workspace = true
urlencoding.workspace = true urlencoding.workspace = true
util.workspace = true util.workspace = true
uuid.workspace = true uuid.workspace = true

View file

@ -3895,7 +3895,7 @@ mod tests {
LanguageModelRegistry::global(cx).update(cx, |registry, cx| { LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
registry.set_default_model( registry.set_default_model(
Some(ConfiguredModel { Some(ConfiguredModel {
provider: Arc::new(FakeLanguageModelProvider), provider: Arc::new(FakeLanguageModelProvider::default()),
model, model,
}), }),
cx, cx,
@ -3979,7 +3979,7 @@ mod tests {
LanguageModelRegistry::global(cx).update(cx, |registry, cx| { LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
registry.set_default_model( registry.set_default_model(
Some(ConfiguredModel { Some(ConfiguredModel {
provider: Arc::new(FakeLanguageModelProvider), provider: Arc::new(FakeLanguageModelProvider::default()),
model: model.clone(), model: model.clone(),
}), }),
cx, cx,

View file

@ -1,3 +1,4 @@
mod add_llm_provider_modal;
mod configure_context_server_modal; mod configure_context_server_modal;
mod manage_profiles_modal; mod manage_profiles_modal;
mod tool_picker; mod tool_picker;
@ -28,7 +29,7 @@ use proto::Plan;
use settings::{Settings, update_settings_file}; use settings::{Settings, update_settings_file};
use ui::{ use ui::{
Chip, ContextMenu, Disclosure, Divider, DividerColor, ElevationIndex, Indicator, PopoverMenu, Chip, ContextMenu, Disclosure, Divider, DividerColor, ElevationIndex, Indicator, PopoverMenu,
Scrollbar, ScrollbarState, Switch, SwitchColor, Tooltip, prelude::*, Scrollbar, ScrollbarState, Switch, SwitchColor, SwitchField, Tooltip, prelude::*,
}; };
use util::ResultExt as _; use util::ResultExt as _;
use workspace::Workspace; use workspace::Workspace;
@ -37,7 +38,10 @@ use zed_actions::ExtensionCategoryFilter;
pub(crate) use configure_context_server_modal::ConfigureContextServerModal; pub(crate) use configure_context_server_modal::ConfigureContextServerModal;
pub(crate) use manage_profiles_modal::ManageProfilesModal; pub(crate) use manage_profiles_modal::ManageProfilesModal;
use crate::AddContextServer; use crate::{
AddContextServer,
agent_configuration::add_llm_provider_modal::{AddLlmProviderModal, LlmCompatibleProvider},
};
pub struct AgentConfiguration { pub struct AgentConfiguration {
fs: Arc<dyn Fs>, fs: Arc<dyn Fs>,
@ -304,16 +308,55 @@ impl AgentConfiguration {
v_flex() v_flex()
.child( .child(
v_flex() h_flex()
.p(DynamicSpacing::Base16.rems(cx)) .p(DynamicSpacing::Base16.rems(cx))
.pr(DynamicSpacing::Base20.rems(cx)) .pr(DynamicSpacing::Base20.rems(cx))
.pb_0() .pb_0()
.mb_2p5() .mb_2p5()
.gap_0p5() .items_start()
.child(Headline::new("LLM Providers")) .justify_between()
.child( .child(
Label::new("Add at least one provider to use AI-powered features.") v_flex()
.color(Color::Muted), .gap_0p5()
.child(Headline::new("LLM Providers"))
.child(
Label::new("Add at least one provider to use AI-powered features.")
.color(Color::Muted),
),
)
.child(
PopoverMenu::new("add-provider-popover")
.trigger(
Button::new("add-provider", "Add Provider")
.icon_position(IconPosition::Start)
.icon(IconName::Plus)
.icon_size(IconSize::Small)
.icon_color(Color::Muted)
.label_size(LabelSize::Small),
)
.anchor(gpui::Corner::TopRight)
.menu({
let workspace = self.workspace.clone();
move |window, cx| {
Some(ContextMenu::build(window, cx, |menu, _window, _cx| {
menu.header("Compatible APIs").entry("OpenAI", None, {
let workspace = workspace.clone();
move |window, cx| {
workspace
.update(cx, |workspace, cx| {
AddLlmProviderModal::toggle(
LlmCompatibleProvider::OpenAi,
workspace,
window,
cx,
);
})
.log_err();
}
})
}))
}
}),
), ),
) )
.child( .child(
@ -330,119 +373,74 @@ impl AgentConfiguration {
fn render_command_permission(&mut self, cx: &mut Context<Self>) -> impl IntoElement { fn render_command_permission(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
let always_allow_tool_actions = AgentSettings::get_global(cx).always_allow_tool_actions; let always_allow_tool_actions = AgentSettings::get_global(cx).always_allow_tool_actions;
let fs = self.fs.clone();
h_flex() SwitchField::new(
.gap_4() "single-file-review",
.justify_between() "Enable single-file agent reviews",
.flex_wrap() "Agent edits are also displayed in single-file editors for review.",
.child( always_allow_tool_actions,
v_flex() move |state, _window, cx| {
.gap_0p5() let allow = state == &ToggleState::Selected;
.max_w_5_6() update_settings_file::<AgentSettings>(fs.clone(), cx, move |settings, _| {
.child(Label::new("Allow running editing tools without asking for confirmation")) settings.set_always_allow_tool_actions(allow);
.child( });
Label::new( },
"The agent can perform potentially destructive actions without asking for your confirmation.", )
)
.color(Color::Muted),
),
)
.child(
Switch::new(
"always-allow-tool-actions-switch",
always_allow_tool_actions.into(),
)
.color(SwitchColor::Accent)
.on_click({
let fs = self.fs.clone();
move |state, _window, cx| {
let allow = state == &ToggleState::Selected;
update_settings_file::<AgentSettings>(
fs.clone(),
cx,
move |settings, _| {
settings.set_always_allow_tool_actions(allow);
},
);
}
}),
)
} }
fn render_single_file_review(&mut self, cx: &mut Context<Self>) -> impl IntoElement { fn render_single_file_review(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
let single_file_review = AgentSettings::get_global(cx).single_file_review; let single_file_review = AgentSettings::get_global(cx).single_file_review;
let fs = self.fs.clone();
h_flex() SwitchField::new(
.gap_4() "single-file-review",
.justify_between() "Enable single-file agent reviews",
.flex_wrap() "Agent edits are also displayed in single-file editors for review.",
.child( single_file_review,
v_flex() move |state, _window, cx| {
.gap_0p5() let allow = state == &ToggleState::Selected;
.max_w_5_6() update_settings_file::<AgentSettings>(fs.clone(), cx, move |settings, _| {
.child(Label::new("Enable single-file agent reviews")) settings.set_single_file_review(allow);
.child( });
Label::new( },
"Agent edits are also displayed in single-file editors for review.", )
)
.color(Color::Muted),
),
)
.child(
Switch::new("single-file-review-switch", single_file_review.into())
.color(SwitchColor::Accent)
.on_click({
let fs = self.fs.clone();
move |state, _window, cx| {
let allow = state == &ToggleState::Selected;
update_settings_file::<AgentSettings>(
fs.clone(),
cx,
move |settings, _| {
settings.set_single_file_review(allow);
},
);
}
}),
)
} }
fn render_sound_notification(&mut self, cx: &mut Context<Self>) -> impl IntoElement { fn render_sound_notification(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
let play_sound_when_agent_done = AgentSettings::get_global(cx).play_sound_when_agent_done; let play_sound_when_agent_done = AgentSettings::get_global(cx).play_sound_when_agent_done;
let fs = self.fs.clone();
h_flex() SwitchField::new(
.gap_4() "sound-notification",
.justify_between() "Play sound when finished generating",
.flex_wrap() "Hear a notification sound when the agent is done generating changes or needs your input.",
.child( play_sound_when_agent_done,
v_flex() move |state, _window, cx| {
.gap_0p5() let allow = state == &ToggleState::Selected;
.max_w_5_6() update_settings_file::<AgentSettings>(fs.clone(), cx, move |settings, _| {
.child(Label::new("Play sound when finished generating")) settings.set_play_sound_when_agent_done(allow);
.child( });
Label::new( },
"Hear a notification sound when the agent is done generating changes or needs your input.", )
) }
.color(Color::Muted),
), fn render_modifier_to_send(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
) let use_modifier_to_send = AgentSettings::get_global(cx).use_modifier_to_send;
.child( let fs = self.fs.clone();
Switch::new("play-sound-notification-switch", play_sound_when_agent_done.into())
.color(SwitchColor::Accent) SwitchField::new(
.on_click({ "modifier-send",
let fs = self.fs.clone(); "Use modifier to submit a message",
move |state, _window, cx| { "Make a modifier (cmd-enter on macOS, ctrl-enter on Linux) required to send messages.",
let allow = state == &ToggleState::Selected; use_modifier_to_send,
update_settings_file::<AgentSettings>( move |state, _window, cx| {
fs.clone(), let allow = state == &ToggleState::Selected;
cx, update_settings_file::<AgentSettings>(fs.clone(), cx, move |settings, _| {
move |settings, _| { settings.set_use_modifier_to_send(allow);
settings.set_play_sound_when_agent_done(allow); });
}, },
); )
}
}),
)
} }
fn render_general_settings_section(&mut self, cx: &mut Context<Self>) -> impl IntoElement { fn render_general_settings_section(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
@ -456,6 +454,7 @@ impl AgentConfiguration {
.child(self.render_command_permission(cx)) .child(self.render_command_permission(cx))
.child(self.render_single_file_review(cx)) .child(self.render_single_file_review(cx))
.child(self.render_sound_notification(cx)) .child(self.render_sound_notification(cx))
.child(self.render_modifier_to_send(cx))
} }
fn render_zed_plan_info(&self, plan: Option<Plan>, cx: &mut Context<Self>) -> impl IntoElement { fn render_zed_plan_info(&self, plan: Option<Plan>, cx: &mut Context<Self>) -> impl IntoElement {

View file

@ -0,0 +1,639 @@
use std::sync::Arc;
use anyhow::Result;
use collections::HashSet;
use fs::Fs;
use gpui::{DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Render, Task};
use language_model::LanguageModelRegistry;
use language_models::{
AllLanguageModelSettings, OpenAiCompatibleSettingsContent,
provider::open_ai_compatible::AvailableModel,
};
use settings::update_settings_file;
use ui::{Banner, KeyBinding, Modal, ModalFooter, ModalHeader, Section, prelude::*};
use ui_input::SingleLineInput;
use workspace::{ModalView, Workspace};
#[derive(Clone, Copy)]
pub enum LlmCompatibleProvider {
OpenAi,
}
impl LlmCompatibleProvider {
fn name(&self) -> &'static str {
match self {
LlmCompatibleProvider::OpenAi => "OpenAI",
}
}
fn api_url(&self) -> &'static str {
match self {
LlmCompatibleProvider::OpenAi => "https://api.openai.com/v1",
}
}
}
struct AddLlmProviderInput {
provider_name: Entity<SingleLineInput>,
api_url: Entity<SingleLineInput>,
api_key: Entity<SingleLineInput>,
models: Vec<ModelInput>,
}
impl AddLlmProviderInput {
fn new(provider: LlmCompatibleProvider, window: &mut Window, cx: &mut App) -> Self {
let provider_name = single_line_input("Provider Name", provider.name(), None, window, cx);
let api_url = single_line_input("API URL", provider.api_url(), None, window, cx);
let api_key = single_line_input(
"API Key",
"000000000000000000000000000000000000000000000000",
None,
window,
cx,
);
Self {
provider_name,
api_url,
api_key,
models: vec![ModelInput::new(window, cx)],
}
}
fn add_model(&mut self, window: &mut Window, cx: &mut App) {
self.models.push(ModelInput::new(window, cx));
}
fn remove_model(&mut self, index: usize) {
self.models.remove(index);
}
}
struct ModelInput {
name: Entity<SingleLineInput>,
max_completion_tokens: Entity<SingleLineInput>,
max_output_tokens: Entity<SingleLineInput>,
max_tokens: Entity<SingleLineInput>,
}
impl ModelInput {
fn new(window: &mut Window, cx: &mut App) -> Self {
let model_name = single_line_input(
"Model Name",
"e.g. gpt-4o, claude-opus-4, gemini-2.5-pro",
None,
window,
cx,
);
let max_completion_tokens = single_line_input(
"Max Completion Tokens",
"200000",
Some("200000"),
window,
cx,
);
let max_output_tokens = single_line_input(
"Max Output Tokens",
"Max Output Tokens",
Some("32000"),
window,
cx,
);
let max_tokens = single_line_input("Max Tokens", "Max Tokens", Some("200000"), window, cx);
Self {
name: model_name,
max_completion_tokens,
max_output_tokens,
max_tokens,
}
}
fn parse(&self, cx: &App) -> Result<AvailableModel, SharedString> {
let name = self.name.read(cx).text(cx);
if name.is_empty() {
return Err(SharedString::from("Model Name cannot be empty"));
}
Ok(AvailableModel {
name,
display_name: None,
max_completion_tokens: Some(
self.max_completion_tokens
.read(cx)
.text(cx)
.parse::<u64>()
.map_err(|_| SharedString::from("Max Completion Tokens must be a number"))?,
),
max_output_tokens: Some(
self.max_output_tokens
.read(cx)
.text(cx)
.parse::<u64>()
.map_err(|_| SharedString::from("Max Output Tokens must be a number"))?,
),
max_tokens: self
.max_tokens
.read(cx)
.text(cx)
.parse::<u64>()
.map_err(|_| SharedString::from("Max Tokens must be a number"))?,
})
}
}
fn single_line_input(
label: impl Into<SharedString>,
placeholder: impl Into<SharedString>,
text: Option<&str>,
window: &mut Window,
cx: &mut App,
) -> Entity<SingleLineInput> {
cx.new(|cx| {
let input = SingleLineInput::new(window, cx, placeholder).label(label);
if let Some(text) = text {
input
.editor()
.update(cx, |editor, cx| editor.set_text(text, window, cx));
}
input
})
}
fn save_provider_to_settings(
input: &AddLlmProviderInput,
cx: &mut App,
) -> Task<Result<(), SharedString>> {
let provider_name: Arc<str> = input.provider_name.read(cx).text(cx).into();
if provider_name.is_empty() {
return Task::ready(Err("Provider Name cannot be empty".into()));
}
if LanguageModelRegistry::read_global(cx)
.providers()
.iter()
.any(|provider| {
provider.id().0.as_ref() == provider_name.as_ref()
|| provider.name().0.as_ref() == provider_name.as_ref()
})
{
return Task::ready(Err(
"Provider Name is already taken by another provider".into()
));
}
let api_url = input.api_url.read(cx).text(cx);
if api_url.is_empty() {
return Task::ready(Err("API URL cannot be empty".into()));
}
let api_key = input.api_key.read(cx).text(cx);
if api_key.is_empty() {
return Task::ready(Err("API Key cannot be empty".into()));
}
let mut models = Vec::new();
let mut model_names: HashSet<String> = HashSet::default();
for model in &input.models {
match model.parse(cx) {
Ok(model) => {
if !model_names.insert(model.name.clone()) {
return Task::ready(Err("Model Names must be unique".into()));
}
models.push(model)
}
Err(err) => return Task::ready(Err(err)),
}
}
let fs = <dyn Fs>::global(cx);
let task = cx.write_credentials(&api_url, "Bearer", api_key.as_bytes());
cx.spawn(async move |cx| {
task.await
.map_err(|_| "Failed to write API key to keychain")?;
cx.update(|cx| {
update_settings_file::<AllLanguageModelSettings>(fs, cx, |settings, _cx| {
settings.openai_compatible.get_or_insert_default().insert(
provider_name,
OpenAiCompatibleSettingsContent {
api_url,
available_models: models,
},
);
});
})
.ok();
Ok(())
})
}
pub struct AddLlmProviderModal {
provider: LlmCompatibleProvider,
input: AddLlmProviderInput,
focus_handle: FocusHandle,
last_error: Option<SharedString>,
}
impl AddLlmProviderModal {
    /// Opens this modal on the workspace, or closes it if it is already the
    /// active modal.
    pub fn toggle(
        provider: LlmCompatibleProvider,
        workspace: &mut Workspace,
        window: &mut Window,
        cx: &mut Context<Workspace>,
    ) {
        workspace.toggle_modal(window, cx, |window, cx| Self::new(provider, window, cx));
    }

    fn new(provider: LlmCompatibleProvider, window: &mut Window, cx: &mut Context<Self>) -> Self {
        Self {
            input: AddLlmProviderInput::new(provider, window, cx),
            provider,
            last_error: None,
            focus_handle: cx.focus_handle(),
        }
    }

    /// Validates the form and persists the provider to the settings file.
    ///
    /// On success the modal emits `DismissEvent` and closes; on failure the
    /// error message is stored in `last_error` and surfaced as a banner while
    /// the modal stays open so the user can correct the input.
    fn confirm(&mut self, _: &menu::Confirm, _: &mut Window, cx: &mut Context<Self>) {
        let task = save_provider_to_settings(&self.input, cx);
        cx.spawn(async move |this, cx| {
            let result = task.await;
            this.update(cx, |this, cx| match result {
                Ok(_) => {
                    cx.emit(DismissEvent);
                }
                Err(error) => {
                    this.last_error = Some(error);
                    cx.notify();
                }
            })
        })
        .detach_and_log_err(cx);
    }

    /// Dismisses the modal without saving anything.
    fn cancel(&mut self, _: &menu::Cancel, _: &mut Window, cx: &mut Context<Self>) {
        cx.emit(DismissEvent);
    }

    /// Renders the provider-level inputs: name, API URL, and API key.
    fn render_section(&self) -> Section {
        Section::new()
            .child(self.input.provider_name.clone())
            .child(self.input.api_url.clone())
            .child(self.input.api_key.clone())
    }

    /// Renders the "Models" section: a header row with an "Add Model" button,
    /// followed by one editable card per model currently in the input.
    fn render_model_section(&self, cx: &mut Context<Self>) -> Section {
        Section::new().child(
            v_flex()
                .gap_2()
                .child(
                    h_flex()
                        .justify_between()
                        .child(Label::new("Models").size(LabelSize::Small))
                        .child(
                            Button::new("add-model", "Add Model")
                                .icon(IconName::Plus)
                                .icon_position(IconPosition::Start)
                                .icon_size(IconSize::XSmall)
                                .icon_color(Color::Muted)
                                .label_size(LabelSize::Small)
                                .on_click(cx.listener(|this, _, window, cx| {
                                    this.input.add_model(window, cx);
                                    cx.notify();
                                })),
                        ),
                )
                .children(
                    self.input
                        .models
                        .iter()
                        .enumerate()
                        .map(|(ix, _)| self.render_model(ix, cx)),
                ),
        )
    }

    /// Renders one model card: name input, token-limit inputs, and — only when
    /// more than one model exists — a "Remove Model" button.
    fn render_model(&self, ix: usize, cx: &mut Context<Self>) -> impl IntoElement + use<> {
        // Keep at least one model row: removal is only offered when there is
        // more than one model configured.
        let has_more_than_one_model = self.input.models.len() > 1;
        let model = &self.input.models[ix];

        v_flex()
            .p_2()
            .gap_2()
            .rounded_sm()
            .border_1()
            .border_dashed()
            .border_color(cx.theme().colors().border.opacity(0.6))
            .bg(cx.theme().colors().element_active.opacity(0.15))
            .child(model.name.clone())
            .child(
                h_flex()
                    .gap_2()
                    .child(model.max_completion_tokens.clone())
                    .child(model.max_output_tokens.clone()),
            )
            .child(model.max_tokens.clone())
            .when(has_more_than_one_model, |this| {
                this.child(
                    Button::new(("remove-model", ix), "Remove Model")
                        .icon(IconName::Trash)
                        .icon_position(IconPosition::Start)
                        .icon_size(IconSize::XSmall)
                        .icon_color(Color::Muted)
                        .label_size(LabelSize::Small)
                        .style(ButtonStyle::Outlined)
                        .full_width()
                        // `move` captures `ix` so this handler removes exactly
                        // the row it was rendered for.
                        .on_click(cx.listener(move |this, _, _window, cx| {
                            this.input.remove_model(ix);
                            cx.notify();
                        })),
                )
            })
    }
}
// Allows the modal to signal its own dismissal (see `confirm`/`cancel`).
impl EventEmitter<DismissEvent> for AddLlmProviderModal {}

impl Focusable for AddLlmProviderModal {
    fn focus_handle(&self, _cx: &App) -> FocusHandle {
        self.focus_handle.clone()
    }
}

// Marker impl: lets the workspace manage this entity as a modal.
impl ModalView for AddLlmProviderModal {}
impl Render for AddLlmProviderModal {
    fn render(&mut self, window: &mut ui::Window, cx: &mut ui::Context<Self>) -> impl IntoElement {
        let focus_handle = self.focus_handle(cx);

        div()
            .id("add-llm-provider-modal")
            .key_context("AddLlmProviderModal")
            .w(rems(34.))
            .elevation_3(cx)
            .on_action(cx.listener(Self::cancel))
            // Reclaim focus on any click inside the modal so the displayed
            // Confirm/Cancel keybindings keep resolving against this handle.
            .capture_any_mouse_down(cx.listener(|this, _, window, cx| {
                this.focus_handle(cx).focus(window);
            }))
            .child(
                // NOTE(review): the element id "configure-context-server" looks
                // copy-pasted from the context-server modal — consider renaming
                // (behavioral id, so left unchanged here).
                Modal::new("configure-context-server", None)
                    .header(ModalHeader::new().headline("Add LLM Provider").description(
                        match self.provider {
                            LlmCompatibleProvider::OpenAi => {
                                "This provider will use an OpenAI compatible API."
                            }
                        },
                    ))
                    // Surface the most recent save/validation error, if any.
                    .when_some(self.last_error.clone(), |this, error| {
                        this.section(
                            Section::new().child(
                                Banner::new()
                                    .severity(ui::Severity::Warning)
                                    .child(div().text_xs().child(error)),
                            ),
                        )
                    })
                    // Scrollable body: provider fields followed by the model list.
                    .child(
                        v_flex()
                            .id("modal_content")
                            .max_h_128()
                            .overflow_y_scroll()
                            .gap_2()
                            .child(self.render_section())
                            .child(self.render_model_section(cx)),
                    )
                    .footer(
                        ModalFooter::new().end_slot(
                            h_flex()
                                .gap_1()
                                .child(
                                    Button::new("cancel", "Cancel")
                                        .key_binding(
                                            KeyBinding::for_action_in(
                                                &menu::Cancel,
                                                &focus_handle,
                                                window,
                                                cx,
                                            )
                                            .map(|kb| kb.size(rems_from_px(12.))),
                                        )
                                        .on_click(cx.listener(|this, _event, window, cx| {
                                            this.cancel(&menu::Cancel, window, cx)
                                        })),
                                )
                                .child(
                                    Button::new("save-server", "Save Provider")
                                        .key_binding(
                                            KeyBinding::for_action_in(
                                                &menu::Confirm,
                                                &focus_handle,
                                                window,
                                                cx,
                                            )
                                            .map(|kb| kb.size(rems_from_px(12.))),
                                        )
                                        .on_click(cx.listener(|this, _event, window, cx| {
                                            this.confirm(&menu::Confirm, window, cx)
                                        })),
                                ),
                        ),
                    ),
            )
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use editor::EditorSettings;
    use fs::FakeFs;
    use gpui::{TestAppContext, VisualTestContext};
    use language::language_settings;
    use language_model::{
        LanguageModelProviderId, LanguageModelProviderName,
        fake_provider::FakeLanguageModelProvider,
    };
    use project::Project;
    use settings::{Settings as _, SettingsStore};
    use util::path;

    /// Each field-level validation failure should produce its specific error
    /// message. Model tuples are
    /// (name, max_tokens, max_completion_tokens, max_output_tokens).
    #[gpui::test]
    async fn test_save_provider_invalid_inputs(cx: &mut TestAppContext) {
        let cx = setup_test(cx).await;

        assert_eq!(
            save_provider_validation_errors("", "someurl", "somekey", vec![], cx,).await,
            Some("Provider Name cannot be empty".into())
        );
        assert_eq!(
            save_provider_validation_errors("someprovider", "", "somekey", vec![], cx,).await,
            Some("API URL cannot be empty".into())
        );
        assert_eq!(
            save_provider_validation_errors("someprovider", "someurl", "", vec![], cx,).await,
            Some("API Key cannot be empty".into())
        );
        assert_eq!(
            save_provider_validation_errors(
                "someprovider",
                "someurl",
                "somekey",
                vec![("", "200000", "200000", "32000")],
                cx,
            )
            .await,
            Some("Model Name cannot be empty".into())
        );
        // Non-numeric token fields are rejected field by field.
        assert_eq!(
            save_provider_validation_errors(
                "someprovider",
                "someurl",
                "somekey",
                vec![("somemodel", "abc", "200000", "32000")],
                cx,
            )
            .await,
            Some("Max Tokens must be a number".into())
        );
        assert_eq!(
            save_provider_validation_errors(
                "someprovider",
                "someurl",
                "somekey",
                vec![("somemodel", "200000", "abc", "32000")],
                cx,
            )
            .await,
            Some("Max Completion Tokens must be a number".into())
        );
        assert_eq!(
            save_provider_validation_errors(
                "someprovider",
                "someurl",
                "somekey",
                vec![("somemodel", "200000", "200000", "abc")],
                cx,
            )
            .await,
            Some("Max Output Tokens must be a number".into())
        );
        // Duplicate model names within one provider are rejected.
        assert_eq!(
            save_provider_validation_errors(
                "someprovider",
                "someurl",
                "somekey",
                vec![
                    ("somemodel", "200000", "200000", "32000"),
                    ("somemodel", "200000", "200000", "32000"),
                ],
                cx,
            )
            .await,
            Some("Model Names must be unique".into())
        );
    }

    /// Saving is rejected when the provider name collides with an
    /// already-registered provider.
    #[gpui::test]
    async fn test_save_provider_name_conflict(cx: &mut TestAppContext) {
        let cx = setup_test(cx).await;

        // Pre-register a provider with the same id to force the conflict.
        cx.update(|_window, cx| {
            LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
                registry.register_provider(
                    FakeLanguageModelProvider::new(
                        LanguageModelProviderId::new("someprovider"),
                        LanguageModelProviderName::new("Some Provider"),
                    ),
                    cx,
                );
            });
        });

        assert_eq!(
            save_provider_validation_errors(
                "someprovider",
                "someurl",
                "someapikey",
                vec![("somemodel", "200000", "200000", "32000")],
                cx,
            )
            .await,
            Some("Provider Name is already taken by another provider".into())
        );
    }

    /// Boots the minimal set of global settings, a fake filesystem, and a test
    /// workspace window that the save path requires.
    async fn setup_test(cx: &mut TestAppContext) -> &mut VisualTestContext {
        cx.update(|cx| {
            let store = SettingsStore::test(cx);
            cx.set_global(store);
            workspace::init_settings(cx);
            Project::init_settings(cx);
            theme::init(theme::LoadThemes::JustBase, cx);
            language_settings::init(cx);
            EditorSettings::register(cx);
            language_model::init_settings(cx);
            language_models::init_settings(cx);
        });

        let fs = FakeFs::new(cx.executor());
        cx.update(|cx| <dyn Fs>::set_global(fs.clone(), cx));
        let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
        let (_, cx) =
            cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
        cx
    }

    /// Fills an `AddLlmProviderInput` from the given strings, runs
    /// `save_provider_to_settings`, and returns the error message (if any).
    /// `models` entries are
    /// (name, max_tokens, max_completion_tokens, max_output_tokens).
    async fn save_provider_validation_errors(
        provider_name: &str,
        api_url: &str,
        api_key: &str,
        models: Vec<(&str, &str, &str, &str)>,
        cx: &mut VisualTestContext,
    ) -> Option<SharedString> {
        // Helper to set the text of a single-line input's inner editor.
        fn set_text(
            input: &Entity<SingleLineInput>,
            text: &str,
            window: &mut Window,
            cx: &mut App,
        ) {
            input.update(cx, |input, cx| {
                input.editor().update(cx, |editor, cx| {
                    editor.set_text(text, window, cx);
                });
            });
        }

        let task = cx.update(|window, cx| {
            let mut input = AddLlmProviderInput::new(LlmCompatibleProvider::OpenAi, window, cx);
            set_text(&input.provider_name, provider_name, window, cx);
            set_text(&input.api_url, api_url, window, cx);
            set_text(&input.api_key, api_key, window, cx);
            for (i, (name, max_tokens, max_completion_tokens, max_output_tokens)) in
                models.iter().enumerate()
            {
                // The input starts with one empty model row; grow it as needed.
                if i >= input.models.len() {
                    input.models.push(ModelInput::new(window, cx));
                }
                let model = &mut input.models[i];
                set_text(&model.name, name, window, cx);
                set_text(&model.max_tokens, max_tokens, window, cx);
                set_text(
                    &model.max_completion_tokens,
                    max_completion_tokens,
                    window,
                    cx,
                );
                set_text(&model.max_output_tokens, max_output_tokens, window, cx);
            }
            save_provider_to_settings(&input, cx)
        });

        task.await.err()
    }
}

View file

@ -43,7 +43,7 @@ use anyhow::{Result, anyhow};
use assistant_context::{AssistantContext, ContextEvent, ContextSummary}; use assistant_context::{AssistantContext, ContextEvent, ContextSummary};
use assistant_slash_command::SlashCommandWorkingSet; use assistant_slash_command::SlashCommandWorkingSet;
use assistant_tool::ToolWorkingSet; use assistant_tool::ToolWorkingSet;
use client::{UserStore, zed_urls}; use client::{DisableAiSettings, UserStore, zed_urls};
use editor::{Anchor, AnchorRangeExt as _, Editor, EditorEvent, MultiBuffer}; use editor::{Anchor, AnchorRangeExt as _, Editor, EditorEvent, MultiBuffer};
use feature_flags::{self, FeatureFlagAppExt}; use feature_flags::{self, FeatureFlagAppExt};
use fs::Fs; use fs::Fs;
@ -744,6 +744,7 @@ impl AgentPanel {
if workspace if workspace
.panel::<Self>(cx) .panel::<Self>(cx)
.is_some_and(|panel| panel.read(cx).enabled(cx)) .is_some_and(|panel| panel.read(cx).enabled(cx))
&& !DisableAiSettings::get_global(cx).disable_ai
{ {
workspace.toggle_panel_focus::<Self>(window, cx); workspace.toggle_panel_focus::<Self>(window, cx);
} }
@ -1665,7 +1666,10 @@ impl Panel for AgentPanel {
} }
fn icon(&self, _window: &Window, cx: &App) -> Option<IconName> { fn icon(&self, _window: &Window, cx: &App) -> Option<IconName> {
(self.enabled(cx) && AgentSettings::get_global(cx).button).then_some(IconName::ZedAssistant) (self.enabled(cx)
&& AgentSettings::get_global(cx).button
&& !DisableAiSettings::get_global(cx).disable_ai)
.then_some(IconName::ZedAssistant)
} }
fn icon_tooltip(&self, _window: &Window, _cx: &App) -> Option<&'static str> { fn icon_tooltip(&self, _window: &Window, _cx: &App) -> Option<&'static str> {

View file

@ -31,7 +31,8 @@ use std::sync::Arc;
use agent::{Thread, ThreadId}; use agent::{Thread, ThreadId};
use agent_settings::{AgentProfileId, AgentSettings, LanguageModelSelection}; use agent_settings::{AgentProfileId, AgentSettings, LanguageModelSelection};
use assistant_slash_command::SlashCommandRegistry; use assistant_slash_command::SlashCommandRegistry;
use client::Client; use client::{Client, DisableAiSettings};
use command_palette_hooks::CommandPaletteFilter;
use feature_flags::FeatureFlagAppExt as _; use feature_flags::FeatureFlagAppExt as _;
use fs::Fs; use fs::Fs;
use gpui::{Action, App, Entity, actions}; use gpui::{Action, App, Entity, actions};
@ -43,6 +44,7 @@ use prompt_store::PromptBuilder;
use schemars::JsonSchema; use schemars::JsonSchema;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use settings::{Settings as _, SettingsStore}; use settings::{Settings as _, SettingsStore};
use std::any::TypeId;
pub use crate::active_thread::ActiveThread; pub use crate::active_thread::ActiveThread;
use crate::agent_configuration::{ConfigureContextServerModal, ManageProfilesModal}; use crate::agent_configuration::{ConfigureContextServerModal, ManageProfilesModal};
@ -52,6 +54,7 @@ use crate::slash_command_settings::SlashCommandSettings;
pub use agent_diff::{AgentDiffPane, AgentDiffToolbar}; pub use agent_diff::{AgentDiffPane, AgentDiffToolbar};
pub use text_thread_editor::{AgentPanelDelegate, TextThreadEditor}; pub use text_thread_editor::{AgentPanelDelegate, TextThreadEditor};
pub use ui::preview::{all_agent_previews, get_agent_preview}; pub use ui::preview::{all_agent_previews, get_agent_preview};
use zed_actions;
actions!( actions!(
agent, agent,
@ -243,6 +246,66 @@ pub fn init(
}) })
.detach(); .detach();
cx.observe_new(ManageProfilesModal::register).detach(); cx.observe_new(ManageProfilesModal::register).detach();
// Update command palette filter based on AI settings
update_command_palette_filter(cx);
// Watch for settings changes
cx.observe_global::<SettingsStore>(|app_cx| {
// When settings change, update the command palette filter
update_command_palette_filter(app_cx);
})
.detach();
}
fn update_command_palette_filter(cx: &mut App) {
let disable_ai = DisableAiSettings::get_global(cx).disable_ai;
CommandPaletteFilter::update_global(cx, |filter, _| {
if disable_ai {
filter.hide_namespace("agent");
filter.hide_namespace("assistant");
filter.hide_namespace("zed_predict_onboarding");
filter.hide_namespace("edit_prediction");
use editor::actions::{
AcceptEditPrediction, AcceptPartialEditPrediction, NextEditPrediction,
PreviousEditPrediction, ShowEditPrediction, ToggleEditPrediction,
};
let edit_prediction_actions = [
TypeId::of::<AcceptEditPrediction>(),
TypeId::of::<AcceptPartialEditPrediction>(),
TypeId::of::<ShowEditPrediction>(),
TypeId::of::<NextEditPrediction>(),
TypeId::of::<PreviousEditPrediction>(),
TypeId::of::<ToggleEditPrediction>(),
];
filter.hide_action_types(&edit_prediction_actions);
filter.hide_action_types(&[TypeId::of::<zed_actions::OpenZedPredictOnboarding>()]);
} else {
filter.show_namespace("agent");
filter.show_namespace("assistant");
filter.show_namespace("zed_predict_onboarding");
filter.show_namespace("edit_prediction");
use editor::actions::{
AcceptEditPrediction, AcceptPartialEditPrediction, NextEditPrediction,
PreviousEditPrediction, ShowEditPrediction, ToggleEditPrediction,
};
let edit_prediction_actions = [
TypeId::of::<AcceptEditPrediction>(),
TypeId::of::<AcceptPartialEditPrediction>(),
TypeId::of::<ShowEditPrediction>(),
TypeId::of::<NextEditPrediction>(),
TypeId::of::<PreviousEditPrediction>(),
TypeId::of::<ToggleEditPrediction>(),
];
filter.show_action_types(edit_prediction_actions.iter());
filter
.show_action_types([TypeId::of::<zed_actions::OpenZedPredictOnboarding>()].iter());
}
});
} }
fn init_language_model_settings(cx: &mut App) { fn init_language_model_settings(cx: &mut App) {

View file

@ -16,7 +16,7 @@ use agent::{
}; };
use agent_settings::AgentSettings; use agent_settings::AgentSettings;
use anyhow::{Context as _, Result}; use anyhow::{Context as _, Result};
use client::telemetry::Telemetry; use client::{DisableAiSettings, telemetry::Telemetry};
use collections::{HashMap, HashSet, VecDeque, hash_map}; use collections::{HashMap, HashSet, VecDeque, hash_map};
use editor::SelectionEffects; use editor::SelectionEffects;
use editor::{ use editor::{
@ -57,6 +57,17 @@ pub fn init(
cx: &mut App, cx: &mut App,
) { ) {
cx.set_global(InlineAssistant::new(fs, prompt_builder, telemetry)); cx.set_global(InlineAssistant::new(fs, prompt_builder, telemetry));
cx.observe_global::<SettingsStore>(|cx| {
if DisableAiSettings::get_global(cx).disable_ai {
// Hide any active inline assist UI when AI is disabled
InlineAssistant::update_global(cx, |assistant, cx| {
assistant.cancel_all_active_completions(cx);
});
}
})
.detach();
cx.observe_new(|_workspace: &mut Workspace, window, cx| { cx.observe_new(|_workspace: &mut Workspace, window, cx| {
let Some(window) = window else { let Some(window) = window else {
return; return;
@ -141,6 +152,26 @@ impl InlineAssistant {
.detach(); .detach();
} }
/// Hides all active inline assists when AI is disabled
pub fn cancel_all_active_completions(&mut self, cx: &mut App) {
// Cancel all active completions in editors
for (editor_handle, _) in self.assists_by_editor.iter() {
if let Some(editor) = editor_handle.upgrade() {
let windows = cx.windows();
if !windows.is_empty() {
let window = windows[0];
let _ = window.update(cx, |_, window, cx| {
editor.update(cx, |editor, cx| {
if editor.has_active_inline_completion() {
editor.cancel(&Default::default(), window, cx);
}
});
});
}
}
}
}
fn handle_workspace_event( fn handle_workspace_event(
&mut self, &mut self,
workspace: Entity<Workspace>, workspace: Entity<Workspace>,
@ -176,7 +207,7 @@ impl InlineAssistant {
window: &mut Window, window: &mut Window,
cx: &mut App, cx: &mut App,
) { ) {
let is_assistant2_enabled = true; let is_assistant2_enabled = !DisableAiSettings::get_global(cx).disable_ai;
if let Some(editor) = item.act_as::<Editor>(cx) { if let Some(editor) = item.act_as::<Editor>(cx) {
editor.update(cx, |editor, cx| { editor.update(cx, |editor, cx| {
@ -199,6 +230,13 @@ impl InlineAssistant {
cx, cx,
); );
if DisableAiSettings::get_global(cx).disable_ai {
// Cancel any active completions
if editor.has_active_inline_completion() {
editor.cancel(&Default::default(), window, cx);
}
}
// Remove the Assistant1 code action provider, as it still might be registered. // Remove the Assistant1 code action provider, as it still might be registered.
editor.remove_code_action_provider("assistant".into(), window, cx); editor.remove_code_action_provider("assistant".into(), window, cx);
} else { } else {
@ -219,7 +257,7 @@ impl InlineAssistant {
cx: &mut Context<Workspace>, cx: &mut Context<Workspace>,
) { ) {
let settings = AgentSettings::get_global(cx); let settings = AgentSettings::get_global(cx);
if !settings.enabled { if !settings.enabled || DisableAiSettings::get_global(cx).disable_ai {
return; return;
} }

View file

@ -38,10 +38,6 @@ impl ApiKeysWithProviders {
.map(|provider| (provider.icon(), provider.name().0.clone())) .map(|provider| (provider.icon(), provider.name().0.clone()))
.collect() .collect()
} }
pub fn has_providers(&self) -> bool {
!self.configured_providers.is_empty()
}
} }
impl Render for ApiKeysWithProviders { impl Render for ApiKeysWithProviders {
@ -53,11 +49,10 @@ impl Render for ApiKeysWithProviders {
.map(|(icon, name)| { .map(|(icon, name)| {
h_flex() h_flex()
.gap_1p5() .gap_1p5()
.child(Icon::new(icon).size(IconSize::Small).color(Color::Muted)) .child(Icon::new(icon).size(IconSize::XSmall).color(Color::Muted))
.child(Label::new(name)) .child(Label::new(name))
}); });
div()
h_flex()
.mx_2p5() .mx_2p5()
.p_1() .p_1()
.pb_0() .pb_0()
@ -85,8 +80,24 @@ impl Render for ApiKeysWithProviders {
.border_x_1() .border_x_1()
.border_color(cx.theme().colors().border) .border_color(cx.theme().colors().border)
.bg(cx.theme().colors().panel_background) .bg(cx.theme().colors().panel_background)
.child(Icon::new(IconName::Info).size(IconSize::XSmall).color(Color::Muted)) .child(
.child(Label::new("Or start now using API keys from your environment for the following providers:").color(Color::Muted)) h_flex()
.min_w_0()
.gap_2()
.child(
Icon::new(IconName::Info)
.size(IconSize::XSmall)
.color(Color::Muted)
)
.child(
div()
.w_full()
.child(
Label::new("Start now using API keys from your environment for the following providers:")
.color(Color::Muted)
)
)
)
.children(configured_providers_list) .children(configured_providers_list)
) )
} }
@ -118,7 +129,7 @@ impl RenderOnce for ApiKeysWithoutProviders {
.child(Divider::horizontal()), .child(Divider::horizontal()),
) )
.child(List::new().child(BulletItem::new( .child(List::new().child(BulletItem::new(
"You can also use AI in Zed by bringing your own API keys", "Add your own keys to use AI without signing in.",
))) )))
.child( .child(
Button::new("configure-providers", "Configure Providers") Button::new("configure-providers", "Configure Providers")

View file

@ -141,22 +141,18 @@ impl ZedAiOnboarding {
) )
.child( .child(
List::new() List::new()
.child(BulletItem::new("50 prompts per month with Claude models"))
.child(BulletItem::new( .child(BulletItem::new(
"50 prompts per month with the Claude models", "2,000 accepted edit predictions with Zeta, our open-source model",
))
.child(BulletItem::new(
"2000 accepted edit predictions using our open-source Zeta model",
)), )),
) )
} }
fn pro_trial_definition(&self) -> impl IntoElement { fn pro_trial_definition(&self) -> impl IntoElement {
List::new() List::new()
.child(BulletItem::new("150 prompts with Claude models"))
.child(BulletItem::new( .child(BulletItem::new(
"150 prompts per month with the Claude models", "Unlimited accepted edit predictions with Zeta, our open-source model",
))
.child(BulletItem::new(
"Unlimited accepted edit predictions using our open-source Zeta model",
)) ))
} }
@ -178,12 +174,12 @@ impl ZedAiOnboarding {
List::new() List::new()
.child(BulletItem::new("500 prompts per month with Claude models")) .child(BulletItem::new("500 prompts per month with Claude models"))
.child(BulletItem::new( .child(BulletItem::new(
"Unlimited accepted edit predictions using our open-source Zeta model", "Unlimited accepted edit predictions with Zeta, our open-source model",
)) ))
.child(BulletItem::new("USD $20 per month")), .child(BulletItem::new("$20 USD per month")),
) )
.child( .child(
Button::new("pro", "Start with Pro") Button::new("pro", "Get Started")
.full_width() .full_width()
.style(ButtonStyle::Tinted(ui::TintColor::Accent)) .style(ButtonStyle::Tinted(ui::TintColor::Accent))
.on_click(move |_, _window, cx| { .on_click(move |_, _window, cx| {
@ -206,11 +202,11 @@ impl ZedAiOnboarding {
List::new() List::new()
.child(self.pro_trial_definition()) .child(self.pro_trial_definition())
.child(BulletItem::new( .child(BulletItem::new(
"Try it out for 14 days with no charge and no credit card required", "Try it out for 14 days for free, no credit card required",
)), )),
) )
.child( .child(
Button::new("pro", "Start Pro Trial") Button::new("pro", "Start Free Trial")
.full_width() .full_width()
.style(ButtonStyle::Tinted(ui::TintColor::Accent)) .style(ButtonStyle::Tinted(ui::TintColor::Accent))
.on_click(move |_, _window, cx| { .on_click(move |_, _window, cx| {
@ -225,14 +221,14 @@ impl ZedAiOnboarding {
v_flex() v_flex()
.gap_1() .gap_1()
.w_full() .w_full()
.child(Headline::new("Before starting…")) .child(Headline::new("Accept Terms of Service"))
.child( .child(
Label::new("Make sure you have read and accepted Zed AI's terms of service.") Label::new("We dont sell your data, track you across the web, or compromise your privacy.")
.color(Color::Muted) .color(Color::Muted)
.mb_2(), .mb_2(),
) )
.child( .child(
Button::new("terms_of_service", "View and Read the Terms of Service") Button::new("terms_of_service", "Review Terms of Service")
.full_width() .full_width()
.style(ButtonStyle::Outlined) .style(ButtonStyle::Outlined)
.icon(IconName::ArrowUpRight) .icon(IconName::ArrowUpRight)
@ -241,7 +237,7 @@ impl ZedAiOnboarding {
.on_click(move |_, _window, cx| cx.open_url(&zed_urls::terms_of_service(cx))), .on_click(move |_, _window, cx| cx.open_url(&zed_urls::terms_of_service(cx))),
) )
.child( .child(
Button::new("accept_terms", "I've read it and accept it") Button::new("accept_terms", "Accept")
.full_width() .full_width()
.style(ButtonStyle::Tinted(TintColor::Accent)) .style(ButtonStyle::Tinted(TintColor::Accent))
.on_click({ .on_click({
@ -259,13 +255,13 @@ impl ZedAiOnboarding {
.gap_1() .gap_1()
.child(Headline::new("Welcome to Zed AI")) .child(Headline::new("Welcome to Zed AI"))
.child( .child(
Label::new("Sign in to start using AI in Zed with a free trial of the Pro plan, which includes:") Label::new("Sign in to try Zed Pro for 14 days, no credit card required.")
.color(Color::Muted) .color(Color::Muted)
.mb_2(), .mb_2(),
) )
.child(self.pro_trial_definition()) .child(self.pro_trial_definition())
.child( .child(
Button::new("sign_in", "Sign in to Start Trial") Button::new("sign_in", "Try Zed Pro for Free")
.disabled(signing_in) .disabled(signing_in)
.full_width() .full_width()
.style(ButtonStyle::Tinted(ui::TintColor::Accent)) .style(ButtonStyle::Tinted(ui::TintColor::Accent))
@ -284,11 +280,6 @@ impl ZedAiOnboarding {
.relative() .relative()
.gap_1() .gap_1()
.child(Headline::new("Welcome to Zed AI")) .child(Headline::new("Welcome to Zed AI"))
.child(
Label::new("Choose how you want to start.")
.color(Color::Muted)
.mb_2(),
)
.map(|this| { .map(|this| {
if self.account_too_young { if self.account_too_young {
this.child(young_account_banner) this.child(young_account_banner)
@ -318,7 +309,7 @@ impl ZedAiOnboarding {
v_flex() v_flex()
.relative() .relative()
.gap_1() .gap_1()
.child(Headline::new("Welcome to the Zed Pro free trial")) .child(Headline::new("Welcome to the Zed Pro Trial"))
.child( .child(
Label::new("Here's what you get for the next 14 days:") Label::new("Here's what you get for the next 14 days:")
.color(Color::Muted) .color(Color::Muted)

View file

@ -6,7 +6,7 @@ pub struct YoungAccountBanner;
impl RenderOnce for YoungAccountBanner { impl RenderOnce for YoungAccountBanner {
fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
const YOUNG_ACCOUNT_DISCLAIMER: &str = "To prevent abuse of our service, we cannot offer plans to GitHub accounts created fewer than 30 days ago. To request an exception, reach out to billing@zed.dev."; const YOUNG_ACCOUNT_DISCLAIMER: &str = "To prevent abuse of our service, we cannot offer plans to GitHub accounts created fewer than 30 days ago. To request an exception, reach out to billing-support@zed.dev.";
let label = div() let label = div()
.w_full() .w_full()

View file

@ -1323,7 +1323,7 @@ fn setup_context_editor_with_fake_model(
) -> (Entity<AssistantContext>, Arc<FakeLanguageModel>) { ) -> (Entity<AssistantContext>, Arc<FakeLanguageModel>) {
let registry = Arc::new(LanguageRegistry::test(cx.executor().clone())); let registry = Arc::new(LanguageRegistry::test(cx.executor().clone()));
let fake_provider = Arc::new(FakeLanguageModelProvider); let fake_provider = Arc::new(FakeLanguageModelProvider::default());
let fake_model = Arc::new(fake_provider.test_model()); let fake_model = Arc::new(fake_provider.test_model());
cx.update(|cx| { cx.update(|cx| {

View file

@ -51,23 +51,13 @@ impl ActionLog {
Some(self.tracked_buffers.get(buffer)?.snapshot.clone()) Some(self.tracked_buffers.get(buffer)?.snapshot.clone())
} }
pub fn has_unnotified_user_edits(&self) -> bool {
self.tracked_buffers
.values()
.any(|tracked| tracked.has_unnotified_user_edits)
}
/// Return a unified diff patch with user edits made since last read or notification /// Return a unified diff patch with user edits made since last read or notification
pub fn unnotified_user_edits(&self, cx: &Context<Self>) -> Option<String> { pub fn unnotified_user_edits(&self, cx: &Context<Self>) -> Option<String> {
if !self.has_unnotified_user_edits() { let diffs = self
return None;
}
let unified_diff = self
.tracked_buffers .tracked_buffers
.values() .values()
.filter_map(|tracked| { .filter_map(|tracked| {
if !tracked.has_unnotified_user_edits { if !tracked.may_have_unnotified_user_edits {
return None; return None;
} }
@ -95,9 +85,13 @@ impl ActionLog {
Some(result) Some(result)
}) })
.collect::<Vec<_>>() .collect::<Vec<_>>();
.join("\n\n");
if diffs.is_empty() {
return None;
}
let unified_diff = diffs.join("\n\n");
Some(unified_diff) Some(unified_diff)
} }
@ -106,7 +100,7 @@ impl ActionLog {
pub fn flush_unnotified_user_edits(&mut self, cx: &Context<Self>) -> Option<String> { pub fn flush_unnotified_user_edits(&mut self, cx: &Context<Self>) -> Option<String> {
let patch = self.unnotified_user_edits(cx); let patch = self.unnotified_user_edits(cx);
self.tracked_buffers.values_mut().for_each(|tracked| { self.tracked_buffers.values_mut().for_each(|tracked| {
tracked.has_unnotified_user_edits = false; tracked.may_have_unnotified_user_edits = false;
tracked.last_seen_base = tracked.diff_base.clone(); tracked.last_seen_base = tracked.diff_base.clone();
}); });
patch patch
@ -185,7 +179,7 @@ impl ActionLog {
version: buffer.read(cx).version(), version: buffer.read(cx).version(),
diff, diff,
diff_update: diff_update_tx, diff_update: diff_update_tx,
has_unnotified_user_edits: false, may_have_unnotified_user_edits: false,
_open_lsp_handle: open_lsp_handle, _open_lsp_handle: open_lsp_handle,
_maintain_diff: cx.spawn({ _maintain_diff: cx.spawn({
let buffer = buffer.clone(); let buffer = buffer.clone();
@ -337,27 +331,34 @@ impl ActionLog {
let new_snapshot = buffer_snapshot.clone(); let new_snapshot = buffer_snapshot.clone();
let unreviewed_edits = tracked_buffer.unreviewed_edits.clone(); let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
let edits = diff_snapshots(&old_snapshot, &new_snapshot); let edits = diff_snapshots(&old_snapshot, &new_snapshot);
if let ChangeAuthor::User = author let mut has_user_changes = false;
&& !edits.is_empty()
{
tracked_buffer.has_unnotified_user_edits = true;
}
async move { async move {
if let ChangeAuthor::User = author { if let ChangeAuthor::User = author {
apply_non_conflicting_edits( has_user_changes = apply_non_conflicting_edits(
&unreviewed_edits, &unreviewed_edits,
edits, edits,
&mut base_text, &mut base_text,
new_snapshot.as_rope(), new_snapshot.as_rope(),
); );
} }
(Arc::new(base_text.to_string()), base_text)
(Arc::new(base_text.to_string()), base_text, has_user_changes)
} }
}); });
anyhow::Ok(rebase) anyhow::Ok(rebase)
})??; })??;
let (new_base_text, new_diff_base) = rebase.await; let (new_base_text, new_diff_base, has_user_changes) = rebase.await;
this.update(cx, |this, _| {
let tracked_buffer = this
.tracked_buffers
.get_mut(buffer)
.context("buffer not tracked")
.unwrap();
tracked_buffer.may_have_unnotified_user_edits |= has_user_changes;
})?;
Self::update_diff( Self::update_diff(
this, this,
buffer, buffer,
@ -829,11 +830,12 @@ fn apply_non_conflicting_edits(
edits: Vec<Edit<u32>>, edits: Vec<Edit<u32>>,
old_text: &mut Rope, old_text: &mut Rope,
new_text: &Rope, new_text: &Rope,
) { ) -> bool {
let mut old_edits = patch.edits().iter().cloned().peekable(); let mut old_edits = patch.edits().iter().cloned().peekable();
let mut new_edits = edits.into_iter().peekable(); let mut new_edits = edits.into_iter().peekable();
let mut applied_delta = 0i32; let mut applied_delta = 0i32;
let mut rebased_delta = 0i32; let mut rebased_delta = 0i32;
let mut has_made_changes = false;
while let Some(mut new_edit) = new_edits.next() { while let Some(mut new_edit) = new_edits.next() {
let mut conflict = false; let mut conflict = false;
@ -883,8 +885,10 @@ fn apply_non_conflicting_edits(
&new_text.chunks_in_range(new_bytes).collect::<String>(), &new_text.chunks_in_range(new_bytes).collect::<String>(),
); );
applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32; applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
has_made_changes = true;
} }
} }
has_made_changes
} }
fn diff_snapshots( fn diff_snapshots(
@ -958,7 +962,7 @@ struct TrackedBuffer {
diff: Entity<BufferDiff>, diff: Entity<BufferDiff>,
snapshot: text::BufferSnapshot, snapshot: text::BufferSnapshot,
diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>, diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
has_unnotified_user_edits: bool, may_have_unnotified_user_edits: bool,
_open_lsp_handle: OpenLspBufferHandle, _open_lsp_handle: OpenLspBufferHandle,
_maintain_diff: Task<()>, _maintain_diff: Task<()>,
_subscription: Subscription, _subscription: Subscription,

View file

@ -20,6 +20,7 @@ anyhow.workspace = true
assistant_tool.workspace = true assistant_tool.workspace = true
buffer_diff.workspace = true buffer_diff.workspace = true
chrono.workspace = true chrono.workspace = true
client.workspace = true
collections.workspace = true collections.workspace = true
component.workspace = true component.workspace = true
derive_more.workspace = true derive_more.workspace = true

View file

@ -20,14 +20,13 @@ mod thinking_tool;
mod ui; mod ui;
mod web_search_tool; mod web_search_tool;
use std::sync::Arc;
use assistant_tool::ToolRegistry; use assistant_tool::ToolRegistry;
use copy_path_tool::CopyPathTool; use copy_path_tool::CopyPathTool;
use gpui::{App, Entity}; use gpui::{App, Entity};
use http_client::HttpClientWithUrl; use http_client::HttpClientWithUrl;
use language_model::LanguageModelRegistry; use language_model::LanguageModelRegistry;
use move_path_tool::MovePathTool; use move_path_tool::MovePathTool;
use std::sync::Arc;
use web_search_tool::WebSearchTool; use web_search_tool::WebSearchTool;
pub(crate) use templates::*; pub(crate) use templates::*;

View file

@ -278,6 +278,9 @@ impl Tool for EditFileTool {
.unwrap_or(false); .unwrap_or(false);
if format_on_save_enabled { if format_on_save_enabled {
action_log.update(cx, |log, cx| {
log.buffer_edited(buffer.clone(), cx);
})?;
let format_task = project.update(cx, |project, cx| { let format_task = project.update(cx, |project, cx| {
project.format( project.format(
HashSet::from_iter([buffer.clone()]), HashSet::from_iter([buffer.clone()]),

View file

@ -200,7 +200,7 @@ mod tests {
// Run the tool before any changes // Run the tool before any changes
let tool = Arc::new(ProjectNotificationsTool); let tool = Arc::new(ProjectNotificationsTool);
let provider = Arc::new(FakeLanguageModelProvider); let provider = Arc::new(FakeLanguageModelProvider::default());
let model: Arc<dyn LanguageModel> = Arc::new(provider.test_model()); let model: Arc<dyn LanguageModel> = Arc::new(provider.test_model());
let request = Arc::new(LanguageModelRequest::default()); let request = Arc::new(LanguageModelRequest::default());
let tool_input = json!({}); let tool_input = json!({});

View file

@ -17,7 +17,5 @@ default = []
[dependencies] [dependencies]
aws-smithy-runtime-api.workspace = true aws-smithy-runtime-api.workspace = true
aws-smithy-types.workspace = true aws-smithy-types.workspace = true
futures.workspace = true
http_client.workspace = true http_client.workspace = true
tokio = { workspace = true, features = ["rt", "rt-multi-thread"] }
workspace-hack.workspace = true workspace-hack.workspace = true

View file

@ -11,14 +11,11 @@ use aws_smithy_runtime_api::client::result::ConnectorError;
use aws_smithy_runtime_api::client::runtime_components::RuntimeComponents; use aws_smithy_runtime_api::client::runtime_components::RuntimeComponents;
use aws_smithy_runtime_api::http::{Headers, StatusCode}; use aws_smithy_runtime_api::http::{Headers, StatusCode};
use aws_smithy_types::body::SdkBody; use aws_smithy_types::body::SdkBody;
use futures::AsyncReadExt; use http_client::AsyncBody;
use http_client::{AsyncBody, Inner};
use http_client::{HttpClient, Request}; use http_client::{HttpClient, Request};
use tokio::runtime::Handle;
struct AwsHttpConnector { struct AwsHttpConnector {
client: Arc<dyn HttpClient>, client: Arc<dyn HttpClient>,
handle: Handle,
} }
impl std::fmt::Debug for AwsHttpConnector { impl std::fmt::Debug for AwsHttpConnector {
@ -42,18 +39,17 @@ impl AwsConnector for AwsHttpConnector {
.client .client
.send(Request::from_parts(parts, convert_to_async_body(body))); .send(Request::from_parts(parts, convert_to_async_body(body)));
let handle = self.handle.clone();
HttpConnectorFuture::new(async move { HttpConnectorFuture::new(async move {
let response = match response.await { let response = match response.await {
Ok(response) => response, Ok(response) => response,
Err(err) => return Err(ConnectorError::other(err.into(), None)), Err(err) => return Err(ConnectorError::other(err.into(), None)),
}; };
let (parts, body) = response.into_parts(); let (parts, body) = response.into_parts();
let body = convert_to_sdk_body(body, handle).await;
let mut response = let mut response = HttpResponse::new(
HttpResponse::new(StatusCode::try_from(parts.status.as_u16()).unwrap(), body); StatusCode::try_from(parts.status.as_u16()).unwrap(),
convert_to_sdk_body(body),
);
let headers = match Headers::try_from(parts.headers) { let headers = match Headers::try_from(parts.headers) {
Ok(headers) => headers, Ok(headers) => headers,
@ -70,7 +66,6 @@ impl AwsConnector for AwsHttpConnector {
#[derive(Clone)] #[derive(Clone)]
pub struct AwsHttpClient { pub struct AwsHttpClient {
client: Arc<dyn HttpClient>, client: Arc<dyn HttpClient>,
handler: Handle,
} }
impl std::fmt::Debug for AwsHttpClient { impl std::fmt::Debug for AwsHttpClient {
@ -80,11 +75,8 @@ impl std::fmt::Debug for AwsHttpClient {
} }
impl AwsHttpClient { impl AwsHttpClient {
pub fn new(client: Arc<dyn HttpClient>, handle: Handle) -> Self { pub fn new(client: Arc<dyn HttpClient>) -> Self {
Self { Self { client }
client,
handler: handle,
}
} }
} }
@ -96,25 +88,12 @@ impl AwsClient for AwsHttpClient {
) -> SharedHttpConnector { ) -> SharedHttpConnector {
SharedHttpConnector::new(AwsHttpConnector { SharedHttpConnector::new(AwsHttpConnector {
client: self.client.clone(), client: self.client.clone(),
handle: self.handler.clone(),
}) })
} }
} }
pub async fn convert_to_sdk_body(body: AsyncBody, handle: Handle) -> SdkBody { pub fn convert_to_sdk_body(body: AsyncBody) -> SdkBody {
match body.0 { SdkBody::from_body_1_x(body)
Inner::Empty => SdkBody::empty(),
Inner::Bytes(bytes) => SdkBody::from(bytes.into_inner()),
Inner::AsyncReader(mut reader) => {
let buffer = handle.spawn(async move {
let mut buffer = Vec::new();
let _ = reader.read_to_end(&mut buffer).await;
buffer
});
SdkBody::from(buffer.await.unwrap_or_default())
}
}
} }
pub fn convert_to_async_body(body: SdkBody) -> AsyncBody { pub fn convert_to_async_body(body: SdkBody) -> AsyncBody {

View file

@ -343,8 +343,7 @@ impl BufferDiffInner {
.. ..
} in hunks.iter().cloned() } in hunks.iter().cloned()
{ {
let preceding_pending_hunks = let preceding_pending_hunks = old_pending_hunks.slice(&buffer_range.start, Bias::Left);
old_pending_hunks.slice(&buffer_range.start, Bias::Left, buffer);
pending_hunks.append(preceding_pending_hunks, buffer); pending_hunks.append(preceding_pending_hunks, buffer);
// Skip all overlapping or adjacent old pending hunks // Skip all overlapping or adjacent old pending hunks
@ -355,7 +354,7 @@ impl BufferDiffInner {
.cmp(&buffer_range.end, buffer) .cmp(&buffer_range.end, buffer)
.is_le() .is_le()
}) { }) {
old_pending_hunks.next(buffer); old_pending_hunks.next();
} }
if (stage && secondary_status == DiffHunkSecondaryStatus::NoSecondaryHunk) if (stage && secondary_status == DiffHunkSecondaryStatus::NoSecondaryHunk)
@ -379,10 +378,10 @@ impl BufferDiffInner {
); );
} }
// append the remainder // append the remainder
pending_hunks.append(old_pending_hunks.suffix(buffer), buffer); pending_hunks.append(old_pending_hunks.suffix(), buffer);
let mut unstaged_hunk_cursor = unstaged_diff.hunks.cursor::<DiffHunkSummary>(buffer); let mut unstaged_hunk_cursor = unstaged_diff.hunks.cursor::<DiffHunkSummary>(buffer);
unstaged_hunk_cursor.next(buffer); unstaged_hunk_cursor.next();
// then, iterate over all pending hunks (both new ones and the existing ones) and compute the edits // then, iterate over all pending hunks (both new ones and the existing ones) and compute the edits
let mut prev_unstaged_hunk_buffer_end = 0; let mut prev_unstaged_hunk_buffer_end = 0;
@ -397,8 +396,7 @@ impl BufferDiffInner {
}) = pending_hunks_iter.next() }) = pending_hunks_iter.next()
{ {
// Advance unstaged_hunk_cursor to skip unstaged hunks before current hunk // Advance unstaged_hunk_cursor to skip unstaged hunks before current hunk
let skipped_unstaged = let skipped_unstaged = unstaged_hunk_cursor.slice(&buffer_range.start, Bias::Left);
unstaged_hunk_cursor.slice(&buffer_range.start, Bias::Left, buffer);
if let Some(unstaged_hunk) = skipped_unstaged.last() { if let Some(unstaged_hunk) = skipped_unstaged.last() {
prev_unstaged_hunk_base_text_end = unstaged_hunk.diff_base_byte_range.end; prev_unstaged_hunk_base_text_end = unstaged_hunk.diff_base_byte_range.end;
@ -425,7 +423,7 @@ impl BufferDiffInner {
buffer_offset_range.end = buffer_offset_range.end =
buffer_offset_range.end.max(unstaged_hunk_offset_range.end); buffer_offset_range.end.max(unstaged_hunk_offset_range.end);
unstaged_hunk_cursor.next(buffer); unstaged_hunk_cursor.next();
continue; continue;
} }
} }
@ -514,7 +512,7 @@ impl BufferDiffInner {
}); });
let anchor_iter = iter::from_fn(move || { let anchor_iter = iter::from_fn(move || {
cursor.next(buffer); cursor.next();
cursor.item() cursor.item()
}) })
.flat_map(move |hunk| { .flat_map(move |hunk| {
@ -531,12 +529,12 @@ impl BufferDiffInner {
}); });
let mut pending_hunks_cursor = self.pending_hunks.cursor::<DiffHunkSummary>(buffer); let mut pending_hunks_cursor = self.pending_hunks.cursor::<DiffHunkSummary>(buffer);
pending_hunks_cursor.next(buffer); pending_hunks_cursor.next();
let mut secondary_cursor = None; let mut secondary_cursor = None;
if let Some(secondary) = secondary.as_ref() { if let Some(secondary) = secondary.as_ref() {
let mut cursor = secondary.hunks.cursor::<DiffHunkSummary>(buffer); let mut cursor = secondary.hunks.cursor::<DiffHunkSummary>(buffer);
cursor.next(buffer); cursor.next();
secondary_cursor = Some(cursor); secondary_cursor = Some(cursor);
} }
@ -564,7 +562,7 @@ impl BufferDiffInner {
.cmp(&pending_hunks_cursor.start().buffer_range.start, buffer) .cmp(&pending_hunks_cursor.start().buffer_range.start, buffer)
.is_gt() .is_gt()
{ {
pending_hunks_cursor.seek_forward(&start_anchor, Bias::Left, buffer); pending_hunks_cursor.seek_forward(&start_anchor, Bias::Left);
} }
if let Some(pending_hunk) = pending_hunks_cursor.item() { if let Some(pending_hunk) = pending_hunks_cursor.item() {
@ -590,7 +588,7 @@ impl BufferDiffInner {
.cmp(&secondary_cursor.start().buffer_range.start, buffer) .cmp(&secondary_cursor.start().buffer_range.start, buffer)
.is_gt() .is_gt()
{ {
secondary_cursor.seek_forward(&start_anchor, Bias::Left, buffer); secondary_cursor.seek_forward(&start_anchor, Bias::Left);
} }
if let Some(secondary_hunk) = secondary_cursor.item() { if let Some(secondary_hunk) = secondary_cursor.item() {
@ -635,7 +633,7 @@ impl BufferDiffInner {
}); });
iter::from_fn(move || { iter::from_fn(move || {
cursor.prev(buffer); cursor.prev();
let hunk = cursor.item()?; let hunk = cursor.item()?;
let range = hunk.buffer_range.to_point(buffer); let range = hunk.buffer_range.to_point(buffer);
@ -653,8 +651,8 @@ impl BufferDiffInner {
fn compare(&self, old: &Self, new_snapshot: &text::BufferSnapshot) -> Option<Range<Anchor>> { fn compare(&self, old: &Self, new_snapshot: &text::BufferSnapshot) -> Option<Range<Anchor>> {
let mut new_cursor = self.hunks.cursor::<()>(new_snapshot); let mut new_cursor = self.hunks.cursor::<()>(new_snapshot);
let mut old_cursor = old.hunks.cursor::<()>(new_snapshot); let mut old_cursor = old.hunks.cursor::<()>(new_snapshot);
old_cursor.next(new_snapshot); old_cursor.next();
new_cursor.next(new_snapshot); new_cursor.next();
let mut start = None; let mut start = None;
let mut end = None; let mut end = None;
@ -669,7 +667,7 @@ impl BufferDiffInner {
Ordering::Less => { Ordering::Less => {
start.get_or_insert(new_hunk.buffer_range.start); start.get_or_insert(new_hunk.buffer_range.start);
end.replace(new_hunk.buffer_range.end); end.replace(new_hunk.buffer_range.end);
new_cursor.next(new_snapshot); new_cursor.next();
} }
Ordering::Equal => { Ordering::Equal => {
if new_hunk != old_hunk { if new_hunk != old_hunk {
@ -686,25 +684,25 @@ impl BufferDiffInner {
} }
} }
new_cursor.next(new_snapshot); new_cursor.next();
old_cursor.next(new_snapshot); old_cursor.next();
} }
Ordering::Greater => { Ordering::Greater => {
start.get_or_insert(old_hunk.buffer_range.start); start.get_or_insert(old_hunk.buffer_range.start);
end.replace(old_hunk.buffer_range.end); end.replace(old_hunk.buffer_range.end);
old_cursor.next(new_snapshot); old_cursor.next();
} }
} }
} }
(Some(new_hunk), None) => { (Some(new_hunk), None) => {
start.get_or_insert(new_hunk.buffer_range.start); start.get_or_insert(new_hunk.buffer_range.start);
end.replace(new_hunk.buffer_range.end); end.replace(new_hunk.buffer_range.end);
new_cursor.next(new_snapshot); new_cursor.next();
} }
(None, Some(old_hunk)) => { (None, Some(old_hunk)) => {
start.get_or_insert(old_hunk.buffer_range.start); start.get_or_insert(old_hunk.buffer_range.start);
end.replace(old_hunk.buffer_range.end); end.replace(old_hunk.buffer_range.end);
old_cursor.next(new_snapshot); old_cursor.next();
} }
(None, None) => break, (None, None) => break,
} }

View file

@ -333,7 +333,7 @@ impl ChannelChat {
if first_id <= message_id { if first_id <= message_id {
let mut cursor = chat.messages.cursor::<(ChannelMessageId, Count)>(&()); let mut cursor = chat.messages.cursor::<(ChannelMessageId, Count)>(&());
let message_id = ChannelMessageId::Saved(message_id); let message_id = ChannelMessageId::Saved(message_id);
cursor.seek(&message_id, Bias::Left, &()); cursor.seek(&message_id, Bias::Left);
return ControlFlow::Break( return ControlFlow::Break(
if cursor if cursor
.item() .item()
@ -499,7 +499,7 @@ impl ChannelChat {
pub fn message(&self, ix: usize) -> &ChannelMessage { pub fn message(&self, ix: usize) -> &ChannelMessage {
let mut cursor = self.messages.cursor::<Count>(&()); let mut cursor = self.messages.cursor::<Count>(&());
cursor.seek(&Count(ix), Bias::Right, &()); cursor.seek(&Count(ix), Bias::Right);
cursor.item().unwrap() cursor.item().unwrap()
} }
@ -516,13 +516,13 @@ impl ChannelChat {
pub fn messages_in_range(&self, range: Range<usize>) -> impl Iterator<Item = &ChannelMessage> { pub fn messages_in_range(&self, range: Range<usize>) -> impl Iterator<Item = &ChannelMessage> {
let mut cursor = self.messages.cursor::<Count>(&()); let mut cursor = self.messages.cursor::<Count>(&());
cursor.seek(&Count(range.start), Bias::Right, &()); cursor.seek(&Count(range.start), Bias::Right);
cursor.take(range.len()) cursor.take(range.len())
} }
pub fn pending_messages(&self) -> impl Iterator<Item = &ChannelMessage> { pub fn pending_messages(&self) -> impl Iterator<Item = &ChannelMessage> {
let mut cursor = self.messages.cursor::<ChannelMessageId>(&()); let mut cursor = self.messages.cursor::<ChannelMessageId>(&());
cursor.seek(&ChannelMessageId::Pending(0), Bias::Left, &()); cursor.seek(&ChannelMessageId::Pending(0), Bias::Left);
cursor cursor
} }
@ -588,9 +588,9 @@ impl ChannelChat {
.collect::<HashSet<_>>(); .collect::<HashSet<_>>();
let mut old_cursor = self.messages.cursor::<(ChannelMessageId, Count)>(&()); let mut old_cursor = self.messages.cursor::<(ChannelMessageId, Count)>(&());
let mut new_messages = old_cursor.slice(&first_message.id, Bias::Left, &()); let mut new_messages = old_cursor.slice(&first_message.id, Bias::Left);
let start_ix = old_cursor.start().1.0; let start_ix = old_cursor.start().1.0;
let removed_messages = old_cursor.slice(&last_message.id, Bias::Right, &()); let removed_messages = old_cursor.slice(&last_message.id, Bias::Right);
let removed_count = removed_messages.summary().count; let removed_count = removed_messages.summary().count;
let new_count = messages.summary().count; let new_count = messages.summary().count;
let end_ix = start_ix + removed_count; let end_ix = start_ix + removed_count;
@ -599,10 +599,10 @@ impl ChannelChat {
let mut ranges = Vec::<Range<usize>>::new(); let mut ranges = Vec::<Range<usize>>::new();
if new_messages.last().unwrap().is_pending() { if new_messages.last().unwrap().is_pending() {
new_messages.append(old_cursor.suffix(&()), &()); new_messages.append(old_cursor.suffix(), &());
} else { } else {
new_messages.append( new_messages.append(
old_cursor.slice(&ChannelMessageId::Pending(0), Bias::Left, &()), old_cursor.slice(&ChannelMessageId::Pending(0), Bias::Left),
&(), &(),
); );
@ -617,7 +617,7 @@ impl ChannelChat {
} else { } else {
new_messages.push(message.clone(), &()); new_messages.push(message.clone(), &());
} }
old_cursor.next(&()); old_cursor.next();
} }
} }
@ -641,12 +641,12 @@ impl ChannelChat {
fn message_removed(&mut self, id: u64, cx: &mut Context<Self>) { fn message_removed(&mut self, id: u64, cx: &mut Context<Self>) {
let mut cursor = self.messages.cursor::<ChannelMessageId>(&()); let mut cursor = self.messages.cursor::<ChannelMessageId>(&());
let mut messages = cursor.slice(&ChannelMessageId::Saved(id), Bias::Left, &()); let mut messages = cursor.slice(&ChannelMessageId::Saved(id), Bias::Left);
if let Some(item) = cursor.item() { if let Some(item) = cursor.item() {
if item.id == ChannelMessageId::Saved(id) { if item.id == ChannelMessageId::Saved(id) {
let deleted_message_ix = messages.summary().count; let deleted_message_ix = messages.summary().count;
cursor.next(&()); cursor.next();
messages.append(cursor.suffix(&()), &()); messages.append(cursor.suffix(), &());
drop(cursor); drop(cursor);
self.messages = messages; self.messages = messages;
@ -680,7 +680,7 @@ impl ChannelChat {
cx: &mut Context<Self>, cx: &mut Context<Self>,
) { ) {
let mut cursor = self.messages.cursor::<ChannelMessageId>(&()); let mut cursor = self.messages.cursor::<ChannelMessageId>(&());
let mut messages = cursor.slice(&id, Bias::Left, &()); let mut messages = cursor.slice(&id, Bias::Left);
let ix = messages.summary().count; let ix = messages.summary().count;
if let Some(mut message_to_update) = cursor.item().cloned() { if let Some(mut message_to_update) = cursor.item().cloned() {
@ -688,10 +688,10 @@ impl ChannelChat {
message_to_update.mentions = mentions; message_to_update.mentions = mentions;
message_to_update.edited_at = edited_at; message_to_update.edited_at = edited_at;
messages.push(message_to_update, &()); messages.push(message_to_update, &());
cursor.next(&()); cursor.next();
} }
messages.append(cursor.suffix(&()), &()); messages.append(cursor.suffix(), &());
drop(cursor); drop(cursor);
self.messages = messages; self.messages = messages;

View file

@ -151,6 +151,7 @@ impl Settings for ProxySettings {
pub fn init_settings(cx: &mut App) { pub fn init_settings(cx: &mut App) {
TelemetrySettings::register(cx); TelemetrySettings::register(cx);
DisableAiSettings::register(cx);
ClientSettings::register(cx); ClientSettings::register(cx);
ProxySettings::register(cx); ProxySettings::register(cx);
} }
@ -548,6 +549,33 @@ impl settings::Settings for TelemetrySettings {
} }
} }
/// Whether to disable all AI features in Zed.
///
/// Default: false
#[derive(Copy, Clone, Debug)]
pub struct DisableAiSettings {
pub disable_ai: bool,
}
impl settings::Settings for DisableAiSettings {
const KEY: Option<&'static str> = Some("disable_ai");
type FileContent = Option<bool>;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
Ok(Self {
disable_ai: sources
.user
.or(sources.server)
.copied()
.flatten()
.unwrap_or(sources.default.ok_or_else(Self::missing_default)?),
})
}
fn import_from_vscode(_vscode: &settings::VsCodeSettings, _current: &mut Self::FileContent) {}
}
impl Client { impl Client {
pub fn new( pub fn new(
clock: Arc<dyn SystemClock>, clock: Arc<dyn SystemClock>,

View file

@ -17,7 +17,7 @@ use crate::stripe_client::{
StripeCreateCheckoutSessionSubscriptionData, StripeCreateMeterEventParams, StripeCreateCheckoutSessionSubscriptionData, StripeCreateMeterEventParams,
StripeCreateMeterEventPayload, StripeCreateSubscriptionItems, StripeCreateSubscriptionParams, StripeCreateMeterEventPayload, StripeCreateSubscriptionItems, StripeCreateSubscriptionParams,
StripeCustomerId, StripeCustomerUpdate, StripeCustomerUpdateAddress, StripeCustomerUpdateName, StripeCustomerId, StripeCustomerUpdate, StripeCustomerUpdateAddress, StripeCustomerUpdateName,
StripeMeter, StripePrice, StripePriceId, StripeSubscription, StripeSubscriptionId, StripePrice, StripePriceId, StripeSubscription, StripeSubscriptionId,
StripeSubscriptionTrialSettings, StripeSubscriptionTrialSettingsEndBehavior, StripeSubscriptionTrialSettings, StripeSubscriptionTrialSettingsEndBehavior,
StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod, StripeTaxIdCollection, StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod, StripeTaxIdCollection,
UpdateSubscriptionItems, UpdateSubscriptionParams, UpdateSubscriptionItems, UpdateSubscriptionParams,
@ -30,8 +30,6 @@ pub struct StripeBilling {
#[derive(Default)] #[derive(Default)]
struct StripeBillingState { struct StripeBillingState {
meters_by_event_name: HashMap<String, StripeMeter>,
price_ids_by_meter_id: HashMap<String, StripePriceId>,
prices_by_lookup_key: HashMap<String, StripePrice>, prices_by_lookup_key: HashMap<String, StripePrice>,
} }
@ -60,24 +58,11 @@ impl StripeBilling {
let mut state = self.state.write().await; let mut state = self.state.write().await;
let (meters, prices) = let prices = self.client.list_prices().await?;
futures::try_join!(self.client.list_meters(), self.client.list_prices())?;
for meter in meters {
state
.meters_by_event_name
.insert(meter.event_name.clone(), meter);
}
for price in prices { for price in prices {
if let Some(lookup_key) = price.lookup_key.clone() { if let Some(lookup_key) = price.lookup_key.clone() {
state.prices_by_lookup_key.insert(lookup_key, price.clone()); state.prices_by_lookup_key.insert(lookup_key, price);
}
if let Some(recurring) = price.recurring {
if let Some(meter) = recurring.meter {
state.price_ids_by_meter_id.insert(meter, price.id);
}
} }
} }

View file

@ -6,6 +6,7 @@ mod sign_in;
use crate::sign_in::initiate_sign_in_within_workspace; use crate::sign_in::initiate_sign_in_within_workspace;
use ::fs::Fs; use ::fs::Fs;
use anyhow::{Context as _, Result, anyhow}; use anyhow::{Context as _, Result, anyhow};
use client::DisableAiSettings;
use collections::{HashMap, HashSet}; use collections::{HashMap, HashSet};
use command_palette_hooks::CommandPaletteFilter; use command_palette_hooks::CommandPaletteFilter;
use futures::{Future, FutureExt, TryFutureExt, channel::oneshot, future::Shared}; use futures::{Future, FutureExt, TryFutureExt, channel::oneshot, future::Shared};
@ -25,6 +26,7 @@ use node_runtime::NodeRuntime;
use parking_lot::Mutex; use parking_lot::Mutex;
use request::StatusNotification; use request::StatusNotification;
use serde_json::json; use serde_json::json;
use settings::Settings;
use settings::SettingsStore; use settings::SettingsStore;
use sign_in::{reinstall_and_sign_in_within_workspace, sign_out_within_workspace}; use sign_in::{reinstall_and_sign_in_within_workspace, sign_out_within_workspace};
use std::collections::hash_map::Entry; use std::collections::hash_map::Entry;
@ -93,26 +95,34 @@ pub fn init(
let copilot_auth_action_types = [TypeId::of::<SignOut>()]; let copilot_auth_action_types = [TypeId::of::<SignOut>()];
let copilot_no_auth_action_types = [TypeId::of::<SignIn>()]; let copilot_no_auth_action_types = [TypeId::of::<SignIn>()];
let status = handle.read(cx).status(); let status = handle.read(cx).status();
let is_ai_disabled = DisableAiSettings::get_global(cx).disable_ai;
let filter = CommandPaletteFilter::global_mut(cx); let filter = CommandPaletteFilter::global_mut(cx);
match status { if is_ai_disabled {
Status::Disabled => { filter.hide_action_types(&copilot_action_types);
filter.hide_action_types(&copilot_action_types); filter.hide_action_types(&copilot_auth_action_types);
filter.hide_action_types(&copilot_auth_action_types); filter.hide_action_types(&copilot_no_auth_action_types);
filter.hide_action_types(&copilot_no_auth_action_types); } else {
} match status {
Status::Authorized => { Status::Disabled => {
filter.hide_action_types(&copilot_no_auth_action_types); filter.hide_action_types(&copilot_action_types);
filter.show_action_types( filter.hide_action_types(&copilot_auth_action_types);
copilot_action_types filter.hide_action_types(&copilot_no_auth_action_types);
.iter() }
.chain(&copilot_auth_action_types), Status::Authorized => {
); filter.hide_action_types(&copilot_no_auth_action_types);
} filter.show_action_types(
_ => { copilot_action_types
filter.hide_action_types(&copilot_action_types); .iter()
filter.hide_action_types(&copilot_auth_action_types); .chain(&copilot_auth_action_types),
filter.show_action_types(copilot_no_auth_action_types.iter()); );
}
_ => {
filter.hide_action_types(&copilot_action_types);
filter.hide_action_types(&copilot_auth_action_types);
filter.show_action_types(copilot_no_auth_action_types.iter());
}
} }
} }
}) })

View file

@ -524,10 +524,10 @@ impl BlockMap {
// * Isomorphic transforms that end *at* the start of the edit // * Isomorphic transforms that end *at* the start of the edit
// * Below blocks that end at the start of the edit // * Below blocks that end at the start of the edit
// However, if we hit a replace block that ends at the start of the edit we want to reconstruct it. // However, if we hit a replace block that ends at the start of the edit we want to reconstruct it.
new_transforms.append(cursor.slice(&old_start, Bias::Left, &()), &()); new_transforms.append(cursor.slice(&old_start, Bias::Left), &());
if let Some(transform) = cursor.item() { if let Some(transform) = cursor.item() {
if transform.summary.input_rows > 0 if transform.summary.input_rows > 0
&& cursor.end(&()) == old_start && cursor.end() == old_start
&& transform && transform
.block .block
.as_ref() .as_ref()
@ -535,13 +535,13 @@ impl BlockMap {
{ {
// Preserve the transform (push and next) // Preserve the transform (push and next)
new_transforms.push(transform.clone(), &()); new_transforms.push(transform.clone(), &());
cursor.next(&()); cursor.next();
// Preserve below blocks at end of edit // Preserve below blocks at end of edit
while let Some(transform) = cursor.item() { while let Some(transform) = cursor.item() {
if transform.block.as_ref().map_or(false, |b| b.place_below()) { if transform.block.as_ref().map_or(false, |b| b.place_below()) {
new_transforms.push(transform.clone(), &()); new_transforms.push(transform.clone(), &());
cursor.next(&()); cursor.next();
} else { } else {
break; break;
} }
@ -579,8 +579,8 @@ impl BlockMap {
let mut new_end = WrapRow(edit.new.end); let mut new_end = WrapRow(edit.new.end);
loop { loop {
// Seek to the transform starting at or after the end of the edit // Seek to the transform starting at or after the end of the edit
cursor.seek(&old_end, Bias::Left, &()); cursor.seek(&old_end, Bias::Left);
cursor.next(&()); cursor.next();
// Extend edit to the end of the discarded transform so it is reconstructed in full // Extend edit to the end of the discarded transform so it is reconstructed in full
let transform_rows_after_edit = cursor.start().0 - old_end.0; let transform_rows_after_edit = cursor.start().0 - old_end.0;
@ -592,8 +592,8 @@ impl BlockMap {
if next_edit.old.start <= cursor.start().0 { if next_edit.old.start <= cursor.start().0 {
old_end = WrapRow(next_edit.old.end); old_end = WrapRow(next_edit.old.end);
new_end = WrapRow(next_edit.new.end); new_end = WrapRow(next_edit.new.end);
cursor.seek(&old_end, Bias::Left, &()); cursor.seek(&old_end, Bias::Left);
cursor.next(&()); cursor.next();
edits.next(); edits.next();
} else { } else {
break; break;
@ -608,7 +608,7 @@ impl BlockMap {
// Discard below blocks at the end of the edit. They'll be reconstructed. // Discard below blocks at the end of the edit. They'll be reconstructed.
while let Some(transform) = cursor.item() { while let Some(transform) = cursor.item() {
if transform.block.as_ref().map_or(false, |b| b.place_below()) { if transform.block.as_ref().map_or(false, |b| b.place_below()) {
cursor.next(&()); cursor.next();
} else { } else {
break; break;
} }
@ -720,7 +720,7 @@ impl BlockMap {
push_isomorphic(&mut new_transforms, rows_after_last_block, wrap_snapshot); push_isomorphic(&mut new_transforms, rows_after_last_block, wrap_snapshot);
} }
new_transforms.append(cursor.suffix(&()), &()); new_transforms.append(cursor.suffix(), &());
debug_assert_eq!( debug_assert_eq!(
new_transforms.summary().input_rows, new_transforms.summary().input_rows,
wrap_snapshot.max_point().row() + 1 wrap_snapshot.max_point().row() + 1
@ -971,7 +971,7 @@ impl BlockMapReader<'_> {
); );
let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(&()); let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(&());
cursor.seek(&start_wrap_row, Bias::Left, &()); cursor.seek(&start_wrap_row, Bias::Left);
while let Some(transform) = cursor.item() { while let Some(transform) = cursor.item() {
if cursor.start().0 > end_wrap_row { if cursor.start().0 > end_wrap_row {
break; break;
@ -982,7 +982,7 @@ impl BlockMapReader<'_> {
return Some(cursor.start().1); return Some(cursor.start().1);
} }
} }
cursor.next(&()); cursor.next();
} }
None None
@ -1293,7 +1293,7 @@ impl BlockSnapshot {
let max_output_row = cmp::min(rows.end, self.transforms.summary().output_rows); let max_output_row = cmp::min(rows.end, self.transforms.summary().output_rows);
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
cursor.seek(&BlockRow(rows.start), Bias::Right, &()); cursor.seek(&BlockRow(rows.start), Bias::Right);
let transform_output_start = cursor.start().0.0; let transform_output_start = cursor.start().0.0;
let transform_input_start = cursor.start().1.0; let transform_input_start = cursor.start().1.0;
@ -1325,7 +1325,7 @@ impl BlockSnapshot {
pub(super) fn row_infos(&self, start_row: BlockRow) -> BlockRows<'_> { pub(super) fn row_infos(&self, start_row: BlockRow) -> BlockRows<'_> {
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
cursor.seek(&start_row, Bias::Right, &()); cursor.seek(&start_row, Bias::Right);
let (output_start, input_start) = cursor.start(); let (output_start, input_start) = cursor.start();
let overshoot = if cursor let overshoot = if cursor
.item() .item()
@ -1346,9 +1346,9 @@ impl BlockSnapshot {
pub fn blocks_in_range(&self, rows: Range<u32>) -> impl Iterator<Item = (u32, &Block)> { pub fn blocks_in_range(&self, rows: Range<u32>) -> impl Iterator<Item = (u32, &Block)> {
let mut cursor = self.transforms.cursor::<BlockRow>(&()); let mut cursor = self.transforms.cursor::<BlockRow>(&());
cursor.seek(&BlockRow(rows.start), Bias::Left, &()); cursor.seek(&BlockRow(rows.start), Bias::Left);
while cursor.start().0 < rows.start && cursor.end(&()).0 <= rows.start { while cursor.start().0 < rows.start && cursor.end().0 <= rows.start {
cursor.next(&()); cursor.next();
} }
std::iter::from_fn(move || { std::iter::from_fn(move || {
@ -1364,10 +1364,10 @@ impl BlockSnapshot {
break; break;
} }
if let Some(block) = &transform.block { if let Some(block) = &transform.block {
cursor.next(&()); cursor.next();
return Some((start_row, block)); return Some((start_row, block));
} else { } else {
cursor.next(&()); cursor.next();
} }
} }
None None
@ -1377,7 +1377,7 @@ impl BlockSnapshot {
pub fn sticky_header_excerpt(&self, position: f32) -> Option<StickyHeaderExcerpt<'_>> { pub fn sticky_header_excerpt(&self, position: f32) -> Option<StickyHeaderExcerpt<'_>> {
let top_row = position as u32; let top_row = position as u32;
let mut cursor = self.transforms.cursor::<BlockRow>(&()); let mut cursor = self.transforms.cursor::<BlockRow>(&());
cursor.seek(&BlockRow(top_row), Bias::Right, &()); cursor.seek(&BlockRow(top_row), Bias::Right);
while let Some(transform) = cursor.item() { while let Some(transform) = cursor.item() {
match &transform.block { match &transform.block {
@ -1386,7 +1386,7 @@ impl BlockSnapshot {
} }
Some(block) if block.is_buffer_header() => return None, Some(block) if block.is_buffer_header() => return None,
_ => { _ => {
cursor.prev(&()); cursor.prev();
continue; continue;
} }
} }
@ -1414,7 +1414,7 @@ impl BlockSnapshot {
let wrap_row = WrapRow(wrap_point.row()); let wrap_row = WrapRow(wrap_point.row());
let mut cursor = self.transforms.cursor::<WrapRow>(&()); let mut cursor = self.transforms.cursor::<WrapRow>(&());
cursor.seek(&wrap_row, Bias::Left, &()); cursor.seek(&wrap_row, Bias::Left);
while let Some(transform) = cursor.item() { while let Some(transform) = cursor.item() {
if let Some(block) = transform.block.as_ref() { if let Some(block) = transform.block.as_ref() {
@ -1425,7 +1425,7 @@ impl BlockSnapshot {
break; break;
} }
cursor.next(&()); cursor.next();
} }
None None
@ -1442,7 +1442,7 @@ impl BlockSnapshot {
pub fn longest_row_in_range(&self, range: Range<BlockRow>) -> BlockRow { pub fn longest_row_in_range(&self, range: Range<BlockRow>) -> BlockRow {
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
cursor.seek(&range.start, Bias::Right, &()); cursor.seek(&range.start, Bias::Right);
let mut longest_row = range.start; let mut longest_row = range.start;
let mut longest_row_chars = 0; let mut longest_row_chars = 0;
@ -1453,7 +1453,7 @@ impl BlockSnapshot {
let wrap_start_row = input_start.0 + overshoot; let wrap_start_row = input_start.0 + overshoot;
let wrap_end_row = cmp::min( let wrap_end_row = cmp::min(
input_start.0 + (range.end.0 - output_start.0), input_start.0 + (range.end.0 - output_start.0),
cursor.end(&()).1.0, cursor.end().1.0,
); );
let summary = self let summary = self
.wrap_snapshot .wrap_snapshot
@ -1461,12 +1461,12 @@ impl BlockSnapshot {
longest_row = BlockRow(range.start.0 + summary.longest_row); longest_row = BlockRow(range.start.0 + summary.longest_row);
longest_row_chars = summary.longest_row_chars; longest_row_chars = summary.longest_row_chars;
} }
cursor.next(&()); cursor.next();
} }
let cursor_start_row = cursor.start().0; let cursor_start_row = cursor.start().0;
if range.end > cursor_start_row { if range.end > cursor_start_row {
let summary = cursor.summary::<_, TransformSummary>(&range.end, Bias::Right, &()); let summary = cursor.summary::<_, TransformSummary>(&range.end, Bias::Right);
if summary.longest_row_chars > longest_row_chars { if summary.longest_row_chars > longest_row_chars {
longest_row = BlockRow(cursor_start_row.0 + summary.longest_row); longest_row = BlockRow(cursor_start_row.0 + summary.longest_row);
longest_row_chars = summary.longest_row_chars; longest_row_chars = summary.longest_row_chars;
@ -1493,7 +1493,7 @@ impl BlockSnapshot {
pub(super) fn line_len(&self, row: BlockRow) -> u32 { pub(super) fn line_len(&self, row: BlockRow) -> u32 {
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
cursor.seek(&BlockRow(row.0), Bias::Right, &()); cursor.seek(&BlockRow(row.0), Bias::Right);
if let Some(transform) = cursor.item() { if let Some(transform) = cursor.item() {
let (output_start, input_start) = cursor.start(); let (output_start, input_start) = cursor.start();
let overshoot = row.0 - output_start.0; let overshoot = row.0 - output_start.0;
@ -1511,13 +1511,13 @@ impl BlockSnapshot {
pub(super) fn is_block_line(&self, row: BlockRow) -> bool { pub(super) fn is_block_line(&self, row: BlockRow) -> bool {
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
cursor.seek(&row, Bias::Right, &()); cursor.seek(&row, Bias::Right);
cursor.item().map_or(false, |t| t.block.is_some()) cursor.item().map_or(false, |t| t.block.is_some())
} }
pub(super) fn is_folded_buffer_header(&self, row: BlockRow) -> bool { pub(super) fn is_folded_buffer_header(&self, row: BlockRow) -> bool {
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
cursor.seek(&row, Bias::Right, &()); cursor.seek(&row, Bias::Right);
let Some(transform) = cursor.item() else { let Some(transform) = cursor.item() else {
return false; return false;
}; };
@ -1529,7 +1529,7 @@ impl BlockSnapshot {
.wrap_snapshot .wrap_snapshot
.make_wrap_point(Point::new(row.0, 0), Bias::Left); .make_wrap_point(Point::new(row.0, 0), Bias::Left);
let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(&()); let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(&());
cursor.seek(&WrapRow(wrap_point.row()), Bias::Right, &()); cursor.seek(&WrapRow(wrap_point.row()), Bias::Right);
cursor.item().map_or(false, |transform| { cursor.item().map_or(false, |transform| {
transform transform
.block .block
@ -1540,17 +1540,17 @@ impl BlockSnapshot {
pub fn clip_point(&self, point: BlockPoint, bias: Bias) -> BlockPoint { pub fn clip_point(&self, point: BlockPoint, bias: Bias) -> BlockPoint {
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
cursor.seek(&BlockRow(point.row), Bias::Right, &()); cursor.seek(&BlockRow(point.row), Bias::Right);
let max_input_row = WrapRow(self.transforms.summary().input_rows); let max_input_row = WrapRow(self.transforms.summary().input_rows);
let mut search_left = let mut search_left =
(bias == Bias::Left && cursor.start().1.0 > 0) || cursor.end(&()).1 == max_input_row; (bias == Bias::Left && cursor.start().1.0 > 0) || cursor.end().1 == max_input_row;
let mut reversed = false; let mut reversed = false;
loop { loop {
if let Some(transform) = cursor.item() { if let Some(transform) = cursor.item() {
let (output_start_row, input_start_row) = cursor.start(); let (output_start_row, input_start_row) = cursor.start();
let (output_end_row, input_end_row) = cursor.end(&()); let (output_end_row, input_end_row) = cursor.end();
let output_start = Point::new(output_start_row.0, 0); let output_start = Point::new(output_start_row.0, 0);
let input_start = Point::new(input_start_row.0, 0); let input_start = Point::new(input_start_row.0, 0);
let input_end = Point::new(input_end_row.0, 0); let input_end = Point::new(input_end_row.0, 0);
@ -1584,23 +1584,23 @@ impl BlockSnapshot {
} }
if search_left { if search_left {
cursor.prev(&()); cursor.prev();
} else { } else {
cursor.next(&()); cursor.next();
} }
} else if reversed { } else if reversed {
return self.max_point(); return self.max_point();
} else { } else {
reversed = true; reversed = true;
search_left = !search_left; search_left = !search_left;
cursor.seek(&BlockRow(point.row), Bias::Right, &()); cursor.seek(&BlockRow(point.row), Bias::Right);
} }
} }
} }
pub fn to_block_point(&self, wrap_point: WrapPoint) -> BlockPoint { pub fn to_block_point(&self, wrap_point: WrapPoint) -> BlockPoint {
let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(&()); let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(&());
cursor.seek(&WrapRow(wrap_point.row()), Bias::Right, &()); cursor.seek(&WrapRow(wrap_point.row()), Bias::Right);
if let Some(transform) = cursor.item() { if let Some(transform) = cursor.item() {
if transform.block.is_some() { if transform.block.is_some() {
BlockPoint::new(cursor.start().1.0, 0) BlockPoint::new(cursor.start().1.0, 0)
@ -1618,7 +1618,7 @@ impl BlockSnapshot {
pub fn to_wrap_point(&self, block_point: BlockPoint, bias: Bias) -> WrapPoint { pub fn to_wrap_point(&self, block_point: BlockPoint, bias: Bias) -> WrapPoint {
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
cursor.seek(&BlockRow(block_point.row), Bias::Right, &()); cursor.seek(&BlockRow(block_point.row), Bias::Right);
if let Some(transform) = cursor.item() { if let Some(transform) = cursor.item() {
match transform.block.as_ref() { match transform.block.as_ref() {
Some(block) => { Some(block) => {
@ -1630,7 +1630,7 @@ impl BlockSnapshot {
} else if bias == Bias::Left { } else if bias == Bias::Left {
WrapPoint::new(cursor.start().1.0, 0) WrapPoint::new(cursor.start().1.0, 0)
} else { } else {
let wrap_row = cursor.end(&()).1.0 - 1; let wrap_row = cursor.end().1.0 - 1;
WrapPoint::new(wrap_row, self.wrap_snapshot.line_len(wrap_row)) WrapPoint::new(wrap_row, self.wrap_snapshot.line_len(wrap_row))
} }
} }
@ -1650,14 +1650,14 @@ impl BlockChunks<'_> {
/// Go to the next transform /// Go to the next transform
fn advance(&mut self) { fn advance(&mut self) {
self.input_chunk = Chunk::default(); self.input_chunk = Chunk::default();
self.transforms.next(&()); self.transforms.next();
while let Some(transform) = self.transforms.item() { while let Some(transform) = self.transforms.item() {
if transform if transform
.block .block
.as_ref() .as_ref()
.map_or(false, |block| block.height() == 0) .map_or(false, |block| block.height() == 0)
{ {
self.transforms.next(&()); self.transforms.next();
} else { } else {
break; break;
} }
@ -1672,7 +1672,7 @@ impl BlockChunks<'_> {
let start_output_row = self.transforms.start().0.0; let start_output_row = self.transforms.start().0.0;
if start_output_row < self.max_output_row { if start_output_row < self.max_output_row {
let end_input_row = cmp::min( let end_input_row = cmp::min(
self.transforms.end(&()).1.0, self.transforms.end().1.0,
start_input_row + (self.max_output_row - start_output_row), start_input_row + (self.max_output_row - start_output_row),
); );
self.input_chunks.seek(start_input_row..end_input_row); self.input_chunks.seek(start_input_row..end_input_row);
@ -1696,7 +1696,7 @@ impl<'a> Iterator for BlockChunks<'a> {
let transform = self.transforms.item()?; let transform = self.transforms.item()?;
if transform.block.is_some() { if transform.block.is_some() {
let block_start = self.transforms.start().0.0; let block_start = self.transforms.start().0.0;
let mut block_end = self.transforms.end(&()).0.0; let mut block_end = self.transforms.end().0.0;
self.advance(); self.advance();
if self.transforms.item().is_none() { if self.transforms.item().is_none() {
block_end -= 1; block_end -= 1;
@ -1731,7 +1731,7 @@ impl<'a> Iterator for BlockChunks<'a> {
} }
} }
let transform_end = self.transforms.end(&()).0.0; let transform_end = self.transforms.end().0.0;
let (prefix_rows, prefix_bytes) = let (prefix_rows, prefix_bytes) =
offset_for_row(self.input_chunk.text, transform_end - self.output_row); offset_for_row(self.input_chunk.text, transform_end - self.output_row);
self.output_row += prefix_rows; self.output_row += prefix_rows;
@ -1770,15 +1770,15 @@ impl Iterator for BlockRows<'_> {
self.started = true; self.started = true;
} }
if self.output_row.0 >= self.transforms.end(&()).0.0 { if self.output_row.0 >= self.transforms.end().0.0 {
self.transforms.next(&()); self.transforms.next();
while let Some(transform) = self.transforms.item() { while let Some(transform) = self.transforms.item() {
if transform if transform
.block .block
.as_ref() .as_ref()
.map_or(false, |block| block.height() == 0) .map_or(false, |block| block.height() == 0)
{ {
self.transforms.next(&()); self.transforms.next();
} else { } else {
break; break;
} }

View file

@ -52,15 +52,15 @@ impl CreaseSnapshot {
) -> Option<&'a Crease<Anchor>> { ) -> Option<&'a Crease<Anchor>> {
let start = snapshot.anchor_before(Point::new(row.0, 0)); let start = snapshot.anchor_before(Point::new(row.0, 0));
let mut cursor = self.creases.cursor::<ItemSummary>(snapshot); let mut cursor = self.creases.cursor::<ItemSummary>(snapshot);
cursor.seek(&start, Bias::Left, snapshot); cursor.seek(&start, Bias::Left);
while let Some(item) = cursor.item() { while let Some(item) = cursor.item() {
match Ord::cmp(&item.crease.range().start.to_point(snapshot).row, &row.0) { match Ord::cmp(&item.crease.range().start.to_point(snapshot).row, &row.0) {
Ordering::Less => cursor.next(snapshot), Ordering::Less => cursor.next(),
Ordering::Equal => { Ordering::Equal => {
if item.crease.range().start.is_valid(snapshot) { if item.crease.range().start.is_valid(snapshot) {
return Some(&item.crease); return Some(&item.crease);
} else { } else {
cursor.next(snapshot); cursor.next();
} }
} }
Ordering::Greater => break, Ordering::Greater => break,
@ -76,11 +76,11 @@ impl CreaseSnapshot {
) -> impl 'a + Iterator<Item = &'a Crease<Anchor>> { ) -> impl 'a + Iterator<Item = &'a Crease<Anchor>> {
let start = snapshot.anchor_before(Point::new(range.start.0, 0)); let start = snapshot.anchor_before(Point::new(range.start.0, 0));
let mut cursor = self.creases.cursor::<ItemSummary>(snapshot); let mut cursor = self.creases.cursor::<ItemSummary>(snapshot);
cursor.seek(&start, Bias::Left, snapshot); cursor.seek(&start, Bias::Left);
std::iter::from_fn(move || { std::iter::from_fn(move || {
while let Some(item) = cursor.item() { while let Some(item) = cursor.item() {
cursor.next(snapshot); cursor.next();
let crease_range = item.crease.range(); let crease_range = item.crease.range();
let crease_start = crease_range.start.to_point(snapshot); let crease_start = crease_range.start.to_point(snapshot);
let crease_end = crease_range.end.to_point(snapshot); let crease_end = crease_range.end.to_point(snapshot);
@ -102,13 +102,13 @@ impl CreaseSnapshot {
let mut cursor = self.creases.cursor::<ItemSummary>(snapshot); let mut cursor = self.creases.cursor::<ItemSummary>(snapshot);
let mut results = Vec::new(); let mut results = Vec::new();
cursor.next(snapshot); cursor.next();
while let Some(item) = cursor.item() { while let Some(item) = cursor.item() {
let crease_range = item.crease.range(); let crease_range = item.crease.range();
let start_point = crease_range.start.to_point(snapshot); let start_point = crease_range.start.to_point(snapshot);
let end_point = crease_range.end.to_point(snapshot); let end_point = crease_range.end.to_point(snapshot);
results.push((item.id, start_point..end_point)); results.push((item.id, start_point..end_point));
cursor.next(snapshot); cursor.next();
} }
results results
@ -298,7 +298,7 @@ impl CreaseMap {
let mut cursor = self.snapshot.creases.cursor::<ItemSummary>(snapshot); let mut cursor = self.snapshot.creases.cursor::<ItemSummary>(snapshot);
for crease in creases { for crease in creases {
let crease_range = crease.range().clone(); let crease_range = crease.range().clone();
new_creases.append(cursor.slice(&crease_range, Bias::Left, snapshot), snapshot); new_creases.append(cursor.slice(&crease_range, Bias::Left), snapshot);
let id = self.next_id; let id = self.next_id;
self.next_id.0 += 1; self.next_id.0 += 1;
@ -306,7 +306,7 @@ impl CreaseMap {
new_creases.push(CreaseItem { crease, id }, snapshot); new_creases.push(CreaseItem { crease, id }, snapshot);
new_ids.push(id); new_ids.push(id);
} }
new_creases.append(cursor.suffix(snapshot), snapshot); new_creases.append(cursor.suffix(), snapshot);
new_creases new_creases
}; };
new_ids new_ids
@ -332,9 +332,9 @@ impl CreaseMap {
let mut cursor = self.snapshot.creases.cursor::<ItemSummary>(snapshot); let mut cursor = self.snapshot.creases.cursor::<ItemSummary>(snapshot);
for (id, range) in &removals { for (id, range) in &removals {
new_creases.append(cursor.slice(range, Bias::Left, snapshot), snapshot); new_creases.append(cursor.slice(range, Bias::Left), snapshot);
while let Some(item) = cursor.item() { while let Some(item) = cursor.item() {
cursor.next(snapshot); cursor.next();
if item.id == *id { if item.id == *id {
break; break;
} else { } else {
@ -343,7 +343,7 @@ impl CreaseMap {
} }
} }
new_creases.append(cursor.suffix(snapshot), snapshot); new_creases.append(cursor.suffix(), snapshot);
new_creases new_creases
}; };

View file

@ -99,7 +99,7 @@ impl FoldPoint {
pub fn to_inlay_point(self, snapshot: &FoldSnapshot) -> InlayPoint { pub fn to_inlay_point(self, snapshot: &FoldSnapshot) -> InlayPoint {
let mut cursor = snapshot.transforms.cursor::<(FoldPoint, InlayPoint)>(&()); let mut cursor = snapshot.transforms.cursor::<(FoldPoint, InlayPoint)>(&());
cursor.seek(&self, Bias::Right, &()); cursor.seek(&self, Bias::Right);
let overshoot = self.0 - cursor.start().0.0; let overshoot = self.0 - cursor.start().0.0;
InlayPoint(cursor.start().1.0 + overshoot) InlayPoint(cursor.start().1.0 + overshoot)
} }
@ -108,7 +108,7 @@ impl FoldPoint {
let mut cursor = snapshot let mut cursor = snapshot
.transforms .transforms
.cursor::<(FoldPoint, TransformSummary)>(&()); .cursor::<(FoldPoint, TransformSummary)>(&());
cursor.seek(&self, Bias::Right, &()); cursor.seek(&self, Bias::Right);
let overshoot = self.0 - cursor.start().1.output.lines; let overshoot = self.0 - cursor.start().1.output.lines;
let mut offset = cursor.start().1.output.len; let mut offset = cursor.start().1.output.len;
if !overshoot.is_zero() { if !overshoot.is_zero() {
@ -187,10 +187,10 @@ impl FoldMapWriter<'_> {
width: None, width: None,
}, },
); );
new_tree.append(cursor.slice(&fold.range, Bias::Right, buffer), buffer); new_tree.append(cursor.slice(&fold.range, Bias::Right), buffer);
new_tree.push(fold, buffer); new_tree.push(fold, buffer);
} }
new_tree.append(cursor.suffix(buffer), buffer); new_tree.append(cursor.suffix(), buffer);
new_tree new_tree
}; };
@ -252,7 +252,7 @@ impl FoldMapWriter<'_> {
fold_ixs_to_delete.push(*folds_cursor.start()); fold_ixs_to_delete.push(*folds_cursor.start());
self.0.snapshot.fold_metadata_by_id.remove(&fold.id); self.0.snapshot.fold_metadata_by_id.remove(&fold.id);
} }
folds_cursor.next(buffer); folds_cursor.next();
} }
} }
@ -263,10 +263,10 @@ impl FoldMapWriter<'_> {
let mut cursor = self.0.snapshot.folds.cursor::<usize>(buffer); let mut cursor = self.0.snapshot.folds.cursor::<usize>(buffer);
let mut folds = SumTree::new(buffer); let mut folds = SumTree::new(buffer);
for fold_ix in fold_ixs_to_delete { for fold_ix in fold_ixs_to_delete {
folds.append(cursor.slice(&fold_ix, Bias::Right, buffer), buffer); folds.append(cursor.slice(&fold_ix, Bias::Right), buffer);
cursor.next(buffer); cursor.next();
} }
folds.append(cursor.suffix(buffer), buffer); folds.append(cursor.suffix(), buffer);
folds folds
}; };
@ -412,7 +412,7 @@ impl FoldMap {
let mut new_transforms = SumTree::<Transform>::default(); let mut new_transforms = SumTree::<Transform>::default();
let mut cursor = self.snapshot.transforms.cursor::<InlayOffset>(&()); let mut cursor = self.snapshot.transforms.cursor::<InlayOffset>(&());
cursor.seek(&InlayOffset(0), Bias::Right, &()); cursor.seek(&InlayOffset(0), Bias::Right);
while let Some(mut edit) = inlay_edits_iter.next() { while let Some(mut edit) = inlay_edits_iter.next() {
if let Some(item) = cursor.item() { if let Some(item) = cursor.item() {
@ -421,19 +421,19 @@ impl FoldMap {
|transform| { |transform| {
if !transform.is_fold() { if !transform.is_fold() {
transform.summary.add_summary(&item.summary, &()); transform.summary.add_summary(&item.summary, &());
cursor.next(&()); cursor.next();
} }
}, },
&(), &(),
); );
} }
} }
new_transforms.append(cursor.slice(&edit.old.start, Bias::Left, &()), &()); new_transforms.append(cursor.slice(&edit.old.start, Bias::Left), &());
edit.new.start -= edit.old.start - *cursor.start(); edit.new.start -= edit.old.start - *cursor.start();
edit.old.start = *cursor.start(); edit.old.start = *cursor.start();
cursor.seek(&edit.old.end, Bias::Right, &()); cursor.seek(&edit.old.end, Bias::Right);
cursor.next(&()); cursor.next();
let mut delta = edit.new_len().0 as isize - edit.old_len().0 as isize; let mut delta = edit.new_len().0 as isize - edit.old_len().0 as isize;
loop { loop {
@ -449,8 +449,8 @@ impl FoldMap {
if next_edit.old.end >= edit.old.end { if next_edit.old.end >= edit.old.end {
edit.old.end = next_edit.old.end; edit.old.end = next_edit.old.end;
cursor.seek(&edit.old.end, Bias::Right, &()); cursor.seek(&edit.old.end, Bias::Right);
cursor.next(&()); cursor.next();
} }
} else { } else {
break; break;
@ -467,11 +467,7 @@ impl FoldMap {
.snapshot .snapshot
.folds .folds
.cursor::<FoldRange>(&inlay_snapshot.buffer); .cursor::<FoldRange>(&inlay_snapshot.buffer);
folds_cursor.seek( folds_cursor.seek(&FoldRange(anchor..Anchor::max()), Bias::Left);
&FoldRange(anchor..Anchor::max()),
Bias::Left,
&inlay_snapshot.buffer,
);
let mut folds = iter::from_fn({ let mut folds = iter::from_fn({
let inlay_snapshot = &inlay_snapshot; let inlay_snapshot = &inlay_snapshot;
@ -485,7 +481,7 @@ impl FoldMap {
..inlay_snapshot.to_inlay_offset(buffer_end), ..inlay_snapshot.to_inlay_offset(buffer_end),
) )
}); });
folds_cursor.next(&inlay_snapshot.buffer); folds_cursor.next();
item item
} }
}) })
@ -558,7 +554,7 @@ impl FoldMap {
} }
} }
new_transforms.append(cursor.suffix(&()), &()); new_transforms.append(cursor.suffix(), &());
if new_transforms.is_empty() { if new_transforms.is_empty() {
let text_summary = inlay_snapshot.text_summary(); let text_summary = inlay_snapshot.text_summary();
push_isomorphic(&mut new_transforms, text_summary); push_isomorphic(&mut new_transforms, text_summary);
@ -575,31 +571,31 @@ impl FoldMap {
let mut new_transforms = new_transforms.cursor::<(InlayOffset, FoldOffset)>(&()); let mut new_transforms = new_transforms.cursor::<(InlayOffset, FoldOffset)>(&());
for mut edit in inlay_edits { for mut edit in inlay_edits {
old_transforms.seek(&edit.old.start, Bias::Left, &()); old_transforms.seek(&edit.old.start, Bias::Left);
if old_transforms.item().map_or(false, |t| t.is_fold()) { if old_transforms.item().map_or(false, |t| t.is_fold()) {
edit.old.start = old_transforms.start().0; edit.old.start = old_transforms.start().0;
} }
let old_start = let old_start =
old_transforms.start().1.0 + (edit.old.start - old_transforms.start().0).0; old_transforms.start().1.0 + (edit.old.start - old_transforms.start().0).0;
old_transforms.seek_forward(&edit.old.end, Bias::Right, &()); old_transforms.seek_forward(&edit.old.end, Bias::Right);
if old_transforms.item().map_or(false, |t| t.is_fold()) { if old_transforms.item().map_or(false, |t| t.is_fold()) {
old_transforms.next(&()); old_transforms.next();
edit.old.end = old_transforms.start().0; edit.old.end = old_transforms.start().0;
} }
let old_end = let old_end =
old_transforms.start().1.0 + (edit.old.end - old_transforms.start().0).0; old_transforms.start().1.0 + (edit.old.end - old_transforms.start().0).0;
new_transforms.seek(&edit.new.start, Bias::Left, &()); new_transforms.seek(&edit.new.start, Bias::Left);
if new_transforms.item().map_or(false, |t| t.is_fold()) { if new_transforms.item().map_or(false, |t| t.is_fold()) {
edit.new.start = new_transforms.start().0; edit.new.start = new_transforms.start().0;
} }
let new_start = let new_start =
new_transforms.start().1.0 + (edit.new.start - new_transforms.start().0).0; new_transforms.start().1.0 + (edit.new.start - new_transforms.start().0).0;
new_transforms.seek_forward(&edit.new.end, Bias::Right, &()); new_transforms.seek_forward(&edit.new.end, Bias::Right);
if new_transforms.item().map_or(false, |t| t.is_fold()) { if new_transforms.item().map_or(false, |t| t.is_fold()) {
new_transforms.next(&()); new_transforms.next();
edit.new.end = new_transforms.start().0; edit.new.end = new_transforms.start().0;
} }
let new_end = let new_end =
@ -656,10 +652,10 @@ impl FoldSnapshot {
let mut summary = TextSummary::default(); let mut summary = TextSummary::default();
let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(&()); let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(&());
cursor.seek(&range.start, Bias::Right, &()); cursor.seek(&range.start, Bias::Right);
if let Some(transform) = cursor.item() { if let Some(transform) = cursor.item() {
let start_in_transform = range.start.0 - cursor.start().0.0; let start_in_transform = range.start.0 - cursor.start().0.0;
let end_in_transform = cmp::min(range.end, cursor.end(&()).0).0 - cursor.start().0.0; let end_in_transform = cmp::min(range.end, cursor.end().0).0 - cursor.start().0.0;
if let Some(placeholder) = transform.placeholder.as_ref() { if let Some(placeholder) = transform.placeholder.as_ref() {
summary = TextSummary::from( summary = TextSummary::from(
&placeholder.text &placeholder.text
@ -678,10 +674,10 @@ impl FoldSnapshot {
} }
} }
if range.end > cursor.end(&()).0 { if range.end > cursor.end().0 {
cursor.next(&()); cursor.next();
summary += &cursor summary += &cursor
.summary::<_, TransformSummary>(&range.end, Bias::Right, &()) .summary::<_, TransformSummary>(&range.end, Bias::Right)
.output; .output;
if let Some(transform) = cursor.item() { if let Some(transform) = cursor.item() {
let end_in_transform = range.end.0 - cursor.start().0.0; let end_in_transform = range.end.0 - cursor.start().0.0;
@ -705,19 +701,16 @@ impl FoldSnapshot {
pub fn to_fold_point(&self, point: InlayPoint, bias: Bias) -> FoldPoint { pub fn to_fold_point(&self, point: InlayPoint, bias: Bias) -> FoldPoint {
let mut cursor = self.transforms.cursor::<(InlayPoint, FoldPoint)>(&()); let mut cursor = self.transforms.cursor::<(InlayPoint, FoldPoint)>(&());
cursor.seek(&point, Bias::Right, &()); cursor.seek(&point, Bias::Right);
if cursor.item().map_or(false, |t| t.is_fold()) { if cursor.item().map_or(false, |t| t.is_fold()) {
if bias == Bias::Left || point == cursor.start().0 { if bias == Bias::Left || point == cursor.start().0 {
cursor.start().1 cursor.start().1
} else { } else {
cursor.end(&()).1 cursor.end().1
} }
} else { } else {
let overshoot = point.0 - cursor.start().0.0; let overshoot = point.0 - cursor.start().0.0;
FoldPoint(cmp::min( FoldPoint(cmp::min(cursor.start().1.0 + overshoot, cursor.end().1.0))
cursor.start().1.0 + overshoot,
cursor.end(&()).1.0,
))
} }
} }
@ -742,7 +735,7 @@ impl FoldSnapshot {
let fold_point = FoldPoint::new(start_row, 0); let fold_point = FoldPoint::new(start_row, 0);
let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(&()); let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(&());
cursor.seek(&fold_point, Bias::Left, &()); cursor.seek(&fold_point, Bias::Left);
let overshoot = fold_point.0 - cursor.start().0.0; let overshoot = fold_point.0 - cursor.start().0.0;
let inlay_point = InlayPoint(cursor.start().1.0 + overshoot); let inlay_point = InlayPoint(cursor.start().1.0 + overshoot);
@ -773,7 +766,7 @@ impl FoldSnapshot {
let mut folds = intersecting_folds(&self.inlay_snapshot, &self.folds, range, false); let mut folds = intersecting_folds(&self.inlay_snapshot, &self.folds, range, false);
iter::from_fn(move || { iter::from_fn(move || {
let item = folds.item(); let item = folds.item();
folds.next(&self.inlay_snapshot.buffer); folds.next();
item item
}) })
} }
@ -785,7 +778,7 @@ impl FoldSnapshot {
let buffer_offset = offset.to_offset(&self.inlay_snapshot.buffer); let buffer_offset = offset.to_offset(&self.inlay_snapshot.buffer);
let inlay_offset = self.inlay_snapshot.to_inlay_offset(buffer_offset); let inlay_offset = self.inlay_snapshot.to_inlay_offset(buffer_offset);
let mut cursor = self.transforms.cursor::<InlayOffset>(&()); let mut cursor = self.transforms.cursor::<InlayOffset>(&());
cursor.seek(&inlay_offset, Bias::Right, &()); cursor.seek(&inlay_offset, Bias::Right);
cursor.item().map_or(false, |t| t.placeholder.is_some()) cursor.item().map_or(false, |t| t.placeholder.is_some())
} }
@ -794,7 +787,7 @@ impl FoldSnapshot {
.inlay_snapshot .inlay_snapshot
.to_inlay_point(Point::new(buffer_row.0, 0)); .to_inlay_point(Point::new(buffer_row.0, 0));
let mut cursor = self.transforms.cursor::<InlayPoint>(&()); let mut cursor = self.transforms.cursor::<InlayPoint>(&());
cursor.seek(&inlay_point, Bias::Right, &()); cursor.seek(&inlay_point, Bias::Right);
loop { loop {
match cursor.item() { match cursor.item() {
Some(transform) => { Some(transform) => {
@ -808,11 +801,11 @@ impl FoldSnapshot {
None => return false, None => return false,
} }
if cursor.end(&()).row() == inlay_point.row() { if cursor.end().row() == inlay_point.row() {
cursor.next(&()); cursor.next();
} else { } else {
inlay_point.0 += Point::new(1, 0); inlay_point.0 += Point::new(1, 0);
cursor.seek(&inlay_point, Bias::Right, &()); cursor.seek(&inlay_point, Bias::Right);
} }
} }
} }
@ -824,14 +817,14 @@ impl FoldSnapshot {
highlights: Highlights<'a>, highlights: Highlights<'a>,
) -> FoldChunks<'a> { ) -> FoldChunks<'a> {
let mut transform_cursor = self.transforms.cursor::<(FoldOffset, InlayOffset)>(&()); let mut transform_cursor = self.transforms.cursor::<(FoldOffset, InlayOffset)>(&());
transform_cursor.seek(&range.start, Bias::Right, &()); transform_cursor.seek(&range.start, Bias::Right);
let inlay_start = { let inlay_start = {
let overshoot = range.start.0 - transform_cursor.start().0.0; let overshoot = range.start.0 - transform_cursor.start().0.0;
transform_cursor.start().1 + InlayOffset(overshoot) transform_cursor.start().1 + InlayOffset(overshoot)
}; };
let transform_end = transform_cursor.end(&()); let transform_end = transform_cursor.end();
let inlay_end = if transform_cursor let inlay_end = if transform_cursor
.item() .item()
@ -879,14 +872,14 @@ impl FoldSnapshot {
pub fn clip_point(&self, point: FoldPoint, bias: Bias) -> FoldPoint { pub fn clip_point(&self, point: FoldPoint, bias: Bias) -> FoldPoint {
let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(&()); let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(&());
cursor.seek(&point, Bias::Right, &()); cursor.seek(&point, Bias::Right);
if let Some(transform) = cursor.item() { if let Some(transform) = cursor.item() {
let transform_start = cursor.start().0.0; let transform_start = cursor.start().0.0;
if transform.placeholder.is_some() { if transform.placeholder.is_some() {
if point.0 == transform_start || matches!(bias, Bias::Left) { if point.0 == transform_start || matches!(bias, Bias::Left) {
FoldPoint(transform_start) FoldPoint(transform_start)
} else { } else {
FoldPoint(cursor.end(&()).0.0) FoldPoint(cursor.end().0.0)
} }
} else { } else {
let overshoot = InlayPoint(point.0 - transform_start); let overshoot = InlayPoint(point.0 - transform_start);
@ -945,7 +938,7 @@ fn intersecting_folds<'a>(
start_cmp == Ordering::Less && end_cmp == Ordering::Greater start_cmp == Ordering::Less && end_cmp == Ordering::Greater
} }
}); });
cursor.next(buffer); cursor.next();
cursor cursor
} }
@ -1211,7 +1204,7 @@ pub struct FoldRows<'a> {
impl FoldRows<'_> { impl FoldRows<'_> {
pub(crate) fn seek(&mut self, row: u32) { pub(crate) fn seek(&mut self, row: u32) {
let fold_point = FoldPoint::new(row, 0); let fold_point = FoldPoint::new(row, 0);
self.cursor.seek(&fold_point, Bias::Left, &()); self.cursor.seek(&fold_point, Bias::Left);
let overshoot = fold_point.0 - self.cursor.start().0.0; let overshoot = fold_point.0 - self.cursor.start().0.0;
let inlay_point = InlayPoint(self.cursor.start().1.0 + overshoot); let inlay_point = InlayPoint(self.cursor.start().1.0 + overshoot);
self.input_rows.seek(inlay_point.row()); self.input_rows.seek(inlay_point.row());
@ -1224,8 +1217,8 @@ impl Iterator for FoldRows<'_> {
fn next(&mut self) -> Option<Self::Item> { fn next(&mut self) -> Option<Self::Item> {
let mut traversed_fold = false; let mut traversed_fold = false;
while self.fold_point > self.cursor.end(&()).0 { while self.fold_point > self.cursor.end().0 {
self.cursor.next(&()); self.cursor.next();
traversed_fold = true; traversed_fold = true;
if self.cursor.item().is_none() { if self.cursor.item().is_none() {
break; break;
@ -1330,14 +1323,14 @@ pub struct FoldChunks<'a> {
impl FoldChunks<'_> { impl FoldChunks<'_> {
pub(crate) fn seek(&mut self, range: Range<FoldOffset>) { pub(crate) fn seek(&mut self, range: Range<FoldOffset>) {
self.transform_cursor.seek(&range.start, Bias::Right, &()); self.transform_cursor.seek(&range.start, Bias::Right);
let inlay_start = { let inlay_start = {
let overshoot = range.start.0 - self.transform_cursor.start().0.0; let overshoot = range.start.0 - self.transform_cursor.start().0.0;
self.transform_cursor.start().1 + InlayOffset(overshoot) self.transform_cursor.start().1 + InlayOffset(overshoot)
}; };
let transform_end = self.transform_cursor.end(&()); let transform_end = self.transform_cursor.end();
let inlay_end = if self let inlay_end = if self
.transform_cursor .transform_cursor
@ -1376,10 +1369,10 @@ impl<'a> Iterator for FoldChunks<'a> {
self.inlay_chunk.take(); self.inlay_chunk.take();
self.inlay_offset += InlayOffset(transform.summary.input.len); self.inlay_offset += InlayOffset(transform.summary.input.len);
while self.inlay_offset >= self.transform_cursor.end(&()).1 while self.inlay_offset >= self.transform_cursor.end().1
&& self.transform_cursor.item().is_some() && self.transform_cursor.item().is_some()
{ {
self.transform_cursor.next(&()); self.transform_cursor.next();
} }
self.output_offset.0 += placeholder.text.len(); self.output_offset.0 += placeholder.text.len();
@ -1396,7 +1389,7 @@ impl<'a> Iterator for FoldChunks<'a> {
&& self.inlay_chunks.offset() != self.inlay_offset && self.inlay_chunks.offset() != self.inlay_offset
{ {
let transform_start = self.transform_cursor.start(); let transform_start = self.transform_cursor.start();
let transform_end = self.transform_cursor.end(&()); let transform_end = self.transform_cursor.end();
let inlay_end = if self.max_output_offset < transform_end.0 { let inlay_end = if self.max_output_offset < transform_end.0 {
let overshoot = self.max_output_offset.0 - transform_start.0.0; let overshoot = self.max_output_offset.0 - transform_start.0.0;
transform_start.1 + InlayOffset(overshoot) transform_start.1 + InlayOffset(overshoot)
@ -1417,14 +1410,14 @@ impl<'a> Iterator for FoldChunks<'a> {
if let Some((buffer_chunk_start, mut inlay_chunk)) = self.inlay_chunk.clone() { if let Some((buffer_chunk_start, mut inlay_chunk)) = self.inlay_chunk.clone() {
let chunk = &mut inlay_chunk.chunk; let chunk = &mut inlay_chunk.chunk;
let buffer_chunk_end = buffer_chunk_start + InlayOffset(chunk.text.len()); let buffer_chunk_end = buffer_chunk_start + InlayOffset(chunk.text.len());
let transform_end = self.transform_cursor.end(&()).1; let transform_end = self.transform_cursor.end().1;
let chunk_end = buffer_chunk_end.min(transform_end); let chunk_end = buffer_chunk_end.min(transform_end);
chunk.text = &chunk.text chunk.text = &chunk.text
[(self.inlay_offset - buffer_chunk_start).0..(chunk_end - buffer_chunk_start).0]; [(self.inlay_offset - buffer_chunk_start).0..(chunk_end - buffer_chunk_start).0];
if chunk_end == transform_end { if chunk_end == transform_end {
self.transform_cursor.next(&()); self.transform_cursor.next();
} else if chunk_end == buffer_chunk_end { } else if chunk_end == buffer_chunk_end {
self.inlay_chunk.take(); self.inlay_chunk.take();
} }
@ -1456,7 +1449,7 @@ impl FoldOffset {
let mut cursor = snapshot let mut cursor = snapshot
.transforms .transforms
.cursor::<(FoldOffset, TransformSummary)>(&()); .cursor::<(FoldOffset, TransformSummary)>(&());
cursor.seek(&self, Bias::Right, &()); cursor.seek(&self, Bias::Right);
let overshoot = if cursor.item().map_or(true, |t| t.is_fold()) { let overshoot = if cursor.item().map_or(true, |t| t.is_fold()) {
Point::new(0, (self.0 - cursor.start().0.0) as u32) Point::new(0, (self.0 - cursor.start().0.0) as u32)
} else { } else {
@ -1470,7 +1463,7 @@ impl FoldOffset {
#[cfg(test)] #[cfg(test)]
pub fn to_inlay_offset(self, snapshot: &FoldSnapshot) -> InlayOffset { pub fn to_inlay_offset(self, snapshot: &FoldSnapshot) -> InlayOffset {
let mut cursor = snapshot.transforms.cursor::<(FoldOffset, InlayOffset)>(&()); let mut cursor = snapshot.transforms.cursor::<(FoldOffset, InlayOffset)>(&());
cursor.seek(&self, Bias::Right, &()); cursor.seek(&self, Bias::Right);
let overshoot = self.0 - cursor.start().0.0; let overshoot = self.0 - cursor.start().0.0;
InlayOffset(cursor.start().1.0 + overshoot) InlayOffset(cursor.start().1.0 + overshoot)
} }

View file

@ -263,7 +263,7 @@ pub struct InlayChunk<'a> {
impl InlayChunks<'_> { impl InlayChunks<'_> {
pub fn seek(&mut self, new_range: Range<InlayOffset>) { pub fn seek(&mut self, new_range: Range<InlayOffset>) {
self.transforms.seek(&new_range.start, Bias::Right, &()); self.transforms.seek(&new_range.start, Bias::Right);
let buffer_range = self.snapshot.to_buffer_offset(new_range.start) let buffer_range = self.snapshot.to_buffer_offset(new_range.start)
..self.snapshot.to_buffer_offset(new_range.end); ..self.snapshot.to_buffer_offset(new_range.end);
@ -296,12 +296,12 @@ impl<'a> Iterator for InlayChunks<'a> {
*chunk = self.buffer_chunks.next().unwrap(); *chunk = self.buffer_chunks.next().unwrap();
} }
let desired_bytes = self.transforms.end(&()).0.0 - self.output_offset.0; let desired_bytes = self.transforms.end().0.0 - self.output_offset.0;
// If we're already at the transform boundary, skip to the next transform // If we're already at the transform boundary, skip to the next transform
if desired_bytes == 0 { if desired_bytes == 0 {
self.inlay_chunks = None; self.inlay_chunks = None;
self.transforms.next(&()); self.transforms.next();
return self.next(); return self.next();
} }
@ -397,7 +397,7 @@ impl<'a> Iterator for InlayChunks<'a> {
let inlay_chunks = self.inlay_chunks.get_or_insert_with(|| { let inlay_chunks = self.inlay_chunks.get_or_insert_with(|| {
let start = offset_in_inlay; let start = offset_in_inlay;
let end = cmp::min(self.max_output_offset, self.transforms.end(&()).0) let end = cmp::min(self.max_output_offset, self.transforms.end().0)
- self.transforms.start().0; - self.transforms.start().0;
inlay.text.chunks_in_range(start.0..end.0) inlay.text.chunks_in_range(start.0..end.0)
}); });
@ -441,9 +441,9 @@ impl<'a> Iterator for InlayChunks<'a> {
} }
}; };
if self.output_offset >= self.transforms.end(&()).0 { if self.output_offset >= self.transforms.end().0 {
self.inlay_chunks = None; self.inlay_chunks = None;
self.transforms.next(&()); self.transforms.next();
} }
Some(chunk) Some(chunk)
@ -453,7 +453,7 @@ impl<'a> Iterator for InlayChunks<'a> {
impl InlayBufferRows<'_> { impl InlayBufferRows<'_> {
pub fn seek(&mut self, row: u32) { pub fn seek(&mut self, row: u32) {
let inlay_point = InlayPoint::new(row, 0); let inlay_point = InlayPoint::new(row, 0);
self.transforms.seek(&inlay_point, Bias::Left, &()); self.transforms.seek(&inlay_point, Bias::Left);
let mut buffer_point = self.transforms.start().1; let mut buffer_point = self.transforms.start().1;
let buffer_row = MultiBufferRow(if row == 0 { let buffer_row = MultiBufferRow(if row == 0 {
@ -487,7 +487,7 @@ impl Iterator for InlayBufferRows<'_> {
self.inlay_row += 1; self.inlay_row += 1;
self.transforms self.transforms
.seek_forward(&InlayPoint::new(self.inlay_row, 0), Bias::Left, &()); .seek_forward(&InlayPoint::new(self.inlay_row, 0), Bias::Left);
Some(buffer_row) Some(buffer_row)
} }
@ -556,18 +556,18 @@ impl InlayMap {
let mut cursor = snapshot.transforms.cursor::<(usize, InlayOffset)>(&()); let mut cursor = snapshot.transforms.cursor::<(usize, InlayOffset)>(&());
let mut buffer_edits_iter = buffer_edits.iter().peekable(); let mut buffer_edits_iter = buffer_edits.iter().peekable();
while let Some(buffer_edit) = buffer_edits_iter.next() { while let Some(buffer_edit) = buffer_edits_iter.next() {
new_transforms.append(cursor.slice(&buffer_edit.old.start, Bias::Left, &()), &()); new_transforms.append(cursor.slice(&buffer_edit.old.start, Bias::Left), &());
if let Some(Transform::Isomorphic(transform)) = cursor.item() { if let Some(Transform::Isomorphic(transform)) = cursor.item() {
if cursor.end(&()).0 == buffer_edit.old.start { if cursor.end().0 == buffer_edit.old.start {
push_isomorphic(&mut new_transforms, *transform); push_isomorphic(&mut new_transforms, *transform);
cursor.next(&()); cursor.next();
} }
} }
// Remove all the inlays and transforms contained by the edit. // Remove all the inlays and transforms contained by the edit.
let old_start = let old_start =
cursor.start().1 + InlayOffset(buffer_edit.old.start - cursor.start().0); cursor.start().1 + InlayOffset(buffer_edit.old.start - cursor.start().0);
cursor.seek(&buffer_edit.old.end, Bias::Right, &()); cursor.seek(&buffer_edit.old.end, Bias::Right);
let old_end = let old_end =
cursor.start().1 + InlayOffset(buffer_edit.old.end - cursor.start().0); cursor.start().1 + InlayOffset(buffer_edit.old.end - cursor.start().0);
@ -625,20 +625,20 @@ impl InlayMap {
// we can push its remainder. // we can push its remainder.
if buffer_edits_iter if buffer_edits_iter
.peek() .peek()
.map_or(true, |edit| edit.old.start >= cursor.end(&()).0) .map_or(true, |edit| edit.old.start >= cursor.end().0)
{ {
let transform_start = new_transforms.summary().input.len; let transform_start = new_transforms.summary().input.len;
let transform_end = let transform_end =
buffer_edit.new.end + (cursor.end(&()).0 - buffer_edit.old.end); buffer_edit.new.end + (cursor.end().0 - buffer_edit.old.end);
push_isomorphic( push_isomorphic(
&mut new_transforms, &mut new_transforms,
buffer_snapshot.text_summary_for_range(transform_start..transform_end), buffer_snapshot.text_summary_for_range(transform_start..transform_end),
); );
cursor.next(&()); cursor.next();
} }
} }
new_transforms.append(cursor.suffix(&()), &()); new_transforms.append(cursor.suffix(), &());
if new_transforms.is_empty() { if new_transforms.is_empty() {
new_transforms.push(Transform::Isomorphic(Default::default()), &()); new_transforms.push(Transform::Isomorphic(Default::default()), &());
} }
@ -773,7 +773,7 @@ impl InlaySnapshot {
let mut cursor = self let mut cursor = self
.transforms .transforms
.cursor::<(InlayOffset, (InlayPoint, usize))>(&()); .cursor::<(InlayOffset, (InlayPoint, usize))>(&());
cursor.seek(&offset, Bias::Right, &()); cursor.seek(&offset, Bias::Right);
let overshoot = offset.0 - cursor.start().0.0; let overshoot = offset.0 - cursor.start().0.0;
match cursor.item() { match cursor.item() {
Some(Transform::Isomorphic(_)) => { Some(Transform::Isomorphic(_)) => {
@ -803,7 +803,7 @@ impl InlaySnapshot {
let mut cursor = self let mut cursor = self
.transforms .transforms
.cursor::<(InlayPoint, (InlayOffset, Point))>(&()); .cursor::<(InlayPoint, (InlayOffset, Point))>(&());
cursor.seek(&point, Bias::Right, &()); cursor.seek(&point, Bias::Right);
let overshoot = point.0 - cursor.start().0.0; let overshoot = point.0 - cursor.start().0.0;
match cursor.item() { match cursor.item() {
Some(Transform::Isomorphic(_)) => { Some(Transform::Isomorphic(_)) => {
@ -822,7 +822,7 @@ impl InlaySnapshot {
} }
pub fn to_buffer_point(&self, point: InlayPoint) -> Point { pub fn to_buffer_point(&self, point: InlayPoint) -> Point {
let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(&()); let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(&());
cursor.seek(&point, Bias::Right, &()); cursor.seek(&point, Bias::Right);
match cursor.item() { match cursor.item() {
Some(Transform::Isomorphic(_)) => { Some(Transform::Isomorphic(_)) => {
let overshoot = point.0 - cursor.start().0.0; let overshoot = point.0 - cursor.start().0.0;
@ -834,7 +834,7 @@ impl InlaySnapshot {
} }
pub fn to_buffer_offset(&self, offset: InlayOffset) -> usize { pub fn to_buffer_offset(&self, offset: InlayOffset) -> usize {
let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(&()); let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(&());
cursor.seek(&offset, Bias::Right, &()); cursor.seek(&offset, Bias::Right);
match cursor.item() { match cursor.item() {
Some(Transform::Isomorphic(_)) => { Some(Transform::Isomorphic(_)) => {
let overshoot = offset - cursor.start().0; let overshoot = offset - cursor.start().0;
@ -847,19 +847,19 @@ impl InlaySnapshot {
pub fn to_inlay_offset(&self, offset: usize) -> InlayOffset { pub fn to_inlay_offset(&self, offset: usize) -> InlayOffset {
let mut cursor = self.transforms.cursor::<(usize, InlayOffset)>(&()); let mut cursor = self.transforms.cursor::<(usize, InlayOffset)>(&());
cursor.seek(&offset, Bias::Left, &()); cursor.seek(&offset, Bias::Left);
loop { loop {
match cursor.item() { match cursor.item() {
Some(Transform::Isomorphic(_)) => { Some(Transform::Isomorphic(_)) => {
if offset == cursor.end(&()).0 { if offset == cursor.end().0 {
while let Some(Transform::Inlay(inlay)) = cursor.next_item() { while let Some(Transform::Inlay(inlay)) = cursor.next_item() {
if inlay.position.bias() == Bias::Right { if inlay.position.bias() == Bias::Right {
break; break;
} else { } else {
cursor.next(&()); cursor.next();
} }
} }
return cursor.end(&()).1; return cursor.end().1;
} else { } else {
let overshoot = offset - cursor.start().0; let overshoot = offset - cursor.start().0;
return InlayOffset(cursor.start().1.0 + overshoot); return InlayOffset(cursor.start().1.0 + overshoot);
@ -867,7 +867,7 @@ impl InlaySnapshot {
} }
Some(Transform::Inlay(inlay)) => { Some(Transform::Inlay(inlay)) => {
if inlay.position.bias() == Bias::Left { if inlay.position.bias() == Bias::Left {
cursor.next(&()); cursor.next();
} else { } else {
return cursor.start().1; return cursor.start().1;
} }
@ -880,19 +880,19 @@ impl InlaySnapshot {
} }
pub fn to_inlay_point(&self, point: Point) -> InlayPoint { pub fn to_inlay_point(&self, point: Point) -> InlayPoint {
let mut cursor = self.transforms.cursor::<(Point, InlayPoint)>(&()); let mut cursor = self.transforms.cursor::<(Point, InlayPoint)>(&());
cursor.seek(&point, Bias::Left, &()); cursor.seek(&point, Bias::Left);
loop { loop {
match cursor.item() { match cursor.item() {
Some(Transform::Isomorphic(_)) => { Some(Transform::Isomorphic(_)) => {
if point == cursor.end(&()).0 { if point == cursor.end().0 {
while let Some(Transform::Inlay(inlay)) = cursor.next_item() { while let Some(Transform::Inlay(inlay)) = cursor.next_item() {
if inlay.position.bias() == Bias::Right { if inlay.position.bias() == Bias::Right {
break; break;
} else { } else {
cursor.next(&()); cursor.next();
} }
} }
return cursor.end(&()).1; return cursor.end().1;
} else { } else {
let overshoot = point - cursor.start().0; let overshoot = point - cursor.start().0;
return InlayPoint(cursor.start().1.0 + overshoot); return InlayPoint(cursor.start().1.0 + overshoot);
@ -900,7 +900,7 @@ impl InlaySnapshot {
} }
Some(Transform::Inlay(inlay)) => { Some(Transform::Inlay(inlay)) => {
if inlay.position.bias() == Bias::Left { if inlay.position.bias() == Bias::Left {
cursor.next(&()); cursor.next();
} else { } else {
return cursor.start().1; return cursor.start().1;
} }
@ -914,7 +914,7 @@ impl InlaySnapshot {
pub fn clip_point(&self, mut point: InlayPoint, mut bias: Bias) -> InlayPoint { pub fn clip_point(&self, mut point: InlayPoint, mut bias: Bias) -> InlayPoint {
let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(&()); let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(&());
cursor.seek(&point, Bias::Left, &()); cursor.seek(&point, Bias::Left);
loop { loop {
match cursor.item() { match cursor.item() {
Some(Transform::Isomorphic(transform)) => { Some(Transform::Isomorphic(transform)) => {
@ -923,7 +923,7 @@ impl InlaySnapshot {
if inlay.position.bias() == Bias::Left { if inlay.position.bias() == Bias::Left {
return point; return point;
} else if bias == Bias::Left { } else if bias == Bias::Left {
cursor.prev(&()); cursor.prev();
} else if transform.first_line_chars == 0 { } else if transform.first_line_chars == 0 {
point.0 += Point::new(1, 0); point.0 += Point::new(1, 0);
} else { } else {
@ -932,12 +932,12 @@ impl InlaySnapshot {
} else { } else {
return point; return point;
} }
} else if cursor.end(&()).0 == point { } else if cursor.end().0 == point {
if let Some(Transform::Inlay(inlay)) = cursor.next_item() { if let Some(Transform::Inlay(inlay)) = cursor.next_item() {
if inlay.position.bias() == Bias::Right { if inlay.position.bias() == Bias::Right {
return point; return point;
} else if bias == Bias::Right { } else if bias == Bias::Right {
cursor.next(&()); cursor.next();
} else if point.0.column == 0 { } else if point.0.column == 0 {
point.0.row -= 1; point.0.row -= 1;
point.0.column = self.line_len(point.0.row); point.0.column = self.line_len(point.0.row);
@ -970,7 +970,7 @@ impl InlaySnapshot {
} }
_ => return point, _ => return point,
} }
} else if point == cursor.end(&()).0 && inlay.position.bias() == Bias::Left { } else if point == cursor.end().0 && inlay.position.bias() == Bias::Left {
match cursor.next_item() { match cursor.next_item() {
Some(Transform::Inlay(inlay)) => { Some(Transform::Inlay(inlay)) => {
if inlay.position.bias() == Bias::Right { if inlay.position.bias() == Bias::Right {
@ -983,9 +983,9 @@ impl InlaySnapshot {
if bias == Bias::Left { if bias == Bias::Left {
point = cursor.start().0; point = cursor.start().0;
cursor.prev(&()); cursor.prev();
} else { } else {
cursor.next(&()); cursor.next();
point = cursor.start().0; point = cursor.start().0;
} }
} }
@ -993,9 +993,9 @@ impl InlaySnapshot {
bias = bias.invert(); bias = bias.invert();
if bias == Bias::Left { if bias == Bias::Left {
point = cursor.start().0; point = cursor.start().0;
cursor.prev(&()); cursor.prev();
} else { } else {
cursor.next(&()); cursor.next();
point = cursor.start().0; point = cursor.start().0;
} }
} }
@ -1011,7 +1011,7 @@ impl InlaySnapshot {
let mut summary = TextSummary::default(); let mut summary = TextSummary::default();
let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(&()); let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(&());
cursor.seek(&range.start, Bias::Right, &()); cursor.seek(&range.start, Bias::Right);
let overshoot = range.start.0 - cursor.start().0.0; let overshoot = range.start.0 - cursor.start().0.0;
match cursor.item() { match cursor.item() {
@ -1019,22 +1019,22 @@ impl InlaySnapshot {
let buffer_start = cursor.start().1; let buffer_start = cursor.start().1;
let suffix_start = buffer_start + overshoot; let suffix_start = buffer_start + overshoot;
let suffix_end = let suffix_end =
buffer_start + (cmp::min(cursor.end(&()).0, range.end).0 - cursor.start().0.0); buffer_start + (cmp::min(cursor.end().0, range.end).0 - cursor.start().0.0);
summary = self.buffer.text_summary_for_range(suffix_start..suffix_end); summary = self.buffer.text_summary_for_range(suffix_start..suffix_end);
cursor.next(&()); cursor.next();
} }
Some(Transform::Inlay(inlay)) => { Some(Transform::Inlay(inlay)) => {
let suffix_start = overshoot; let suffix_start = overshoot;
let suffix_end = cmp::min(cursor.end(&()).0, range.end).0 - cursor.start().0.0; let suffix_end = cmp::min(cursor.end().0, range.end).0 - cursor.start().0.0;
summary = inlay.text.cursor(suffix_start).summary(suffix_end); summary = inlay.text.cursor(suffix_start).summary(suffix_end);
cursor.next(&()); cursor.next();
} }
None => {} None => {}
} }
if range.end > cursor.start().0 { if range.end > cursor.start().0 {
summary += cursor summary += cursor
.summary::<_, TransformSummary>(&range.end, Bias::Right, &()) .summary::<_, TransformSummary>(&range.end, Bias::Right)
.output; .output;
let overshoot = range.end.0 - cursor.start().0.0; let overshoot = range.end.0 - cursor.start().0.0;
@ -1060,7 +1060,7 @@ impl InlaySnapshot {
pub fn row_infos(&self, row: u32) -> InlayBufferRows<'_> { pub fn row_infos(&self, row: u32) -> InlayBufferRows<'_> {
let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(&()); let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(&());
let inlay_point = InlayPoint::new(row, 0); let inlay_point = InlayPoint::new(row, 0);
cursor.seek(&inlay_point, Bias::Left, &()); cursor.seek(&inlay_point, Bias::Left);
let max_buffer_row = self.buffer.max_row(); let max_buffer_row = self.buffer.max_row();
let mut buffer_point = cursor.start().1; let mut buffer_point = cursor.start().1;
@ -1101,7 +1101,7 @@ impl InlaySnapshot {
highlights: Highlights<'a>, highlights: Highlights<'a>,
) -> InlayChunks<'a> { ) -> InlayChunks<'a> {
let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(&()); let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(&());
cursor.seek(&range.start, Bias::Right, &()); cursor.seek(&range.start, Bias::Right);
let buffer_range = self.to_buffer_offset(range.start)..self.to_buffer_offset(range.end); let buffer_range = self.to_buffer_offset(range.start)..self.to_buffer_offset(range.end);
let buffer_chunks = CustomHighlightsChunks::new( let buffer_chunks = CustomHighlightsChunks::new(

View file

@ -72,7 +72,7 @@ pub struct WrapRows<'a> {
impl WrapRows<'_> { impl WrapRows<'_> {
pub(crate) fn seek(&mut self, start_row: u32) { pub(crate) fn seek(&mut self, start_row: u32) {
self.transforms self.transforms
.seek(&WrapPoint::new(start_row, 0), Bias::Left, &()); .seek(&WrapPoint::new(start_row, 0), Bias::Left);
let mut input_row = self.transforms.start().1.row(); let mut input_row = self.transforms.start().1.row();
if self.transforms.item().map_or(false, |t| t.is_isomorphic()) { if self.transforms.item().map_or(false, |t| t.is_isomorphic()) {
input_row += start_row - self.transforms.start().0.row(); input_row += start_row - self.transforms.start().0.row();
@ -340,7 +340,7 @@ impl WrapSnapshot {
let mut tab_edits_iter = tab_edits.iter().peekable(); let mut tab_edits_iter = tab_edits.iter().peekable();
new_transforms = new_transforms =
old_cursor.slice(&tab_edits_iter.peek().unwrap().old.start, Bias::Right, &()); old_cursor.slice(&tab_edits_iter.peek().unwrap().old.start, Bias::Right);
while let Some(edit) = tab_edits_iter.next() { while let Some(edit) = tab_edits_iter.next() {
if edit.new.start > TabPoint::from(new_transforms.summary().input.lines) { if edit.new.start > TabPoint::from(new_transforms.summary().input.lines) {
@ -356,31 +356,29 @@ impl WrapSnapshot {
)); ));
} }
old_cursor.seek_forward(&edit.old.end, Bias::Right, &()); old_cursor.seek_forward(&edit.old.end, Bias::Right);
if let Some(next_edit) = tab_edits_iter.peek() { if let Some(next_edit) = tab_edits_iter.peek() {
if next_edit.old.start > old_cursor.end(&()) { if next_edit.old.start > old_cursor.end() {
if old_cursor.end(&()) > edit.old.end { if old_cursor.end() > edit.old.end {
let summary = self let summary = self
.tab_snapshot .tab_snapshot
.text_summary_for_range(edit.old.end..old_cursor.end(&())); .text_summary_for_range(edit.old.end..old_cursor.end());
new_transforms.push_or_extend(Transform::isomorphic(summary)); new_transforms.push_or_extend(Transform::isomorphic(summary));
} }
old_cursor.next(&()); old_cursor.next();
new_transforms.append( new_transforms
old_cursor.slice(&next_edit.old.start, Bias::Right, &()), .append(old_cursor.slice(&next_edit.old.start, Bias::Right), &());
&(),
);
} }
} else { } else {
if old_cursor.end(&()) > edit.old.end { if old_cursor.end() > edit.old.end {
let summary = self let summary = self
.tab_snapshot .tab_snapshot
.text_summary_for_range(edit.old.end..old_cursor.end(&())); .text_summary_for_range(edit.old.end..old_cursor.end());
new_transforms.push_or_extend(Transform::isomorphic(summary)); new_transforms.push_or_extend(Transform::isomorphic(summary));
} }
old_cursor.next(&()); old_cursor.next();
new_transforms.append(old_cursor.suffix(&()), &()); new_transforms.append(old_cursor.suffix(), &());
} }
} }
} }
@ -441,7 +439,6 @@ impl WrapSnapshot {
new_transforms = old_cursor.slice( new_transforms = old_cursor.slice(
&TabPoint::new(row_edits.peek().unwrap().old_rows.start, 0), &TabPoint::new(row_edits.peek().unwrap().old_rows.start, 0),
Bias::Right, Bias::Right,
&(),
); );
while let Some(edit) = row_edits.next() { while let Some(edit) = row_edits.next() {
@ -516,34 +513,31 @@ impl WrapSnapshot {
} }
new_transforms.extend(edit_transforms, &()); new_transforms.extend(edit_transforms, &());
old_cursor.seek_forward(&TabPoint::new(edit.old_rows.end, 0), Bias::Right, &()); old_cursor.seek_forward(&TabPoint::new(edit.old_rows.end, 0), Bias::Right);
if let Some(next_edit) = row_edits.peek() { if let Some(next_edit) = row_edits.peek() {
if next_edit.old_rows.start > old_cursor.end(&()).row() { if next_edit.old_rows.start > old_cursor.end().row() {
if old_cursor.end(&()) > TabPoint::new(edit.old_rows.end, 0) { if old_cursor.end() > TabPoint::new(edit.old_rows.end, 0) {
let summary = self.tab_snapshot.text_summary_for_range( let summary = self.tab_snapshot.text_summary_for_range(
TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()), TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(),
); );
new_transforms.push_or_extend(Transform::isomorphic(summary)); new_transforms.push_or_extend(Transform::isomorphic(summary));
} }
old_cursor.next(&()); old_cursor.next();
new_transforms.append( new_transforms.append(
old_cursor.slice( old_cursor
&TabPoint::new(next_edit.old_rows.start, 0), .slice(&TabPoint::new(next_edit.old_rows.start, 0), Bias::Right),
Bias::Right,
&(),
),
&(), &(),
); );
} }
} else { } else {
if old_cursor.end(&()) > TabPoint::new(edit.old_rows.end, 0) { if old_cursor.end() > TabPoint::new(edit.old_rows.end, 0) {
let summary = self.tab_snapshot.text_summary_for_range( let summary = self.tab_snapshot.text_summary_for_range(
TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()), TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(),
); );
new_transforms.push_or_extend(Transform::isomorphic(summary)); new_transforms.push_or_extend(Transform::isomorphic(summary));
} }
old_cursor.next(&()); old_cursor.next();
new_transforms.append(old_cursor.suffix(&()), &()); new_transforms.append(old_cursor.suffix(), &());
} }
} }
} }
@ -570,19 +564,19 @@ impl WrapSnapshot {
tab_edit.new.start.0.column = 0; tab_edit.new.start.0.column = 0;
tab_edit.new.end.0 += Point::new(1, 0); tab_edit.new.end.0 += Point::new(1, 0);
old_cursor.seek(&tab_edit.old.start, Bias::Right, &()); old_cursor.seek(&tab_edit.old.start, Bias::Right);
let mut old_start = old_cursor.start().output.lines; let mut old_start = old_cursor.start().output.lines;
old_start += tab_edit.old.start.0 - old_cursor.start().input.lines; old_start += tab_edit.old.start.0 - old_cursor.start().input.lines;
old_cursor.seek(&tab_edit.old.end, Bias::Right, &()); old_cursor.seek(&tab_edit.old.end, Bias::Right);
let mut old_end = old_cursor.start().output.lines; let mut old_end = old_cursor.start().output.lines;
old_end += tab_edit.old.end.0 - old_cursor.start().input.lines; old_end += tab_edit.old.end.0 - old_cursor.start().input.lines;
new_cursor.seek(&tab_edit.new.start, Bias::Right, &()); new_cursor.seek(&tab_edit.new.start, Bias::Right);
let mut new_start = new_cursor.start().output.lines; let mut new_start = new_cursor.start().output.lines;
new_start += tab_edit.new.start.0 - new_cursor.start().input.lines; new_start += tab_edit.new.start.0 - new_cursor.start().input.lines;
new_cursor.seek(&tab_edit.new.end, Bias::Right, &()); new_cursor.seek(&tab_edit.new.end, Bias::Right);
let mut new_end = new_cursor.start().output.lines; let mut new_end = new_cursor.start().output.lines;
new_end += tab_edit.new.end.0 - new_cursor.start().input.lines; new_end += tab_edit.new.end.0 - new_cursor.start().input.lines;
@ -605,7 +599,7 @@ impl WrapSnapshot {
let output_start = WrapPoint::new(rows.start, 0); let output_start = WrapPoint::new(rows.start, 0);
let output_end = WrapPoint::new(rows.end, 0); let output_end = WrapPoint::new(rows.end, 0);
let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
transforms.seek(&output_start, Bias::Right, &()); transforms.seek(&output_start, Bias::Right);
let mut input_start = TabPoint(transforms.start().1.0); let mut input_start = TabPoint(transforms.start().1.0);
if transforms.item().map_or(false, |t| t.is_isomorphic()) { if transforms.item().map_or(false, |t| t.is_isomorphic()) {
input_start.0 += output_start.0 - transforms.start().0.0; input_start.0 += output_start.0 - transforms.start().0.0;
@ -633,7 +627,7 @@ impl WrapSnapshot {
pub fn line_len(&self, row: u32) -> u32 { pub fn line_len(&self, row: u32) -> u32 {
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Left, &()); cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Left);
if cursor if cursor
.item() .item()
.map_or(false, |transform| transform.is_isomorphic()) .map_or(false, |transform| transform.is_isomorphic())
@ -658,10 +652,10 @@ impl WrapSnapshot {
let end = WrapPoint::new(rows.end, 0); let end = WrapPoint::new(rows.end, 0);
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
cursor.seek(&start, Bias::Right, &()); cursor.seek(&start, Bias::Right);
if let Some(transform) = cursor.item() { if let Some(transform) = cursor.item() {
let start_in_transform = start.0 - cursor.start().0.0; let start_in_transform = start.0 - cursor.start().0.0;
let end_in_transform = cmp::min(end, cursor.end(&()).0).0 - cursor.start().0.0; let end_in_transform = cmp::min(end, cursor.end().0).0 - cursor.start().0.0;
if transform.is_isomorphic() { if transform.is_isomorphic() {
let tab_start = TabPoint(cursor.start().1.0 + start_in_transform); let tab_start = TabPoint(cursor.start().1.0 + start_in_transform);
let tab_end = TabPoint(cursor.start().1.0 + end_in_transform); let tab_end = TabPoint(cursor.start().1.0 + end_in_transform);
@ -678,12 +672,12 @@ impl WrapSnapshot {
}; };
} }
cursor.next(&()); cursor.next();
} }
if rows.end > cursor.start().0.row() { if rows.end > cursor.start().0.row() {
summary += &cursor summary += &cursor
.summary::<_, TransformSummary>(&WrapPoint::new(rows.end, 0), Bias::Right, &()) .summary::<_, TransformSummary>(&WrapPoint::new(rows.end, 0), Bias::Right)
.output; .output;
if let Some(transform) = cursor.item() { if let Some(transform) = cursor.item() {
@ -712,7 +706,7 @@ impl WrapSnapshot {
pub fn soft_wrap_indent(&self, row: u32) -> Option<u32> { pub fn soft_wrap_indent(&self, row: u32) -> Option<u32> {
let mut cursor = self.transforms.cursor::<WrapPoint>(&()); let mut cursor = self.transforms.cursor::<WrapPoint>(&());
cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Right, &()); cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Right);
cursor.item().and_then(|transform| { cursor.item().and_then(|transform| {
if transform.is_isomorphic() { if transform.is_isomorphic() {
None None
@ -728,7 +722,7 @@ impl WrapSnapshot {
pub fn row_infos(&self, start_row: u32) -> WrapRows<'_> { pub fn row_infos(&self, start_row: u32) -> WrapRows<'_> {
let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
transforms.seek(&WrapPoint::new(start_row, 0), Bias::Left, &()); transforms.seek(&WrapPoint::new(start_row, 0), Bias::Left);
let mut input_row = transforms.start().1.row(); let mut input_row = transforms.start().1.row();
if transforms.item().map_or(false, |t| t.is_isomorphic()) { if transforms.item().map_or(false, |t| t.is_isomorphic()) {
input_row += start_row - transforms.start().0.row(); input_row += start_row - transforms.start().0.row();
@ -748,7 +742,7 @@ impl WrapSnapshot {
pub fn to_tab_point(&self, point: WrapPoint) -> TabPoint { pub fn to_tab_point(&self, point: WrapPoint) -> TabPoint {
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
cursor.seek(&point, Bias::Right, &()); cursor.seek(&point, Bias::Right);
let mut tab_point = cursor.start().1.0; let mut tab_point = cursor.start().1.0;
if cursor.item().map_or(false, |t| t.is_isomorphic()) { if cursor.item().map_or(false, |t| t.is_isomorphic()) {
tab_point += point.0 - cursor.start().0.0; tab_point += point.0 - cursor.start().0.0;
@ -766,14 +760,14 @@ impl WrapSnapshot {
pub fn tab_point_to_wrap_point(&self, point: TabPoint) -> WrapPoint { pub fn tab_point_to_wrap_point(&self, point: TabPoint) -> WrapPoint {
let mut cursor = self.transforms.cursor::<(TabPoint, WrapPoint)>(&()); let mut cursor = self.transforms.cursor::<(TabPoint, WrapPoint)>(&());
cursor.seek(&point, Bias::Right, &()); cursor.seek(&point, Bias::Right);
WrapPoint(cursor.start().1.0 + (point.0 - cursor.start().0.0)) WrapPoint(cursor.start().1.0 + (point.0 - cursor.start().0.0))
} }
pub fn clip_point(&self, mut point: WrapPoint, bias: Bias) -> WrapPoint { pub fn clip_point(&self, mut point: WrapPoint, bias: Bias) -> WrapPoint {
if bias == Bias::Left { if bias == Bias::Left {
let mut cursor = self.transforms.cursor::<WrapPoint>(&()); let mut cursor = self.transforms.cursor::<WrapPoint>(&());
cursor.seek(&point, Bias::Right, &()); cursor.seek(&point, Bias::Right);
if cursor.item().map_or(false, |t| !t.is_isomorphic()) { if cursor.item().map_or(false, |t| !t.is_isomorphic()) {
point = *cursor.start(); point = *cursor.start();
*point.column_mut() -= 1; *point.column_mut() -= 1;
@ -791,16 +785,16 @@ impl WrapSnapshot {
*point.column_mut() = 0; *point.column_mut() = 0;
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
cursor.seek(&point, Bias::Right, &()); cursor.seek(&point, Bias::Right);
if cursor.item().is_none() { if cursor.item().is_none() {
cursor.prev(&()); cursor.prev();
} }
while let Some(transform) = cursor.item() { while let Some(transform) = cursor.item() {
if transform.is_isomorphic() && cursor.start().1.column() == 0 { if transform.is_isomorphic() && cursor.start().1.column() == 0 {
return cmp::min(cursor.end(&()).0.row(), point.row()); return cmp::min(cursor.end().0.row(), point.row());
} else { } else {
cursor.prev(&()); cursor.prev();
} }
} }
@ -811,12 +805,12 @@ impl WrapSnapshot {
point.0 += Point::new(1, 0); point.0 += Point::new(1, 0);
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
cursor.seek(&point, Bias::Right, &()); cursor.seek(&point, Bias::Right);
while let Some(transform) = cursor.item() { while let Some(transform) = cursor.item() {
if transform.is_isomorphic() && cursor.start().1.column() == 0 { if transform.is_isomorphic() && cursor.start().1.column() == 0 {
return Some(cmp::max(cursor.start().0.row(), point.row())); return Some(cmp::max(cursor.start().0.row(), point.row()));
} else { } else {
cursor.next(&()); cursor.next();
} }
} }
@ -889,7 +883,7 @@ impl WrapChunks<'_> {
pub(crate) fn seek(&mut self, rows: Range<u32>) { pub(crate) fn seek(&mut self, rows: Range<u32>) {
let output_start = WrapPoint::new(rows.start, 0); let output_start = WrapPoint::new(rows.start, 0);
let output_end = WrapPoint::new(rows.end, 0); let output_end = WrapPoint::new(rows.end, 0);
self.transforms.seek(&output_start, Bias::Right, &()); self.transforms.seek(&output_start, Bias::Right);
let mut input_start = TabPoint(self.transforms.start().1.0); let mut input_start = TabPoint(self.transforms.start().1.0);
if self.transforms.item().map_or(false, |t| t.is_isomorphic()) { if self.transforms.item().map_or(false, |t| t.is_isomorphic()) {
input_start.0 += output_start.0 - self.transforms.start().0.0; input_start.0 += output_start.0 - self.transforms.start().0.0;
@ -930,7 +924,7 @@ impl<'a> Iterator for WrapChunks<'a> {
} }
self.output_position.0 += summary; self.output_position.0 += summary;
self.transforms.next(&()); self.transforms.next();
return Some(Chunk { return Some(Chunk {
text: &display_text[start_ix..end_ix], text: &display_text[start_ix..end_ix],
..Default::default() ..Default::default()
@ -942,7 +936,7 @@ impl<'a> Iterator for WrapChunks<'a> {
} }
let mut input_len = 0; let mut input_len = 0;
let transform_end = self.transforms.end(&()).0; let transform_end = self.transforms.end().0;
for c in self.input_chunk.text.chars() { for c in self.input_chunk.text.chars() {
let char_len = c.len_utf8(); let char_len = c.len_utf8();
input_len += char_len; input_len += char_len;
@ -954,7 +948,7 @@ impl<'a> Iterator for WrapChunks<'a> {
} }
if self.output_position >= transform_end { if self.output_position >= transform_end {
self.transforms.next(&()); self.transforms.next();
break; break;
} }
} }
@ -982,7 +976,7 @@ impl Iterator for WrapRows<'_> {
self.output_row += 1; self.output_row += 1;
self.transforms self.transforms
.seek_forward(&WrapPoint::new(self.output_row, 0), Bias::Left, &()); .seek_forward(&WrapPoint::new(self.output_row, 0), Bias::Left);
if self.transforms.item().map_or(false, |t| t.is_isomorphic()) { if self.transforms.item().map_or(false, |t| t.is_isomorphic()) {
self.input_buffer_row = self.input_buffer_rows.next().unwrap(); self.input_buffer_row = self.input_buffer_rows.next().unwrap();
self.soft_wrapped = false; self.soft_wrapped = false;

View file

@ -9570,6 +9570,74 @@ async fn test_document_format_during_save(cx: &mut TestAppContext) {
} }
} }
#[gpui::test]
async fn test_redo_after_noop_format(cx: &mut TestAppContext) {
init_test(cx, |settings| {
settings.defaults.ensure_final_newline_on_save = Some(false);
});
let fs = FakeFs::new(cx.executor());
fs.insert_file(path!("/file.txt"), "foo".into()).await;
let project = Project::test(fs, [path!("/file.txt").as_ref()], cx).await;
let buffer = project
.update(cx, |project, cx| {
project.open_local_buffer(path!("/file.txt"), cx)
})
.await
.unwrap();
let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx));
let (editor, cx) = cx.add_window_view(|window, cx| {
build_editor_with_project(project.clone(), buffer, window, cx)
});
editor.update_in(cx, |editor, window, cx| {
editor.change_selections(SelectionEffects::default(), window, cx, |s| {
s.select_ranges([0..0])
});
});
assert!(!cx.read(|cx| editor.is_dirty(cx)));
editor.update_in(cx, |editor, window, cx| {
editor.handle_input("\n", window, cx)
});
cx.run_until_parked();
save(&editor, &project, cx).await;
assert_eq!("\nfoo", editor.read_with(cx, |editor, cx| editor.text(cx)));
editor.update_in(cx, |editor, window, cx| {
editor.undo(&Default::default(), window, cx);
});
save(&editor, &project, cx).await;
assert_eq!("foo", editor.read_with(cx, |editor, cx| editor.text(cx)));
editor.update_in(cx, |editor, window, cx| {
editor.redo(&Default::default(), window, cx);
});
cx.run_until_parked();
assert_eq!("\nfoo", editor.read_with(cx, |editor, cx| editor.text(cx)));
async fn save(editor: &Entity<Editor>, project: &Entity<Project>, cx: &mut VisualTestContext) {
let save = editor
.update_in(cx, |editor, window, cx| {
editor.save(
SaveOptions {
format: true,
autosave: false,
},
project.clone(),
window,
cx,
)
})
.unwrap();
cx.executor().start_waiting();
save.await;
assert!(!cx.read(|cx| editor.is_dirty(cx)));
}
}
#[gpui::test] #[gpui::test]
async fn test_multibuffer_format_during_save(cx: &mut TestAppContext) { async fn test_multibuffer_format_during_save(cx: &mut TestAppContext) {
init_test(cx, |_| {}); init_test(cx, |_| {});
@ -22708,7 +22776,7 @@ pub(crate) fn init_test(cx: &mut TestAppContext, f: fn(&mut AllLanguageSettingsC
workspace::init_settings(cx); workspace::init_settings(cx);
crate::init(cx); crate::init(cx);
}); });
zlog::init_test();
update_test_language_settings(cx, f); update_test_language_settings(cx, f);
} }

View file

@ -296,7 +296,7 @@ impl GitBlame {
let row = info let row = info
.buffer_row .buffer_row
.filter(|_| info.buffer_id == Some(buffer_id))?; .filter(|_| info.buffer_id == Some(buffer_id))?;
cursor.seek_forward(&row, Bias::Right, &()); cursor.seek_forward(&row, Bias::Right);
cursor.item()?.blame.clone() cursor.item()?.blame.clone()
}) })
} }
@ -389,7 +389,7 @@ impl GitBlame {
} }
} }
new_entries.append(cursor.slice(&edit.old.start, Bias::Right, &()), &()); new_entries.append(cursor.slice(&edit.old.start, Bias::Right), &());
if edit.new.start > new_entries.summary().rows { if edit.new.start > new_entries.summary().rows {
new_entries.push( new_entries.push(
@ -401,7 +401,7 @@ impl GitBlame {
); );
} }
cursor.seek(&edit.old.end, Bias::Right, &()); cursor.seek(&edit.old.end, Bias::Right);
if !edit.new.is_empty() { if !edit.new.is_empty() {
new_entries.push( new_entries.push(
GitBlameEntry { GitBlameEntry {
@ -412,7 +412,7 @@ impl GitBlame {
); );
} }
let old_end = cursor.end(&()); let old_end = cursor.end();
if row_edits if row_edits
.peek() .peek()
.map_or(true, |next_edit| next_edit.old.start >= old_end) .map_or(true, |next_edit| next_edit.old.start >= old_end)
@ -421,18 +421,18 @@ impl GitBlame {
if old_end > edit.old.end { if old_end > edit.old.end {
new_entries.push( new_entries.push(
GitBlameEntry { GitBlameEntry {
rows: cursor.end(&()) - edit.old.end, rows: cursor.end() - edit.old.end,
blame: entry.blame.clone(), blame: entry.blame.clone(),
}, },
&(), &(),
); );
} }
cursor.next(&()); cursor.next();
} }
} }
} }
new_entries.append(cursor.suffix(&()), &()); new_entries.append(cursor.suffix(), &());
drop(cursor); drop(cursor);
self.buffer_snapshot = new_snapshot; self.buffer_snapshot = new_snapshot;

View file

@ -23,6 +23,7 @@ askpass.workspace = true
buffer_diff.workspace = true buffer_diff.workspace = true
call.workspace = true call.workspace = true
chrono.workspace = true chrono.workspace = true
client.workspace = true
collections.workspace = true collections.workspace = true
command_palette_hooks.workspace = true command_palette_hooks.workspace = true
component.workspace = true component.workspace = true

View file

@ -1,8 +1,10 @@
use crate::branch_picker::{self, BranchList}; use crate::branch_picker::{self, BranchList};
use crate::git_panel::{GitPanel, commit_message_editor}; use crate::git_panel::{GitPanel, commit_message_editor};
use client::DisableAiSettings;
use git::repository::CommitOptions; use git::repository::CommitOptions;
use git::{Amend, Commit, GenerateCommitMessage, Signoff}; use git::{Amend, Commit, GenerateCommitMessage, Signoff};
use panel::{panel_button, panel_editor_style}; use panel::{panel_button, panel_editor_style};
use settings::Settings;
use ui::{ use ui::{
ContextMenu, KeybindingHint, PopoverMenu, PopoverMenuHandle, SplitButton, Tooltip, prelude::*, ContextMenu, KeybindingHint, PopoverMenu, PopoverMenuHandle, SplitButton, Tooltip, prelude::*,
}; };
@ -569,11 +571,13 @@ impl Render for CommitModal {
.on_action(cx.listener(Self::dismiss)) .on_action(cx.listener(Self::dismiss))
.on_action(cx.listener(Self::commit)) .on_action(cx.listener(Self::commit))
.on_action(cx.listener(Self::amend)) .on_action(cx.listener(Self::amend))
.on_action(cx.listener(|this, _: &GenerateCommitMessage, _, cx| { .when(!DisableAiSettings::get_global(cx).disable_ai, |this| {
this.git_panel.update(cx, |panel, cx| { this.on_action(cx.listener(|this, _: &GenerateCommitMessage, _, cx| {
panel.generate_commit_message(cx); this.git_panel.update(cx, |panel, cx| {
}) panel.generate_commit_message(cx);
})) })
}))
})
.on_action( .on_action(
cx.listener(|this, _: &zed_actions::git::Branch, window, cx| { cx.listener(|this, _: &zed_actions::git::Branch, window, cx| {
this.toggle_branch_selector(window, cx); this.toggle_branch_selector(window, cx);

View file

@ -12,6 +12,7 @@ use crate::{
use agent_settings::AgentSettings; use agent_settings::AgentSettings;
use anyhow::Context as _; use anyhow::Context as _;
use askpass::AskPassDelegate; use askpass::AskPassDelegate;
use client::DisableAiSettings;
use db::kvp::KEY_VALUE_STORE; use db::kvp::KEY_VALUE_STORE;
use editor::{ use editor::{
Editor, EditorElement, EditorMode, EditorSettings, MultiBuffer, ShowScrollbar, Editor, EditorElement, EditorMode, EditorSettings, MultiBuffer, ShowScrollbar,
@ -53,7 +54,7 @@ use project::{
git_store::{GitStoreEvent, Repository}, git_store::{GitStoreEvent, Repository},
}; };
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use settings::{Settings as _, SettingsStore}; use settings::{Settings, SettingsStore};
use std::future::Future; use std::future::Future;
use std::ops::Range; use std::ops::Range;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
@ -464,9 +465,14 @@ impl GitPanel {
}; };
let mut assistant_enabled = AgentSettings::get_global(cx).enabled; let mut assistant_enabled = AgentSettings::get_global(cx).enabled;
let mut was_ai_disabled = DisableAiSettings::get_global(cx).disable_ai;
let _settings_subscription = cx.observe_global::<SettingsStore>(move |_, cx| { let _settings_subscription = cx.observe_global::<SettingsStore>(move |_, cx| {
if assistant_enabled != AgentSettings::get_global(cx).enabled { let is_ai_disabled = DisableAiSettings::get_global(cx).disable_ai;
if assistant_enabled != AgentSettings::get_global(cx).enabled
|| was_ai_disabled != is_ai_disabled
{
assistant_enabled = AgentSettings::get_global(cx).enabled; assistant_enabled = AgentSettings::get_global(cx).enabled;
was_ai_disabled = is_ai_disabled;
cx.notify(); cx.notify();
} }
}); });
@ -1806,7 +1812,7 @@ impl GitPanel {
/// Generates a commit message using an LLM. /// Generates a commit message using an LLM.
pub fn generate_commit_message(&mut self, cx: &mut Context<Self>) { pub fn generate_commit_message(&mut self, cx: &mut Context<Self>) {
if !self.can_commit() { if !self.can_commit() || DisableAiSettings::get_global(cx).disable_ai {
return; return;
} }
@ -4305,8 +4311,10 @@ impl GitPanel {
} }
fn current_language_model(cx: &Context<'_, GitPanel>) -> Option<Arc<dyn LanguageModel>> { fn current_language_model(cx: &Context<'_, GitPanel>) -> Option<Arc<dyn LanguageModel>> {
agent_settings::AgentSettings::get_global(cx) let is_enabled = agent_settings::AgentSettings::get_global(cx).enabled
.enabled && !DisableAiSettings::get_global(cx).disable_ai;
is_enabled
.then(|| { .then(|| {
let ConfiguredModel { provider, model } = let ConfiguredModel { provider, model } =
LanguageModelRegistry::read_global(cx).commit_message_model()?; LanguageModelRegistry::read_global(cx).commit_message_model()?;
@ -5037,6 +5045,7 @@ mod tests {
language::init(cx); language::init(cx);
editor::init(cx); editor::init(cx);
Project::init_settings(cx); Project::init_settings(cx);
client::DisableAiSettings::register(cx);
crate::init(cx); crate::init(cx);
}); });
} }

View file

@ -501,7 +501,7 @@ mod remote_button {
) )
.into_any_element(); .into_any_element();
SplitButton { left, right } SplitButton::new(left, right)
} }
} }

View file

@ -121,7 +121,7 @@ smallvec.workspace = true
smol.workspace = true smol.workspace = true
strum.workspace = true strum.workspace = true
sum_tree.workspace = true sum_tree.workspace = true
taffy = "=0.5.1" taffy = "=0.8.3"
thiserror.workspace = true thiserror.workspace = true
util.workspace = true util.workspace = true
uuid.workspace = true uuid.workspace = true

View file

@ -249,8 +249,8 @@ impl ListState {
let state = &mut *self.0.borrow_mut(); let state = &mut *self.0.borrow_mut();
let mut old_items = state.items.cursor::<Count>(&()); let mut old_items = state.items.cursor::<Count>(&());
let mut new_items = old_items.slice(&Count(old_range.start), Bias::Right, &()); let mut new_items = old_items.slice(&Count(old_range.start), Bias::Right);
old_items.seek_forward(&Count(old_range.end), Bias::Right, &()); old_items.seek_forward(&Count(old_range.end), Bias::Right);
let mut spliced_count = 0; let mut spliced_count = 0;
new_items.extend( new_items.extend(
@ -260,7 +260,7 @@ impl ListState {
}), }),
&(), &(),
); );
new_items.append(old_items.suffix(&()), &()); new_items.append(old_items.suffix(), &());
drop(old_items); drop(old_items);
state.items = new_items; state.items = new_items;
@ -300,14 +300,14 @@ impl ListState {
let current_offset = self.logical_scroll_top(); let current_offset = self.logical_scroll_top();
let state = &mut *self.0.borrow_mut(); let state = &mut *self.0.borrow_mut();
let mut cursor = state.items.cursor::<ListItemSummary>(&()); let mut cursor = state.items.cursor::<ListItemSummary>(&());
cursor.seek(&Count(current_offset.item_ix), Bias::Right, &()); cursor.seek(&Count(current_offset.item_ix), Bias::Right);
let start_pixel_offset = cursor.start().height + current_offset.offset_in_item; let start_pixel_offset = cursor.start().height + current_offset.offset_in_item;
let new_pixel_offset = (start_pixel_offset + distance).max(px(0.)); let new_pixel_offset = (start_pixel_offset + distance).max(px(0.));
if new_pixel_offset > start_pixel_offset { if new_pixel_offset > start_pixel_offset {
cursor.seek_forward(&Height(new_pixel_offset), Bias::Right, &()); cursor.seek_forward(&Height(new_pixel_offset), Bias::Right);
} else { } else {
cursor.seek(&Height(new_pixel_offset), Bias::Right, &()); cursor.seek(&Height(new_pixel_offset), Bias::Right);
} }
state.logical_scroll_top = Some(ListOffset { state.logical_scroll_top = Some(ListOffset {
@ -343,11 +343,11 @@ impl ListState {
scroll_top.offset_in_item = px(0.); scroll_top.offset_in_item = px(0.);
} else { } else {
let mut cursor = state.items.cursor::<ListItemSummary>(&()); let mut cursor = state.items.cursor::<ListItemSummary>(&());
cursor.seek(&Count(ix + 1), Bias::Right, &()); cursor.seek(&Count(ix + 1), Bias::Right);
let bottom = cursor.start().height + padding.top; let bottom = cursor.start().height + padding.top;
let goal_top = px(0.).max(bottom - height + padding.bottom); let goal_top = px(0.).max(bottom - height + padding.bottom);
cursor.seek(&Height(goal_top), Bias::Left, &()); cursor.seek(&Height(goal_top), Bias::Left);
let start_ix = cursor.start().count; let start_ix = cursor.start().count;
let start_item_top = cursor.start().height; let start_item_top = cursor.start().height;
@ -372,11 +372,11 @@ impl ListState {
} }
let mut cursor = state.items.cursor::<(Count, Height)>(&()); let mut cursor = state.items.cursor::<(Count, Height)>(&());
cursor.seek(&Count(scroll_top.item_ix), Bias::Right, &()); cursor.seek(&Count(scroll_top.item_ix), Bias::Right);
let scroll_top = cursor.start().1.0 + scroll_top.offset_in_item; let scroll_top = cursor.start().1.0 + scroll_top.offset_in_item;
cursor.seek_forward(&Count(ix), Bias::Right, &()); cursor.seek_forward(&Count(ix), Bias::Right);
if let Some(&ListItem::Measured { size, .. }) = cursor.item() { if let Some(&ListItem::Measured { size, .. }) = cursor.item() {
let &(Count(count), Height(top)) = cursor.start(); let &(Count(count), Height(top)) = cursor.start();
if count == ix { if count == ix {
@ -431,7 +431,7 @@ impl ListState {
let mut cursor = state.items.cursor::<ListItemSummary>(&()); let mut cursor = state.items.cursor::<ListItemSummary>(&());
let summary: ListItemSummary = let summary: ListItemSummary =
cursor.summary(&Count(logical_scroll_top.item_ix), Bias::Right, &()); cursor.summary(&Count(logical_scroll_top.item_ix), Bias::Right);
let content_height = state.items.summary().height; let content_height = state.items.summary().height;
let drag_offset = let drag_offset =
// if dragging the scrollbar, we want to offset the point if the height changed // if dragging the scrollbar, we want to offset the point if the height changed
@ -450,9 +450,9 @@ impl ListState {
impl StateInner { impl StateInner {
fn visible_range(&self, height: Pixels, scroll_top: &ListOffset) -> Range<usize> { fn visible_range(&self, height: Pixels, scroll_top: &ListOffset) -> Range<usize> {
let mut cursor = self.items.cursor::<ListItemSummary>(&()); let mut cursor = self.items.cursor::<ListItemSummary>(&());
cursor.seek(&Count(scroll_top.item_ix), Bias::Right, &()); cursor.seek(&Count(scroll_top.item_ix), Bias::Right);
let start_y = cursor.start().height + scroll_top.offset_in_item; let start_y = cursor.start().height + scroll_top.offset_in_item;
cursor.seek_forward(&Height(start_y + height), Bias::Left, &()); cursor.seek_forward(&Height(start_y + height), Bias::Left);
scroll_top.item_ix..cursor.start().count + 1 scroll_top.item_ix..cursor.start().count + 1
} }
@ -482,7 +482,7 @@ impl StateInner {
self.logical_scroll_top = None; self.logical_scroll_top = None;
} else { } else {
let mut cursor = self.items.cursor::<ListItemSummary>(&()); let mut cursor = self.items.cursor::<ListItemSummary>(&());
cursor.seek(&Height(new_scroll_top), Bias::Right, &()); cursor.seek(&Height(new_scroll_top), Bias::Right);
let item_ix = cursor.start().count; let item_ix = cursor.start().count;
let offset_in_item = new_scroll_top - cursor.start().height; let offset_in_item = new_scroll_top - cursor.start().height;
self.logical_scroll_top = Some(ListOffset { self.logical_scroll_top = Some(ListOffset {
@ -523,7 +523,7 @@ impl StateInner {
fn scroll_top(&self, logical_scroll_top: &ListOffset) -> Pixels { fn scroll_top(&self, logical_scroll_top: &ListOffset) -> Pixels {
let mut cursor = self.items.cursor::<ListItemSummary>(&()); let mut cursor = self.items.cursor::<ListItemSummary>(&());
cursor.seek(&Count(logical_scroll_top.item_ix), Bias::Right, &()); cursor.seek(&Count(logical_scroll_top.item_ix), Bias::Right);
cursor.start().height + logical_scroll_top.offset_in_item cursor.start().height + logical_scroll_top.offset_in_item
} }
@ -553,7 +553,7 @@ impl StateInner {
let mut cursor = old_items.cursor::<Count>(&()); let mut cursor = old_items.cursor::<Count>(&());
// Render items after the scroll top, including those in the trailing overdraw // Render items after the scroll top, including those in the trailing overdraw
cursor.seek(&Count(scroll_top.item_ix), Bias::Right, &()); cursor.seek(&Count(scroll_top.item_ix), Bias::Right);
for (ix, item) in cursor.by_ref().enumerate() { for (ix, item) in cursor.by_ref().enumerate() {
let visible_height = rendered_height - scroll_top.offset_in_item; let visible_height = rendered_height - scroll_top.offset_in_item;
if visible_height >= available_height + self.overdraw { if visible_height >= available_height + self.overdraw {
@ -592,13 +592,13 @@ impl StateInner {
rendered_height += padding.bottom; rendered_height += padding.bottom;
// Prepare to start walking upward from the item at the scroll top. // Prepare to start walking upward from the item at the scroll top.
cursor.seek(&Count(scroll_top.item_ix), Bias::Right, &()); cursor.seek(&Count(scroll_top.item_ix), Bias::Right);
// If the rendered items do not fill the visible region, then adjust // If the rendered items do not fill the visible region, then adjust
// the scroll top upward. // the scroll top upward.
if rendered_height - scroll_top.offset_in_item < available_height { if rendered_height - scroll_top.offset_in_item < available_height {
while rendered_height < available_height { while rendered_height < available_height {
cursor.prev(&()); cursor.prev();
if let Some(item) = cursor.item() { if let Some(item) = cursor.item() {
let item_index = cursor.start().0; let item_index = cursor.start().0;
let mut element = (self.render_item)(item_index, window, cx); let mut element = (self.render_item)(item_index, window, cx);
@ -645,7 +645,7 @@ impl StateInner {
// Measure items in the leading overdraw // Measure items in the leading overdraw
let mut leading_overdraw = scroll_top.offset_in_item; let mut leading_overdraw = scroll_top.offset_in_item;
while leading_overdraw < self.overdraw { while leading_overdraw < self.overdraw {
cursor.prev(&()); cursor.prev();
if let Some(item) = cursor.item() { if let Some(item) = cursor.item() {
let size = if let ListItem::Measured { size, .. } = item { let size = if let ListItem::Measured { size, .. } = item {
*size *size
@ -666,10 +666,10 @@ impl StateInner {
let measured_range = cursor.start().0..(cursor.start().0 + measured_items.len()); let measured_range = cursor.start().0..(cursor.start().0 + measured_items.len());
let mut cursor = old_items.cursor::<Count>(&()); let mut cursor = old_items.cursor::<Count>(&());
let mut new_items = cursor.slice(&Count(measured_range.start), Bias::Right, &()); let mut new_items = cursor.slice(&Count(measured_range.start), Bias::Right);
new_items.extend(measured_items, &()); new_items.extend(measured_items, &());
cursor.seek(&Count(measured_range.end), Bias::Right, &()); cursor.seek(&Count(measured_range.end), Bias::Right);
new_items.append(cursor.suffix(&()), &()); new_items.append(cursor.suffix(), &());
self.items = new_items; self.items = new_items;
// If none of the visible items are focused, check if an off-screen item is focused // If none of the visible items are focused, check if an off-screen item is focused
@ -679,7 +679,7 @@ impl StateInner {
let mut cursor = self let mut cursor = self
.items .items
.filter::<_, Count>(&(), |summary| summary.has_focus_handles); .filter::<_, Count>(&(), |summary| summary.has_focus_handles);
cursor.next(&()); cursor.next();
while let Some(item) = cursor.item() { while let Some(item) = cursor.item() {
if item.contains_focused(window, cx) { if item.contains_focused(window, cx) {
let item_index = cursor.start().0; let item_index = cursor.start().0;
@ -692,7 +692,7 @@ impl StateInner {
}); });
break; break;
} }
cursor.next(&()); cursor.next();
} }
} }
@ -741,7 +741,7 @@ impl StateInner {
}); });
} else if autoscroll_bounds.bottom() > bounds.bottom() { } else if autoscroll_bounds.bottom() > bounds.bottom() {
let mut cursor = self.items.cursor::<Count>(&()); let mut cursor = self.items.cursor::<Count>(&());
cursor.seek(&Count(item.index), Bias::Right, &()); cursor.seek(&Count(item.index), Bias::Right);
let mut height = bounds.size.height - padding.top - padding.bottom; let mut height = bounds.size.height - padding.top - padding.bottom;
// Account for the height of the element down until the autoscroll bottom. // Account for the height of the element down until the autoscroll bottom.
@ -749,7 +749,7 @@ impl StateInner {
// Keep decreasing the scroll top until we fill all the available space. // Keep decreasing the scroll top until we fill all the available space.
while height > Pixels::ZERO { while height > Pixels::ZERO {
cursor.prev(&()); cursor.prev();
let Some(item) = cursor.item() else { break }; let Some(item) = cursor.item() else { break };
let size = item.size().unwrap_or_else(|| { let size = item.size().unwrap_or_else(|| {
@ -806,7 +806,7 @@ impl StateInner {
self.logical_scroll_top = None; self.logical_scroll_top = None;
} else { } else {
let mut cursor = self.items.cursor::<ListItemSummary>(&()); let mut cursor = self.items.cursor::<ListItemSummary>(&());
cursor.seek(&Height(new_scroll_top), Bias::Right, &()); cursor.seek(&Height(new_scroll_top), Bias::Right);
let item_ix = cursor.start().count; let item_ix = cursor.start().count;
let offset_in_item = new_scroll_top - cursor.start().height; let offset_in_item = new_scroll_top - cursor.start().height;

View file

@ -50,8 +50,8 @@
/// KeyBinding::new("cmd-k left", pane::SplitLeft, Some("Pane")) /// KeyBinding::new("cmd-k left", pane::SplitLeft, Some("Pane"))
/// ///
use crate::{ use crate::{
Action, ActionRegistry, App, BindingIndex, DispatchPhase, EntityId, FocusId, KeyBinding, Action, ActionRegistry, App, DispatchPhase, EntityId, FocusId, KeyBinding, KeyContext, Keymap,
KeyContext, Keymap, Keystroke, ModifiersChangedEvent, Window, Keystroke, ModifiersChangedEvent, Window,
}; };
use collections::FxHashMap; use collections::FxHashMap;
use smallvec::SmallVec; use smallvec::SmallVec;
@ -406,16 +406,11 @@ impl DispatchTree {
// methods, but this can't be done very cleanly since keymap must be borrowed. // methods, but this can't be done very cleanly since keymap must be borrowed.
let keymap = self.keymap.borrow(); let keymap = self.keymap.borrow();
keymap keymap
.bindings_for_action_with_indices(action) .bindings_for_action(action)
.filter(|(binding_index, binding)| { .filter(|binding| {
Self::binding_matches_predicate_and_not_shadowed( Self::binding_matches_predicate_and_not_shadowed(&keymap, &binding, context_stack)
&keymap,
*binding_index,
&binding.keystrokes,
context_stack,
)
}) })
.map(|(_, binding)| binding.clone()) .cloned()
.collect() .collect()
} }
@ -428,28 +423,22 @@ impl DispatchTree {
) -> Option<KeyBinding> { ) -> Option<KeyBinding> {
let keymap = self.keymap.borrow(); let keymap = self.keymap.borrow();
keymap keymap
.bindings_for_action_with_indices(action) .bindings_for_action(action)
.rev() .rev()
.find_map(|(binding_index, binding)| { .find(|binding| {
let found = Self::binding_matches_predicate_and_not_shadowed( Self::binding_matches_predicate_and_not_shadowed(&keymap, &binding, context_stack)
&keymap,
binding_index,
&binding.keystrokes,
context_stack,
);
if found { Some(binding.clone()) } else { None }
}) })
.cloned()
} }
fn binding_matches_predicate_and_not_shadowed( fn binding_matches_predicate_and_not_shadowed(
keymap: &Keymap, keymap: &Keymap,
binding_index: BindingIndex, binding: &KeyBinding,
keystrokes: &[Keystroke],
context_stack: &[KeyContext], context_stack: &[KeyContext],
) -> bool { ) -> bool {
let (bindings, _) = keymap.bindings_for_input_with_indices(&keystrokes, context_stack); let (bindings, _) = keymap.bindings_for_input(&binding.keystrokes, context_stack);
if let Some((highest_precedence_index, _)) = bindings.iter().next() { if let Some(found) = bindings.iter().next() {
binding_index == *highest_precedence_index found.action.partial_eq(binding.action.as_ref())
} else { } else {
false false
} }

View file

@ -5,7 +5,7 @@ pub use binding::*;
pub use context::*; pub use context::*;
use crate::{Action, Keystroke, is_no_action}; use crate::{Action, Keystroke, is_no_action};
use collections::HashMap; use collections::{HashMap, HashSet};
use smallvec::SmallVec; use smallvec::SmallVec;
use std::any::TypeId; use std::any::TypeId;
@ -77,15 +77,6 @@ impl Keymap {
&'a self, &'a self,
action: &'a dyn Action, action: &'a dyn Action,
) -> impl 'a + DoubleEndedIterator<Item = &'a KeyBinding> { ) -> impl 'a + DoubleEndedIterator<Item = &'a KeyBinding> {
self.bindings_for_action_with_indices(action)
.map(|(_, binding)| binding)
}
/// Like `bindings_for_action_with_indices`, but also returns the binding indices.
pub fn bindings_for_action_with_indices<'a>(
&'a self,
action: &'a dyn Action,
) -> impl 'a + DoubleEndedIterator<Item = (BindingIndex, &'a KeyBinding)> {
let action_id = action.type_id(); let action_id = action.type_id();
let binding_indices = self let binding_indices = self
.binding_indices_by_action_id .binding_indices_by_action_id
@ -118,7 +109,7 @@ impl Keymap {
} }
} }
Some((BindingIndex(*ix), binding)) Some(binding)
}) })
} }
@ -153,90 +144,53 @@ impl Keymap {
input: &[Keystroke], input: &[Keystroke],
context_stack: &[KeyContext], context_stack: &[KeyContext],
) -> (SmallVec<[KeyBinding; 1]>, bool) { ) -> (SmallVec<[KeyBinding; 1]>, bool) {
let (bindings, pending) = self.bindings_for_input_with_indices(input, context_stack); let mut matched_bindings = SmallVec::<[(usize, BindingIndex, &KeyBinding); 1]>::new();
let bindings = bindings let mut pending_bindings = SmallVec::<[(BindingIndex, &KeyBinding); 1]>::new();
.into_iter()
.map(|(_, binding)| binding)
.collect::<SmallVec<[KeyBinding; 1]>>();
(bindings, pending)
}
/// Like `bindings_for_input`, but also returns the binding indices. for (ix, binding) in self.bindings().enumerate().rev() {
pub fn bindings_for_input_with_indices( let Some(depth) = self.binding_enabled(binding, &context_stack) else {
&self, continue;
input: &[Keystroke], };
context_stack: &[KeyContext], let Some(pending) = binding.match_keystrokes(input) else {
) -> (SmallVec<[(BindingIndex, KeyBinding); 1]>, bool) { continue;
let mut possibilities = self };
.bindings()
.enumerate() if !pending {
.rev() matched_bindings.push((depth, BindingIndex(ix), binding));
.filter_map(|(ix, binding)| { } else {
let depth = self.binding_enabled(binding, &context_stack)?; pending_bindings.push((BindingIndex(ix), binding));
let pending = binding.match_keystrokes(input)?; }
Some((depth, BindingIndex(ix), binding, pending)) }
})
.collect::<Vec<_>>(); matched_bindings.sort_by(|(depth_a, ix_a, _), (depth_b, ix_b, _)| {
possibilities.sort_by(|(depth_a, ix_a, _, _), (depth_b, ix_b, _, _)| {
depth_b.cmp(depth_a).then(ix_b.cmp(ix_a)) depth_b.cmp(depth_a).then(ix_b.cmp(ix_a))
}); });
let mut bindings: SmallVec<[(BindingIndex, KeyBinding, usize); 1]> = SmallVec::new(); let mut bindings: SmallVec<[_; 1]> = SmallVec::new();
let mut first_binding_index = None;
// (pending, is_no_action, depth, keystrokes) for (_, ix, binding) in matched_bindings {
let mut pending_info_opt: Option<(bool, bool, usize, &[Keystroke])> = None; if is_no_action(&*binding.action) {
break;
'outer: for (depth, binding_index, binding, pending) in possibilities {
let is_no_action = is_no_action(&*binding.action);
// We only want to consider a binding pending if it has an action
// This, however, means that if we have both a NoAction binding and a binding
// with an action at the same depth, we should still set is_pending to true.
if let Some(pending_info) = pending_info_opt.as_mut() {
let (already_pending, pending_is_no_action, pending_depth, pending_keystrokes) =
*pending_info;
// We only want to change the pending status if it's not already pending AND if
// the existing pending status was set by a NoAction binding. This avoids a NoAction
// binding erroneously setting the pending status to true when a binding with an action
// already set it to false
//
// We also want to change the pending status if the keystrokes don't match,
// meaning it's different keystrokes than the NoAction that set pending to false
if pending
&& !already_pending
&& pending_is_no_action
&& (pending_depth == depth || pending_keystrokes != binding.keystrokes())
{
pending_info.0 = !is_no_action;
}
} else {
pending_info_opt = Some((
pending && !is_no_action,
is_no_action,
depth,
binding.keystrokes(),
));
}
if !pending {
bindings.push((binding_index, binding.clone(), depth));
continue 'outer;
} }
bindings.push(binding.clone());
first_binding_index.get_or_insert(ix);
} }
// sort by descending depth
bindings.sort_by(|a, b| a.2.cmp(&b.2).reverse());
let bindings = bindings
.into_iter()
.map_while(|(binding_index, binding, _)| {
if is_no_action(&*binding.action) {
None
} else {
Some((binding_index, binding))
}
})
.collect();
(bindings, pending_info_opt.unwrap_or_default().0) let mut pending = HashSet::default();
for (ix, binding) in pending_bindings.into_iter().rev() {
if let Some(binding_ix) = first_binding_index
&& binding_ix > ix
{
continue;
}
if is_no_action(&*binding.action) {
pending.remove(&&binding.keystrokes);
continue;
}
pending.insert(&binding.keystrokes);
}
(bindings, !pending.is_empty())
} }
/// Check if the given binding is enabled, given a certain key context. /// Check if the given binding is enabled, given a certain key context.
@ -302,6 +256,30 @@ mod tests {
); );
} }
#[test]
fn test_depth_precedence() {
let bindings = [
KeyBinding::new("ctrl-a", ActionBeta {}, Some("pane")),
KeyBinding::new("ctrl-a", ActionGamma {}, Some("editor")),
];
let mut keymap = Keymap::default();
keymap.add_bindings(bindings.clone());
let (result, pending) = keymap.bindings_for_input(
&[Keystroke::parse("ctrl-a").unwrap()],
&[
KeyContext::parse("pane").unwrap(),
KeyContext::parse("editor").unwrap(),
],
);
assert!(!pending);
assert_eq!(result.len(), 2);
assert!(result[0].action.partial_eq(&ActionGamma {}));
assert!(result[1].action.partial_eq(&ActionBeta {}));
}
#[test] #[test]
fn test_keymap_disabled() { fn test_keymap_disabled() {
let bindings = [ let bindings = [
@ -453,6 +431,193 @@ mod tests {
assert_eq!(space_editor.1, true); assert_eq!(space_editor.1, true);
} }
#[test]
fn test_override_multikey() {
let bindings = [
KeyBinding::new("ctrl-w left", ActionAlpha {}, Some("editor")),
KeyBinding::new("ctrl-w", NoAction {}, Some("editor")),
];
let mut keymap = Keymap::default();
keymap.add_bindings(bindings.clone());
// Ensure `space` results in pending input on the workspace, but not editor
let (result, pending) = keymap.bindings_for_input(
&[Keystroke::parse("ctrl-w").unwrap()],
&[KeyContext::parse("editor").unwrap()],
);
assert!(result.is_empty());
assert_eq!(pending, true);
let bindings = [
KeyBinding::new("ctrl-w left", ActionAlpha {}, Some("editor")),
KeyBinding::new("ctrl-w", ActionBeta {}, Some("editor")),
];
let mut keymap = Keymap::default();
keymap.add_bindings(bindings.clone());
// Ensure `space` results in pending input on the workspace, but not editor
let (result, pending) = keymap.bindings_for_input(
&[Keystroke::parse("ctrl-w").unwrap()],
&[KeyContext::parse("editor").unwrap()],
);
assert_eq!(result.len(), 1);
assert_eq!(pending, false);
}
#[test]
fn test_simple_disable() {
let bindings = [
KeyBinding::new("ctrl-x", ActionAlpha {}, Some("editor")),
KeyBinding::new("ctrl-x", NoAction {}, Some("editor")),
];
let mut keymap = Keymap::default();
keymap.add_bindings(bindings.clone());
// Ensure `space` results in pending input on the workspace, but not editor
let (result, pending) = keymap.bindings_for_input(
&[Keystroke::parse("ctrl-x").unwrap()],
&[KeyContext::parse("editor").unwrap()],
);
assert!(result.is_empty());
assert_eq!(pending, false);
}
#[test]
fn test_fail_to_disable() {
// disabled at the wrong level
let bindings = [
KeyBinding::new("ctrl-x", ActionAlpha {}, Some("editor")),
KeyBinding::new("ctrl-x", NoAction {}, Some("workspace")),
];
let mut keymap = Keymap::default();
keymap.add_bindings(bindings.clone());
// Ensure `space` results in pending input on the workspace, but not editor
let (result, pending) = keymap.bindings_for_input(
&[Keystroke::parse("ctrl-x").unwrap()],
&[
KeyContext::parse("workspace").unwrap(),
KeyContext::parse("editor").unwrap(),
],
);
assert_eq!(result.len(), 1);
assert_eq!(pending, false);
}
#[test]
fn test_disable_deeper() {
let bindings = [
KeyBinding::new("ctrl-x", ActionAlpha {}, Some("workspace")),
KeyBinding::new("ctrl-x", NoAction {}, Some("editor")),
];
let mut keymap = Keymap::default();
keymap.add_bindings(bindings.clone());
// Ensure `space` results in pending input on the workspace, but not editor
let (result, pending) = keymap.bindings_for_input(
&[Keystroke::parse("ctrl-x").unwrap()],
&[
KeyContext::parse("workspace").unwrap(),
KeyContext::parse("editor").unwrap(),
],
);
assert_eq!(result.len(), 0);
assert_eq!(pending, false);
}
#[test]
fn test_pending_match_enabled() {
let bindings = [
KeyBinding::new("ctrl-x", ActionBeta, Some("vim_mode == normal")),
KeyBinding::new("ctrl-x 0", ActionAlpha, Some("Workspace")),
];
let mut keymap = Keymap::default();
keymap.add_bindings(bindings.clone());
let matched = keymap.bindings_for_input(
&[Keystroke::parse("ctrl-x")].map(Result::unwrap),
&[
KeyContext::parse("Workspace"),
KeyContext::parse("Pane"),
KeyContext::parse("Editor vim_mode=normal"),
]
.map(Result::unwrap),
);
assert_eq!(matched.0.len(), 1);
assert!(matched.0[0].action.partial_eq(&ActionBeta));
assert!(matched.1);
}
#[test]
fn test_pending_match_enabled_extended() {
let bindings = [
KeyBinding::new("ctrl-x", ActionBeta, Some("vim_mode == normal")),
KeyBinding::new("ctrl-x 0", NoAction, Some("Workspace")),
];
let mut keymap = Keymap::default();
keymap.add_bindings(bindings.clone());
let matched = keymap.bindings_for_input(
&[Keystroke::parse("ctrl-x")].map(Result::unwrap),
&[
KeyContext::parse("Workspace"),
KeyContext::parse("Pane"),
KeyContext::parse("Editor vim_mode=normal"),
]
.map(Result::unwrap),
);
assert_eq!(matched.0.len(), 1);
assert!(matched.0[0].action.partial_eq(&ActionBeta));
assert!(!matched.1);
let bindings = [
KeyBinding::new("ctrl-x", ActionBeta, Some("Workspace")),
KeyBinding::new("ctrl-x 0", NoAction, Some("vim_mode == normal")),
];
let mut keymap = Keymap::default();
keymap.add_bindings(bindings.clone());
let matched = keymap.bindings_for_input(
&[Keystroke::parse("ctrl-x")].map(Result::unwrap),
&[
KeyContext::parse("Workspace"),
KeyContext::parse("Pane"),
KeyContext::parse("Editor vim_mode=normal"),
]
.map(Result::unwrap),
);
assert_eq!(matched.0.len(), 1);
assert!(matched.0[0].action.partial_eq(&ActionBeta));
assert!(!matched.1);
}
#[test]
fn test_overriding_prefix() {
let bindings = [
KeyBinding::new("ctrl-x 0", ActionAlpha, Some("Workspace")),
KeyBinding::new("ctrl-x", ActionBeta, Some("vim_mode == normal")),
];
let mut keymap = Keymap::default();
keymap.add_bindings(bindings.clone());
let matched = keymap.bindings_for_input(
&[Keystroke::parse("ctrl-x")].map(Result::unwrap),
&[
KeyContext::parse("Workspace"),
KeyContext::parse("Pane"),
KeyContext::parse("Editor vim_mode=normal"),
]
.map(Result::unwrap),
);
assert_eq!(matched.0.len(), 1);
assert!(matched.0[0].action.partial_eq(&ActionBeta));
assert!(!matched.1);
}
#[test] #[test]
fn test_bindings_for_action() { fn test_bindings_for_action() {
let bindings = [ let bindings = [

View file

@ -283,7 +283,7 @@ impl ToTaffy<taffy::style::LengthPercentageAuto> for Length {
fn to_taffy(&self, rem_size: Pixels) -> taffy::prelude::LengthPercentageAuto { fn to_taffy(&self, rem_size: Pixels) -> taffy::prelude::LengthPercentageAuto {
match self { match self {
Length::Definite(length) => length.to_taffy(rem_size), Length::Definite(length) => length.to_taffy(rem_size),
Length::Auto => taffy::prelude::LengthPercentageAuto::Auto, Length::Auto => taffy::prelude::LengthPercentageAuto::auto(),
} }
} }
} }
@ -292,7 +292,7 @@ impl ToTaffy<taffy::style::Dimension> for Length {
fn to_taffy(&self, rem_size: Pixels) -> taffy::prelude::Dimension { fn to_taffy(&self, rem_size: Pixels) -> taffy::prelude::Dimension {
match self { match self {
Length::Definite(length) => length.to_taffy(rem_size), Length::Definite(length) => length.to_taffy(rem_size),
Length::Auto => taffy::prelude::Dimension::Auto, Length::Auto => taffy::prelude::Dimension::auto(),
} }
} }
} }
@ -302,14 +302,14 @@ impl ToTaffy<taffy::style::LengthPercentage> for DefiniteLength {
match self { match self {
DefiniteLength::Absolute(length) => match length { DefiniteLength::Absolute(length) => match length {
AbsoluteLength::Pixels(pixels) => { AbsoluteLength::Pixels(pixels) => {
taffy::style::LengthPercentage::Length(pixels.into()) taffy::style::LengthPercentage::length(pixels.into())
} }
AbsoluteLength::Rems(rems) => { AbsoluteLength::Rems(rems) => {
taffy::style::LengthPercentage::Length((*rems * rem_size).into()) taffy::style::LengthPercentage::length((*rems * rem_size).into())
} }
}, },
DefiniteLength::Fraction(fraction) => { DefiniteLength::Fraction(fraction) => {
taffy::style::LengthPercentage::Percent(*fraction) taffy::style::LengthPercentage::percent(*fraction)
} }
} }
} }
@ -320,14 +320,14 @@ impl ToTaffy<taffy::style::LengthPercentageAuto> for DefiniteLength {
match self { match self {
DefiniteLength::Absolute(length) => match length { DefiniteLength::Absolute(length) => match length {
AbsoluteLength::Pixels(pixels) => { AbsoluteLength::Pixels(pixels) => {
taffy::style::LengthPercentageAuto::Length(pixels.into()) taffy::style::LengthPercentageAuto::length(pixels.into())
} }
AbsoluteLength::Rems(rems) => { AbsoluteLength::Rems(rems) => {
taffy::style::LengthPercentageAuto::Length((*rems * rem_size).into()) taffy::style::LengthPercentageAuto::length((*rems * rem_size).into())
} }
}, },
DefiniteLength::Fraction(fraction) => { DefiniteLength::Fraction(fraction) => {
taffy::style::LengthPercentageAuto::Percent(*fraction) taffy::style::LengthPercentageAuto::percent(*fraction)
} }
} }
} }
@ -337,12 +337,12 @@ impl ToTaffy<taffy::style::Dimension> for DefiniteLength {
fn to_taffy(&self, rem_size: Pixels) -> taffy::style::Dimension { fn to_taffy(&self, rem_size: Pixels) -> taffy::style::Dimension {
match self { match self {
DefiniteLength::Absolute(length) => match length { DefiniteLength::Absolute(length) => match length {
AbsoluteLength::Pixels(pixels) => taffy::style::Dimension::Length(pixels.into()), AbsoluteLength::Pixels(pixels) => taffy::style::Dimension::length(pixels.into()),
AbsoluteLength::Rems(rems) => { AbsoluteLength::Rems(rems) => {
taffy::style::Dimension::Length((*rems * rem_size).into()) taffy::style::Dimension::length((*rems * rem_size).into())
} }
}, },
DefiniteLength::Fraction(fraction) => taffy::style::Dimension::Percent(*fraction), DefiniteLength::Fraction(fraction) => taffy::style::Dimension::percent(*fraction),
} }
} }
} }
@ -350,9 +350,9 @@ impl ToTaffy<taffy::style::Dimension> for DefiniteLength {
impl ToTaffy<taffy::style::LengthPercentage> for AbsoluteLength { impl ToTaffy<taffy::style::LengthPercentage> for AbsoluteLength {
fn to_taffy(&self, rem_size: Pixels) -> taffy::style::LengthPercentage { fn to_taffy(&self, rem_size: Pixels) -> taffy::style::LengthPercentage {
match self { match self {
AbsoluteLength::Pixels(pixels) => taffy::style::LengthPercentage::Length(pixels.into()), AbsoluteLength::Pixels(pixels) => taffy::style::LengthPercentage::length(pixels.into()),
AbsoluteLength::Rems(rems) => { AbsoluteLength::Rems(rems) => {
taffy::style::LengthPercentage::Length((*rems * rem_size).into()) taffy::style::LengthPercentage::length((*rems * rem_size).into())
} }
} }
} }

View file

@ -21,6 +21,7 @@ anyhow.workspace = true
derive_more.workspace = true derive_more.workspace = true
futures.workspace = true futures.workspace = true
http.workspace = true http.workspace = true
http-body.workspace = true
log.workspace = true log.workspace = true
serde.workspace = true serde.workspace = true
serde_json.workspace = true serde_json.workspace = true

View file

@ -6,6 +6,7 @@ use std::{
use bytes::Bytes; use bytes::Bytes;
use futures::AsyncRead; use futures::AsyncRead;
use http_body::{Body, Frame};
/// Based on the implementation of AsyncBody in /// Based on the implementation of AsyncBody in
/// <https://github.com/sagebind/isahc/blob/5c533f1ef4d6bdf1fd291b5103c22110f41d0bf0/src/body/mod.rs>. /// <https://github.com/sagebind/isahc/blob/5c533f1ef4d6bdf1fd291b5103c22110f41d0bf0/src/body/mod.rs>.
@ -114,3 +115,24 @@ impl futures::AsyncRead for AsyncBody {
} }
} }
} }
impl Body for AsyncBody {
type Data = Bytes;
type Error = std::io::Error;
fn poll_frame(
mut self: Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
) -> Poll<Option<Result<Frame<Self::Data>, Self::Error>>> {
let mut buffer = vec![0; 8192];
match AsyncRead::poll_read(self.as_mut(), cx, &mut buffer) {
Poll::Ready(Ok(0)) => Poll::Ready(None),
Poll::Ready(Ok(n)) => {
let data = Bytes::copy_from_slice(&buffer[..n]);
Poll::Ready(Some(Ok(Frame::data(data))))
}
Poll::Ready(Err(e)) => Poll::Ready(Some(Err(e))),
Poll::Pending => Poll::Pending,
}
}
}

View file

@ -20,6 +20,7 @@ pub enum IconName {
AiMistral, AiMistral,
AiOllama, AiOllama,
AiOpenAi, AiOpenAi,
AiOpenAiCompat,
AiOpenRouter, AiOpenRouter,
AiVZero, AiVZero,
AiXAi, AiXAi,

View file

@ -1,5 +1,5 @@
use anyhow::Result; use anyhow::Result;
use client::{UserStore, zed_urls}; use client::{DisableAiSettings, UserStore, zed_urls};
use copilot::{Copilot, Status}; use copilot::{Copilot, Status};
use editor::{ use editor::{
Editor, SelectionEffects, Editor, SelectionEffects,
@ -72,6 +72,11 @@ enum SupermavenButtonStatus {
impl Render for InlineCompletionButton { impl Render for InlineCompletionButton {
fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement { fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
// Return empty div if AI is disabled
if DisableAiSettings::get_global(cx).disable_ai {
return div();
}
let all_language_settings = all_language_settings(None, cx); let all_language_settings = all_language_settings(None, cx);
match all_language_settings.edit_predictions.provider { match all_language_settings.edit_predictions.provider {

View file

@ -2072,6 +2072,21 @@ impl Buffer {
self.text.push_transaction(transaction, now); self.text.push_transaction(transaction, now);
} }
/// Differs from `push_transaction` in that it does not clear the redo
/// stack. Intended to be used to create a parent transaction to merge
/// potential child transactions into.
///
/// The caller is responsible for removing it from the undo history using
/// `forget_transaction` if no edits are merged into it. Otherwise, if edits
/// are merged into this transaction, the caller is responsible for ensuring
/// the redo stack is cleared. The easiest way to ensure the redo stack is
/// cleared is to create transactions with the usual `start_transaction` and
/// `end_transaction` methods and merging the resulting transactions into
/// the transaction created by this method
pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
self.text.push_empty_transaction(now)
}
/// Prevent the last transaction from being grouped with any subsequent transactions, /// Prevent the last transaction from being grouped with any subsequent transactions,
/// even if they occur with the buffer's undo grouping duration. /// even if they occur with the buffer's undo grouping duration.
pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> { pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {

View file

@ -158,17 +158,17 @@ impl DiagnosticSet {
}); });
if reversed { if reversed {
cursor.prev(buffer); cursor.prev();
} else { } else {
cursor.next(buffer); cursor.next();
} }
iter::from_fn({ iter::from_fn({
move || { move || {
if let Some(diagnostic) = cursor.item() { if let Some(diagnostic) = cursor.item() {
if reversed { if reversed {
cursor.prev(buffer); cursor.prev();
} else { } else {
cursor.next(buffer); cursor.next();
} }
Some(diagnostic.resolve(buffer)) Some(diagnostic.resolve(buffer))
} else { } else {

View file

@ -297,10 +297,10 @@ impl SyntaxSnapshot {
let mut first_edit_ix_for_depth = 0; let mut first_edit_ix_for_depth = 0;
let mut prev_depth = 0; let mut prev_depth = 0;
let mut cursor = self.layers.cursor::<SyntaxLayerSummary>(text); let mut cursor = self.layers.cursor::<SyntaxLayerSummary>(text);
cursor.next(text); cursor.next();
'outer: loop { 'outer: loop {
let depth = cursor.end(text).max_depth; let depth = cursor.end().max_depth;
if depth > prev_depth { if depth > prev_depth {
first_edit_ix_for_depth = 0; first_edit_ix_for_depth = 0;
prev_depth = depth; prev_depth = depth;
@ -313,7 +313,7 @@ impl SyntaxSnapshot {
position: edit_range.start, position: edit_range.start,
}; };
if target.cmp(cursor.start(), text).is_gt() { if target.cmp(cursor.start(), text).is_gt() {
let slice = cursor.slice(&target, Bias::Left, text); let slice = cursor.slice(&target, Bias::Left);
layers.append(slice, text); layers.append(slice, text);
} }
} }
@ -327,7 +327,6 @@ impl SyntaxSnapshot {
language: None, language: None,
}, },
Bias::Left, Bias::Left,
text,
); );
layers.append(slice, text); layers.append(slice, text);
continue; continue;
@ -394,10 +393,10 @@ impl SyntaxSnapshot {
} }
layers.push(layer, text); layers.push(layer, text);
cursor.next(text); cursor.next();
} }
layers.append(cursor.suffix(text), text); layers.append(cursor.suffix(), text);
drop(cursor); drop(cursor);
self.layers = layers; self.layers = layers;
} }
@ -420,7 +419,7 @@ impl SyntaxSnapshot {
let mut cursor = self let mut cursor = self
.layers .layers
.filter::<_, ()>(text, |summary| summary.contains_unknown_injections); .filter::<_, ()>(text, |summary| summary.contains_unknown_injections);
cursor.next(text); cursor.next();
while let Some(layer) = cursor.item() { while let Some(layer) = cursor.item() {
let SyntaxLayerContent::Pending { language_name } = &layer.content else { let SyntaxLayerContent::Pending { language_name } = &layer.content else {
unreachable!() unreachable!()
@ -436,7 +435,7 @@ impl SyntaxSnapshot {
resolved_injection_ranges.push(range); resolved_injection_ranges.push(range);
} }
cursor.next(text); cursor.next();
} }
drop(cursor); drop(cursor);
@ -469,7 +468,7 @@ impl SyntaxSnapshot {
let max_depth = self.layers.summary().max_depth; let max_depth = self.layers.summary().max_depth;
let mut cursor = self.layers.cursor::<SyntaxLayerSummary>(text); let mut cursor = self.layers.cursor::<SyntaxLayerSummary>(text);
cursor.next(text); cursor.next();
let mut layers = SumTree::new(text); let mut layers = SumTree::new(text);
let mut changed_regions = ChangeRegionSet::default(); let mut changed_regions = ChangeRegionSet::default();
@ -514,7 +513,7 @@ impl SyntaxSnapshot {
}; };
let mut done = cursor.item().is_none(); let mut done = cursor.item().is_none();
while !done && position.cmp(&cursor.end(text), text).is_gt() { while !done && position.cmp(&cursor.end(), text).is_gt() {
done = true; done = true;
let bounded_position = SyntaxLayerPositionBeforeChange { let bounded_position = SyntaxLayerPositionBeforeChange {
@ -522,16 +521,16 @@ impl SyntaxSnapshot {
change: changed_regions.start_position(), change: changed_regions.start_position(),
}; };
if bounded_position.cmp(cursor.start(), text).is_gt() { if bounded_position.cmp(cursor.start(), text).is_gt() {
let slice = cursor.slice(&bounded_position, Bias::Left, text); let slice = cursor.slice(&bounded_position, Bias::Left);
if !slice.is_empty() { if !slice.is_empty() {
layers.append(slice, text); layers.append(slice, text);
if changed_regions.prune(cursor.end(text), text) { if changed_regions.prune(cursor.end(), text) {
done = false; done = false;
} }
} }
} }
while position.cmp(&cursor.end(text), text).is_gt() { while position.cmp(&cursor.end(), text).is_gt() {
let Some(layer) = cursor.item() else { break }; let Some(layer) = cursor.item() else { break };
if changed_regions.intersects(layer, text) { if changed_regions.intersects(layer, text) {
@ -555,8 +554,8 @@ impl SyntaxSnapshot {
layers.push(layer.clone(), text); layers.push(layer.clone(), text);
} }
cursor.next(text); cursor.next();
if changed_regions.prune(cursor.end(text), text) { if changed_regions.prune(cursor.end(), text) {
done = false; done = false;
} }
} }
@ -572,7 +571,7 @@ impl SyntaxSnapshot {
if layer.range.to_offset(text) == (step_start_byte..step_end_byte) if layer.range.to_offset(text) == (step_start_byte..step_end_byte)
&& layer.content.language_id() == step.language.id() && layer.content.language_id() == step.language.id()
{ {
cursor.next(text); cursor.next();
} else { } else {
old_layer = None; old_layer = None;
} }
@ -918,7 +917,7 @@ impl SyntaxSnapshot {
} }
}); });
cursor.next(buffer); cursor.next();
iter::from_fn(move || { iter::from_fn(move || {
while let Some(layer) = cursor.item() { while let Some(layer) = cursor.item() {
let mut info = None; let mut info = None;
@ -940,7 +939,7 @@ impl SyntaxSnapshot {
}); });
} }
} }
cursor.next(buffer); cursor.next();
if info.is_some() { if info.is_some() {
return info; return info;
} }

View file

@ -10,25 +10,21 @@ use http_client::Result;
use parking_lot::Mutex; use parking_lot::Mutex;
use std::sync::Arc; use std::sync::Arc;
pub fn language_model_id() -> LanguageModelId { #[derive(Clone)]
LanguageModelId::from("fake".to_string()) pub struct FakeLanguageModelProvider {
id: LanguageModelProviderId,
name: LanguageModelProviderName,
} }
pub fn language_model_name() -> LanguageModelName { impl Default for FakeLanguageModelProvider {
LanguageModelName::from("Fake".to_string()) fn default() -> Self {
Self {
id: LanguageModelProviderId::from("fake".to_string()),
name: LanguageModelProviderName::from("Fake".to_string()),
}
}
} }
pub fn provider_id() -> LanguageModelProviderId {
LanguageModelProviderId::from("fake".to_string())
}
pub fn provider_name() -> LanguageModelProviderName {
LanguageModelProviderName::from("Fake".to_string())
}
#[derive(Clone, Default)]
pub struct FakeLanguageModelProvider;
impl LanguageModelProviderState for FakeLanguageModelProvider { impl LanguageModelProviderState for FakeLanguageModelProvider {
type ObservableEntity = (); type ObservableEntity = ();
@ -39,11 +35,11 @@ impl LanguageModelProviderState for FakeLanguageModelProvider {
impl LanguageModelProvider for FakeLanguageModelProvider { impl LanguageModelProvider for FakeLanguageModelProvider {
fn id(&self) -> LanguageModelProviderId { fn id(&self) -> LanguageModelProviderId {
provider_id() self.id.clone()
} }
fn name(&self) -> LanguageModelProviderName { fn name(&self) -> LanguageModelProviderName {
provider_name() self.name.clone()
} }
fn default_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> { fn default_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
@ -76,6 +72,10 @@ impl LanguageModelProvider for FakeLanguageModelProvider {
} }
impl FakeLanguageModelProvider { impl FakeLanguageModelProvider {
pub fn new(id: LanguageModelProviderId, name: LanguageModelProviderName) -> Self {
Self { id, name }
}
pub fn test_model(&self) -> FakeLanguageModel { pub fn test_model(&self) -> FakeLanguageModel {
FakeLanguageModel::default() FakeLanguageModel::default()
} }
@ -89,11 +89,22 @@ pub struct ToolUseRequest {
pub schema: serde_json::Value, pub schema: serde_json::Value,
} }
#[derive(Default)]
pub struct FakeLanguageModel { pub struct FakeLanguageModel {
provider_id: LanguageModelProviderId,
provider_name: LanguageModelProviderName,
current_completion_txs: Mutex<Vec<(LanguageModelRequest, mpsc::UnboundedSender<String>)>>, current_completion_txs: Mutex<Vec<(LanguageModelRequest, mpsc::UnboundedSender<String>)>>,
} }
impl Default for FakeLanguageModel {
fn default() -> Self {
Self {
provider_id: LanguageModelProviderId::from("fake".to_string()),
provider_name: LanguageModelProviderName::from("Fake".to_string()),
current_completion_txs: Mutex::new(Vec::new()),
}
}
}
impl FakeLanguageModel { impl FakeLanguageModel {
pub fn pending_completions(&self) -> Vec<LanguageModelRequest> { pub fn pending_completions(&self) -> Vec<LanguageModelRequest> {
self.current_completion_txs self.current_completion_txs
@ -138,19 +149,19 @@ impl FakeLanguageModel {
impl LanguageModel for FakeLanguageModel { impl LanguageModel for FakeLanguageModel {
fn id(&self) -> LanguageModelId { fn id(&self) -> LanguageModelId {
language_model_id() LanguageModelId::from("fake".to_string())
} }
fn name(&self) -> LanguageModelName { fn name(&self) -> LanguageModelName {
language_model_name() LanguageModelName::from("Fake".to_string())
} }
fn provider_id(&self) -> LanguageModelProviderId { fn provider_id(&self) -> LanguageModelProviderId {
provider_id() self.provider_id.clone()
} }
fn provider_name(&self) -> LanguageModelProviderName { fn provider_name(&self) -> LanguageModelProviderName {
provider_name() self.provider_name.clone()
} }
fn supports_tools(&self) -> bool { fn supports_tools(&self) -> bool {

View file

@ -735,6 +735,18 @@ impl From<String> for LanguageModelProviderName {
} }
} }
impl From<Arc<str>> for LanguageModelProviderId {
fn from(value: Arc<str>) -> Self {
Self(SharedString::from(value))
}
}
impl From<Arc<str>> for LanguageModelProviderName {
fn from(value: Arc<str>) -> Self {
Self(SharedString::from(value))
}
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;

View file

@ -125,7 +125,7 @@ impl LanguageModelRegistry {
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]
pub fn test(cx: &mut App) -> crate::fake_provider::FakeLanguageModelProvider { pub fn test(cx: &mut App) -> crate::fake_provider::FakeLanguageModelProvider {
let fake_provider = crate::fake_provider::FakeLanguageModelProvider; let fake_provider = crate::fake_provider::FakeLanguageModelProvider::default();
let registry = cx.new(|cx| { let registry = cx.new(|cx| {
let mut registry = Self::default(); let mut registry = Self::default();
registry.register_provider(fake_provider.clone(), cx); registry.register_provider(fake_provider.clone(), cx);
@ -403,16 +403,17 @@ mod tests {
fn test_register_providers(cx: &mut App) { fn test_register_providers(cx: &mut App) {
let registry = cx.new(|_| LanguageModelRegistry::default()); let registry = cx.new(|_| LanguageModelRegistry::default());
let provider = FakeLanguageModelProvider::default();
registry.update(cx, |registry, cx| { registry.update(cx, |registry, cx| {
registry.register_provider(FakeLanguageModelProvider, cx); registry.register_provider(provider.clone(), cx);
}); });
let providers = registry.read(cx).providers(); let providers = registry.read(cx).providers();
assert_eq!(providers.len(), 1); assert_eq!(providers.len(), 1);
assert_eq!(providers[0].id(), crate::fake_provider::provider_id()); assert_eq!(providers[0].id(), provider.id());
registry.update(cx, |registry, cx| { registry.update(cx, |registry, cx| {
registry.unregister_provider(crate::fake_provider::provider_id(), cx); registry.unregister_provider(provider.id(), cx);
}); });
let providers = registry.read(cx).providers(); let providers = registry.read(cx).providers();

View file

@ -26,10 +26,10 @@ client.workspace = true
collections.workspace = true collections.workspace = true
component.workspace = true component.workspace = true
credentials_provider.workspace = true credentials_provider.workspace = true
convert_case.workspace = true
copilot.workspace = true copilot.workspace = true
deepseek = { workspace = true, features = ["schemars"] } deepseek = { workspace = true, features = ["schemars"] }
editor.workspace = true editor.workspace = true
fs.workspace = true
futures.workspace = true futures.workspace = true
google_ai = { workspace = true, features = ["schemars"] } google_ai = { workspace = true, features = ["schemars"] }
gpui.workspace = true gpui.workspace = true

View file

@ -1,8 +1,10 @@
use std::sync::Arc; use std::sync::Arc;
use ::settings::{Settings, SettingsStore};
use client::{Client, UserStore}; use client::{Client, UserStore};
use collections::HashSet;
use gpui::{App, Context, Entity}; use gpui::{App, Context, Entity};
use language_model::LanguageModelRegistry; use language_model::{LanguageModelProviderId, LanguageModelRegistry};
use provider::deepseek::DeepSeekLanguageModelProvider; use provider::deepseek::DeepSeekLanguageModelProvider;
pub mod provider; pub mod provider;
@ -18,17 +20,81 @@ use crate::provider::lmstudio::LmStudioLanguageModelProvider;
use crate::provider::mistral::MistralLanguageModelProvider; use crate::provider::mistral::MistralLanguageModelProvider;
use crate::provider::ollama::OllamaLanguageModelProvider; use crate::provider::ollama::OllamaLanguageModelProvider;
use crate::provider::open_ai::OpenAiLanguageModelProvider; use crate::provider::open_ai::OpenAiLanguageModelProvider;
use crate::provider::open_ai_compatible::OpenAiCompatibleLanguageModelProvider;
use crate::provider::open_router::OpenRouterLanguageModelProvider; use crate::provider::open_router::OpenRouterLanguageModelProvider;
use crate::provider::vercel::VercelLanguageModelProvider; use crate::provider::vercel::VercelLanguageModelProvider;
use crate::provider::x_ai::XAiLanguageModelProvider; use crate::provider::x_ai::XAiLanguageModelProvider;
pub use crate::settings::*; pub use crate::settings::*;
pub fn init(user_store: Entity<UserStore>, client: Arc<Client>, cx: &mut App) { pub fn init(user_store: Entity<UserStore>, client: Arc<Client>, cx: &mut App) {
crate::settings::init(cx); crate::settings::init_settings(cx);
let registry = LanguageModelRegistry::global(cx); let registry = LanguageModelRegistry::global(cx);
registry.update(cx, |registry, cx| { registry.update(cx, |registry, cx| {
register_language_model_providers(registry, user_store, client, cx); register_language_model_providers(registry, user_store, client.clone(), cx);
}); });
let mut openai_compatible_providers = AllLanguageModelSettings::get_global(cx)
.openai_compatible
.keys()
.cloned()
.collect::<HashSet<_>>();
registry.update(cx, |registry, cx| {
register_openai_compatible_providers(
registry,
&HashSet::default(),
&openai_compatible_providers,
client.clone(),
cx,
);
});
cx.observe_global::<SettingsStore>(move |cx| {
let openai_compatible_providers_new = AllLanguageModelSettings::get_global(cx)
.openai_compatible
.keys()
.cloned()
.collect::<HashSet<_>>();
if openai_compatible_providers_new != openai_compatible_providers {
registry.update(cx, |registry, cx| {
register_openai_compatible_providers(
registry,
&openai_compatible_providers,
&openai_compatible_providers_new,
client.clone(),
cx,
);
});
openai_compatible_providers = openai_compatible_providers_new;
}
})
.detach();
}
fn register_openai_compatible_providers(
registry: &mut LanguageModelRegistry,
old: &HashSet<Arc<str>>,
new: &HashSet<Arc<str>>,
client: Arc<Client>,
cx: &mut Context<LanguageModelRegistry>,
) {
for provider_id in old {
if !new.contains(provider_id) {
registry.unregister_provider(LanguageModelProviderId::from(provider_id.clone()), cx);
}
}
for provider_id in new {
if !old.contains(provider_id) {
registry.register_provider(
OpenAiCompatibleLanguageModelProvider::new(
provider_id.clone(),
client.http_client(),
cx,
),
cx,
);
}
}
} }
fn register_language_model_providers( fn register_language_model_providers(

View file

@ -8,6 +8,7 @@ pub mod lmstudio;
pub mod mistral; pub mod mistral;
pub mod ollama; pub mod ollama;
pub mod open_ai; pub mod open_ai;
pub mod open_ai_compatible;
pub mod open_router; pub mod open_router;
pub mod vercel; pub mod vercel;
pub mod x_ai; pub mod x_ai;

View file

@ -243,7 +243,7 @@ impl State {
pub struct BedrockLanguageModelProvider { pub struct BedrockLanguageModelProvider {
http_client: AwsHttpClient, http_client: AwsHttpClient,
handler: tokio::runtime::Handle, handle: tokio::runtime::Handle,
state: gpui::Entity<State>, state: gpui::Entity<State>,
} }
@ -258,13 +258,9 @@ impl BedrockLanguageModelProvider {
}), }),
}); });
let tokio_handle = Tokio::handle(cx);
let coerced_client = AwsHttpClient::new(http_client.clone(), tokio_handle.clone());
Self { Self {
http_client: coerced_client, http_client: AwsHttpClient::new(http_client.clone()),
handler: tokio_handle.clone(), handle: Tokio::handle(cx),
state, state,
} }
} }
@ -274,7 +270,7 @@ impl BedrockLanguageModelProvider {
id: LanguageModelId::from(model.id().to_string()), id: LanguageModelId::from(model.id().to_string()),
model, model,
http_client: self.http_client.clone(), http_client: self.http_client.clone(),
handler: self.handler.clone(), handle: self.handle.clone(),
state: self.state.clone(), state: self.state.clone(),
client: OnceCell::new(), client: OnceCell::new(),
request_limiter: RateLimiter::new(4), request_limiter: RateLimiter::new(4),
@ -375,7 +371,7 @@ struct BedrockModel {
id: LanguageModelId, id: LanguageModelId,
model: Model, model: Model,
http_client: AwsHttpClient, http_client: AwsHttpClient,
handler: tokio::runtime::Handle, handle: tokio::runtime::Handle,
client: OnceCell<BedrockClient>, client: OnceCell<BedrockClient>,
state: gpui::Entity<State>, state: gpui::Entity<State>,
request_limiter: RateLimiter, request_limiter: RateLimiter,
@ -447,7 +443,7 @@ impl BedrockModel {
} }
} }
let config = self.handler.block_on(config_builder.load()); let config = self.handle.block_on(config_builder.load());
anyhow::Ok(BedrockClient::new(&config)) anyhow::Ok(BedrockClient::new(&config))
}) })
.context("initializing Bedrock client")?; .context("initializing Bedrock client")?;

View file

@ -2,7 +2,6 @@ use anyhow::{Context as _, Result, anyhow};
use collections::{BTreeMap, HashMap}; use collections::{BTreeMap, HashMap};
use credentials_provider::CredentialsProvider; use credentials_provider::CredentialsProvider;
use fs::Fs;
use futures::Stream; use futures::Stream;
use futures::{FutureExt, StreamExt, future::BoxFuture}; use futures::{FutureExt, StreamExt, future::BoxFuture};
use gpui::{AnyView, App, AsyncApp, Context, Entity, Subscription, Task, Window}; use gpui::{AnyView, App, AsyncApp, Context, Entity, Subscription, Task, Window};
@ -18,7 +17,7 @@ use menu;
use open_ai::{ImageUrl, Model, ResponseStreamEvent, stream_completion}; use open_ai::{ImageUrl, Model, ResponseStreamEvent, stream_completion};
use schemars::JsonSchema; use schemars::JsonSchema;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore, update_settings_file}; use settings::{Settings, SettingsStore};
use std::pin::Pin; use std::pin::Pin;
use std::str::FromStr as _; use std::str::FromStr as _;
use std::sync::Arc; use std::sync::Arc;
@ -28,7 +27,6 @@ use ui::{ElevationIndex, List, Tooltip, prelude::*};
use ui_input::SingleLineInput; use ui_input::SingleLineInput;
use util::ResultExt; use util::ResultExt;
use crate::OpenAiSettingsContent;
use crate::{AllLanguageModelSettings, ui::InstructionListItem}; use crate::{AllLanguageModelSettings, ui::InstructionListItem};
const PROVIDER_ID: LanguageModelProviderId = language_model::OPEN_AI_PROVIDER_ID; const PROVIDER_ID: LanguageModelProviderId = language_model::OPEN_AI_PROVIDER_ID;
@ -621,26 +619,32 @@ struct RawToolCall {
arguments: String, arguments: String,
} }
pub(crate) fn collect_tiktoken_messages(
request: LanguageModelRequest,
) -> Vec<tiktoken_rs::ChatCompletionRequestMessage> {
request
.messages
.into_iter()
.map(|message| tiktoken_rs::ChatCompletionRequestMessage {
role: match message.role {
Role::User => "user".into(),
Role::Assistant => "assistant".into(),
Role::System => "system".into(),
},
content: Some(message.string_contents()),
name: None,
function_call: None,
})
.collect::<Vec<_>>()
}
pub fn count_open_ai_tokens( pub fn count_open_ai_tokens(
request: LanguageModelRequest, request: LanguageModelRequest,
model: Model, model: Model,
cx: &App, cx: &App,
) -> BoxFuture<'static, Result<u64>> { ) -> BoxFuture<'static, Result<u64>> {
cx.background_spawn(async move { cx.background_spawn(async move {
let messages = request let messages = collect_tiktoken_messages(request);
.messages
.into_iter()
.map(|message| tiktoken_rs::ChatCompletionRequestMessage {
role: match message.role {
Role::User => "user".into(),
Role::Assistant => "assistant".into(),
Role::System => "system".into(),
},
content: Some(message.string_contents()),
name: None,
function_call: None,
})
.collect::<Vec<_>>();
match model { match model {
Model::Custom { max_tokens, .. } => { Model::Custom { max_tokens, .. } => {
@ -678,7 +682,6 @@ pub fn count_open_ai_tokens(
struct ConfigurationView { struct ConfigurationView {
api_key_editor: Entity<SingleLineInput>, api_key_editor: Entity<SingleLineInput>,
api_url_editor: Entity<SingleLineInput>,
state: gpui::Entity<State>, state: gpui::Entity<State>,
load_credentials_task: Option<Task<()>>, load_credentials_task: Option<Task<()>>,
} }
@ -691,23 +694,6 @@ impl ConfigurationView {
cx, cx,
"sk-000000000000000000000000000000000000000000000000", "sk-000000000000000000000000000000000000000000000000",
) )
.label("API key")
});
let api_url = AllLanguageModelSettings::get_global(cx)
.openai
.api_url
.clone();
let api_url_editor = cx.new(|cx| {
let input = SingleLineInput::new(window, cx, open_ai::OPEN_AI_API_URL).label("API URL");
if !api_url.is_empty() {
input.editor.update(cx, |editor, cx| {
editor.set_text(&*api_url, window, cx);
});
}
input
}); });
cx.observe(&state, |_, _, cx| { cx.observe(&state, |_, _, cx| {
@ -735,7 +721,6 @@ impl ConfigurationView {
Self { Self {
api_key_editor, api_key_editor,
api_url_editor,
state, state,
load_credentials_task, load_credentials_task,
} }
@ -783,57 +768,6 @@ impl ConfigurationView {
cx.notify(); cx.notify();
} }
fn save_api_url(&mut self, cx: &mut Context<Self>) {
let api_url = self
.api_url_editor
.read(cx)
.editor()
.read(cx)
.text(cx)
.trim()
.to_string();
let current_url = AllLanguageModelSettings::get_global(cx)
.openai
.api_url
.clone();
let effective_current_url = if current_url.is_empty() {
open_ai::OPEN_AI_API_URL
} else {
&current_url
};
if !api_url.is_empty() && api_url != effective_current_url {
let fs = <dyn Fs>::global(cx);
update_settings_file::<AllLanguageModelSettings>(fs, cx, move |settings, _| {
if let Some(settings) = settings.openai.as_mut() {
settings.api_url = Some(api_url.clone());
} else {
settings.openai = Some(OpenAiSettingsContent {
api_url: Some(api_url.clone()),
available_models: None,
});
}
});
}
}
fn reset_api_url(&mut self, window: &mut Window, cx: &mut Context<Self>) {
self.api_url_editor.update(cx, |input, cx| {
input.editor.update(cx, |editor, cx| {
editor.set_text("", window, cx);
});
});
let fs = <dyn Fs>::global(cx);
update_settings_file::<AllLanguageModelSettings>(fs, cx, |settings, _cx| {
if let Some(settings) = settings.openai.as_mut() {
settings.api_url = None;
}
});
cx.notify();
}
fn should_render_editor(&self, cx: &mut Context<Self>) -> bool { fn should_render_editor(&self, cx: &mut Context<Self>) -> bool {
!self.state.read(cx).is_authenticated() !self.state.read(cx).is_authenticated()
} }
@ -846,7 +780,6 @@ impl Render for ConfigurationView {
let api_key_section = if self.should_render_editor(cx) { let api_key_section = if self.should_render_editor(cx) {
v_flex() v_flex()
.on_action(cx.listener(Self::save_api_key)) .on_action(cx.listener(Self::save_api_key))
.child(Label::new("To use Zed's assistant with OpenAI, you need to add an API key. Follow these steps:")) .child(Label::new("To use Zed's assistant with OpenAI, you need to add an API key. Follow these steps:"))
.child( .child(
List::new() List::new()
@ -910,59 +843,34 @@ impl Render for ConfigurationView {
.into_any() .into_any()
}; };
let custom_api_url_set = let compatible_api_section = h_flex()
AllLanguageModelSettings::get_global(cx).openai.api_url != open_ai::OPEN_AI_API_URL; .mt_1p5()
.gap_0p5()
let api_url_section = if custom_api_url_set { .flex_wrap()
h_flex() .when(self.should_render_editor(cx), |this| {
.mt_1() this.pt_1p5()
.p_1() .border_t_1()
.justify_between() .border_color(cx.theme().colors().border_variant)
.rounded_md() })
.border_1() .child(
.border_color(cx.theme().colors().border) h_flex()
.bg(cx.theme().colors().background) .gap_2()
.child( .child(
h_flex() Icon::new(IconName::Info)
.gap_1() .size(IconSize::XSmall)
.child(Icon::new(IconName::Check).color(Color::Success)) .color(Color::Muted),
.child(Label::new("Custom API URL configured.")), )
) .child(Label::new("Zed also supports OpenAI-compatible models.")),
.child( )
Button::new("reset-api-url", "Reset API URL") .child(
.label_size(LabelSize::Small) Button::new("docs", "Learn More")
.icon(IconName::Undo) .icon(IconName::ArrowUpRight)
.icon_size(IconSize::Small) .icon_size(IconSize::XSmall)
.icon_position(IconPosition::Start) .icon_color(Color::Muted)
.layer(ElevationIndex::ModalSurface) .on_click(move |_, _window, cx| {
.on_click( cx.open_url("https://zed.dev/docs/ai/configuration#openai-api-compatible")
cx.listener(|this, _, window, cx| this.reset_api_url(window, cx)), }),
), );
)
.into_any()
} else {
v_flex()
.on_action(cx.listener(|this, _: &menu::Confirm, _window, cx| {
this.save_api_url(cx);
cx.notify();
}))
.mt_2()
.pt_2()
.border_t_1()
.border_color(cx.theme().colors().border_variant)
.gap_1()
.child(
List::new()
.child(InstructionListItem::text_only(
"Optionally, you can change the base URL for the OpenAI API request.",
))
.child(InstructionListItem::text_only(
"Paste the new API endpoint below and hit enter",
)),
)
.child(self.api_url_editor.clone())
.into_any()
};
if self.load_credentials_task.is_some() { if self.load_credentials_task.is_some() {
div().child(Label::new("Loading credentials…")).into_any() div().child(Label::new("Loading credentials…")).into_any()
@ -970,7 +878,7 @@ impl Render for ConfigurationView {
v_flex() v_flex()
.size_full() .size_full()
.child(api_key_section) .child(api_key_section)
.child(api_url_section) .child(compatible_api_section)
.into_any() .into_any()
} }
} }

View file

@ -0,0 +1,522 @@
use anyhow::{Context as _, Result, anyhow};
use credentials_provider::CredentialsProvider;
use convert_case::{Case, Casing};
use futures::{FutureExt, StreamExt, future::BoxFuture};
use gpui::{AnyView, App, AsyncApp, Context, Entity, Subscription, Task, Window};
use http_client::HttpClient;
use language_model::{
AuthenticateError, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
LanguageModelToolChoice, RateLimiter,
};
use menu;
use open_ai::{ResponseStreamEvent, stream_completion};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore};
use std::sync::Arc;
use ui::{ElevationIndex, Tooltip, prelude::*};
use ui_input::SingleLineInput;
use util::ResultExt;
use crate::AllLanguageModelSettings;
use crate::provider::open_ai::{OpenAiEventMapper, into_open_ai};
/// Resolved settings for a single entry in the `openai_compatible` settings map.
#[derive(Default, Clone, Debug, PartialEq)]
pub struct OpenAiCompatibleSettings {
    /// Base URL requests are sent to; also used as the key under which
    /// credentials are stored in the system credential store.
    pub api_url: String,
    /// Models the user has declared for this provider.
    pub available_models: Vec<AvailableModel>,
}
/// A user-declared model for an OpenAI-compatible provider.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    /// Model identifier sent in API requests.
    pub name: String,
    /// Optional human-readable name; the UI falls back to `name` when absent.
    pub display_name: Option<String>,
    /// Maximum context window, in tokens. Also used to guess a tiktoken
    /// tokenizer in `count_tokens`.
    pub max_tokens: u64,
    /// Maximum output tokens, if the model constrains them.
    pub max_output_tokens: Option<u64>,
    /// Maximum completion tokens, if the model constrains them.
    pub max_completion_tokens: Option<u64>,
}
/// Language model provider backed by an arbitrary OpenAI-compatible endpoint,
/// configured through the `openai_compatible` settings map.
pub struct OpenAiCompatibleLanguageModelProvider {
    /// Settings key identifying this provider.
    id: LanguageModelProviderId,
    /// Display name; constructed from the same settings key as `id`.
    name: LanguageModelProviderName,
    http_client: Arc<dyn HttpClient>,
    /// Shared auth + settings state, observed by the configuration UI.
    state: gpui::Entity<State>,
}
/// Shared authentication and settings state for one OpenAI-compatible provider.
pub struct State {
    /// Settings key for this provider; used to re-resolve settings on change.
    id: Arc<str>,
    /// Environment variable consulted for an API key, derived from `id`
    /// (e.g. a provider id of `my-provider` yields `MY_PROVIDER_API_KEY`).
    env_var_name: Arc<str>,
    /// In-memory API key; `None` until `authenticate` or `set_api_key` runs.
    api_key: Option<String>,
    /// True when the key came from the environment rather than stored credentials.
    api_key_from_env: bool,
    /// Latest resolved settings for this provider.
    settings: OpenAiCompatibleSettings,
    /// Keeps the global-settings observer alive for the lifetime of this state.
    _subscription: Subscription,
}
impl State {
    /// Whether an API key is currently available, from either the credential
    /// store or the provider's environment variable.
    fn is_authenticated(&self) -> bool {
        self.api_key.is_some()
    }

    /// Clears the in-memory API key and deletes the stored credentials for
    /// this provider's API URL. Credential-store failures are logged but do
    /// not prevent the in-memory state from being cleared.
    fn reset_api_key(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
        let credentials_provider = <dyn CredentialsProvider>::global(cx);
        let api_url = self.settings.api_url.clone();
        cx.spawn(async move |this, cx| {
            credentials_provider
                .delete_credentials(&api_url, &cx)
                .await
                .log_err();
            this.update(cx, |this, cx| {
                this.api_key = None;
                this.api_key_from_env = false;
                cx.notify();
            })
        })
    }

    /// Stores `api_key` in memory and persists it in the credential store,
    /// keyed by this provider's API URL. Persistence failures are logged but
    /// the key is still usable for the current session.
    fn set_api_key(&mut self, api_key: String, cx: &mut Context<Self>) -> Task<Result<()>> {
        let credentials_provider = <dyn CredentialsProvider>::global(cx);
        let api_url = self.settings.api_url.clone();
        cx.spawn(async move |this, cx| {
            credentials_provider
                .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx)
                .await
                .log_err();
            this.update(cx, |this, cx| {
                this.api_key = Some(api_key);
                cx.notify();
            })
        })
    }

    /// Loads an API key, preferring the provider's environment variable over
    /// the credential store. No-op when already authenticated.
    fn authenticate(&self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
        if self.is_authenticated() {
            return Task::ready(Ok(()));
        }

        let credentials_provider = <dyn CredentialsProvider>::global(cx);
        let env_var_name = self.env_var_name.clone();
        let api_url = self.settings.api_url.clone();
        let provider_id = self.id.clone();
        cx.spawn(async move |this, cx| {
            let (api_key, from_env) = if let Ok(api_key) = std::env::var(env_var_name.as_ref()) {
                (api_key, true)
            } else {
                let (_, api_key) = credentials_provider
                    .read_credentials(&api_url, &cx)
                    .await?
                    .ok_or(AuthenticateError::CredentialsNotFound)?;
                (
                    // Fix: the message was a plain literal containing
                    // `{PROVIDER_NAME}`, which was never interpolated (and no
                    // such constant exists in this file). Interpolate the
                    // provider id instead.
                    String::from_utf8(api_key)
                        .with_context(|| format!("invalid {provider_id} API key"))?,
                    false,
                )
            };
            this.update(cx, |this, cx| {
                this.api_key = Some(api_key);
                this.api_key_from_env = from_env;
                cx.notify();
            })?;

            Ok(())
        })
    }
}
impl OpenAiCompatibleLanguageModelProvider {
    /// Creates a provider for the `openai_compatible` settings entry named `id`.
    /// The display name is the settings key itself, and the API key environment
    /// variable is derived from it (constant-cased with an `_API_KEY` suffix).
    pub fn new(id: Arc<str>, http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
        // Looks up this provider's entry in the `openai_compatible` settings map.
        fn resolve_settings<'a>(id: &'a str, cx: &'a App) -> Option<&'a OpenAiCompatibleSettings> {
            AllLanguageModelSettings::get_global(cx)
                .openai_compatible
                .get(id)
        }

        let state = cx.new(|cx| State {
            id: id.clone(),
            // e.g. an id of "my-provider" yields MY_PROVIDER_API_KEY.
            env_var_name: format!("{}_API_KEY", id).to_case(Case::Constant).into(),
            settings: resolve_settings(&id, cx).cloned().unwrap_or_default(),
            api_key: None,
            api_key_from_env: false,
            // Re-resolve this provider's settings whenever the global settings
            // store changes; notify observers only when they actually differ.
            _subscription: cx.observe_global::<SettingsStore>(|this: &mut State, cx| {
                let Some(settings) = resolve_settings(&this.id, cx) else {
                    return;
                };
                if &this.settings != settings {
                    this.settings = settings.clone();
                    cx.notify();
                }
            }),
        });

        Self {
            id: id.clone().into(),
            name: id.into(),
            http_client,
            state,
        }
    }

    /// Wraps a settings-declared model in a `LanguageModel` sharing this
    /// provider's state and HTTP client.
    fn create_language_model(&self, model: AvailableModel) -> Arc<dyn LanguageModel> {
        Arc::new(OpenAiCompatibleLanguageModel {
            id: LanguageModelId::from(model.name.clone()),
            provider_id: self.id.clone(),
            provider_name: self.name.clone(),
            model,
            state: self.state.clone(),
            http_client: self.http_client.clone(),
            // Same limit as the other providers in this crate (see Bedrock above).
            request_limiter: RateLimiter::new(4),
        })
    }
}
impl LanguageModelProviderState for OpenAiCompatibleLanguageModelProvider {
    type ObservableEntity = State;

    /// Exposes the shared `State` entity so observers (e.g. the configuration
    /// view) can react to auth and settings changes.
    fn observable_entity(&self) -> Option<gpui::Entity<Self::ObservableEntity>> {
        let state = self.state.clone();
        Some(state)
    }
}
impl LanguageModelProvider for OpenAiCompatibleLanguageModelProvider {
    fn id(&self) -> LanguageModelProviderId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelProviderName {
        self.name.clone()
    }

    fn icon(&self) -> IconName {
        IconName::AiOpenAiCompat
    }

    /// The default model is simply the first one listed in settings, if any.
    fn default_model(&self, cx: &App) -> Option<Arc<dyn LanguageModel>> {
        let model = self.state.read(cx).settings.available_models.first()?;
        Some(self.create_language_model(model.clone()))
    }

    fn default_fast_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
        None
    }

    /// One `LanguageModel` per settings-declared model, in settings order.
    fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
        let state = self.state.read(cx);
        let mut models = Vec::with_capacity(state.settings.available_models.len());
        for model in &state.settings.available_models {
            models.push(self.create_language_model(model.clone()));
        }
        models
    }

    fn is_authenticated(&self, cx: &App) -> bool {
        self.state.read(cx).is_authenticated()
    }

    fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
        self.state.update(cx, |state, cx| state.authenticate(cx))
    }

    fn configuration_view(&self, window: &mut Window, cx: &mut App) -> AnyView {
        let view = cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx));
        view.into()
    }

    fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>> {
        self.state.update(cx, |state, cx| state.reset_api_key(cx))
    }
}
/// A single model served by an OpenAI-compatible provider.
pub struct OpenAiCompatibleLanguageModel {
    id: LanguageModelId,
    provider_id: LanguageModelProviderId,
    provider_name: LanguageModelProviderName,
    /// The settings entry this model was created from.
    model: AvailableModel,
    /// Shared auth/settings state owned by the provider.
    state: gpui::Entity<State>,
    http_client: Arc<dyn HttpClient>,
    /// Rate-limits requests issued through `stream_completion`.
    request_limiter: RateLimiter,
}
impl OpenAiCompatibleLanguageModel {
    /// Issues a streaming completion request against the configured endpoint,
    /// gated through the provider's rate limiter. Fails with `NoApiKey` when
    /// unauthenticated, or "App state dropped" when the state entity is gone.
    fn stream_completion(
        &self,
        request: open_ai::Request,
        cx: &AsyncApp,
    ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<ResponseStreamEvent>>>>
    {
        let http_client = self.http_client.clone();
        // Snapshot the key and URL up front; the async block below must own
        // its data rather than borrow `self`.
        let Ok((api_key, api_url)) = cx.read_entity(&self.state, |state, _| {
            (state.api_key.clone(), state.settings.api_url.clone())
        }) else {
            return futures::future::ready(Err(anyhow!("App state dropped"))).boxed();
        };

        let provider = self.provider_name.clone();
        let future = self.request_limiter.stream(async move {
            let Some(api_key) = api_key else {
                return Err(LanguageModelCompletionError::NoApiKey { provider });
            };
            let request = stream_completion(http_client.as_ref(), &api_url, &api_key, request);
            let response = request.await?;
            Ok(response)
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }
}
impl LanguageModel for OpenAiCompatibleLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    /// Display name; falls back to the raw model name when no `display_name`
    /// was configured.
    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(
            self.model
                .display_name
                .clone()
                .unwrap_or_else(|| self.model.name.clone()),
        )
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        self.provider_id.clone()
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        self.provider_name.clone()
    }

    fn supports_tools(&self) -> bool {
        true
    }

    fn supports_images(&self) -> bool {
        false
    }

    // All tool-choice modes are accepted for compatible providers.
    fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
        match choice {
            LanguageModelToolChoice::Auto => true,
            LanguageModelToolChoice::Any => true,
            LanguageModelToolChoice::None => true,
        }
    }

    // NOTE(review): telemetry groups these models under the `openai/` family
    // rather than the specific compatible provider's id — confirm this is
    // intended.
    fn telemetry_id(&self) -> String {
        format!("openai/{}", self.model.name)
    }

    fn max_token_count(&self) -> u64 {
        self.model.max_tokens
    }

    fn max_output_tokens(&self) -> Option<u64> {
        self.model.max_output_tokens
    }

    /// Estimates token usage off the main thread with tiktoken, choosing a
    /// tokenizer heuristically from the configured context size.
    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &App,
    ) -> BoxFuture<'static, Result<u64>> {
        let max_token_count = self.max_token_count();
        cx.background_spawn(async move {
            let messages = super::open_ai::collect_tiktoken_messages(request);
            let model = if max_token_count >= 100_000 {
                // If the max tokens is 100k or more, it is likely the o200k_base tokenizer from gpt4o
                "gpt-4o"
            } else {
                // Otherwise fallback to gpt-4, since only cl100k_base and o200k_base are
                // supported with this tiktoken method
                "gpt-4"
            };
            tiktoken_rs::num_tokens_from_messages(model, &messages).map(|tokens| tokens as u64)
        })
        .boxed()
    }

    /// Converts the request to OpenAI wire format and maps the response stream
    /// through `OpenAiEventMapper` into completion events.
    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<
            futures::stream::BoxStream<
                'static,
                Result<LanguageModelCompletionEvent, LanguageModelCompletionError>,
            >,
            LanguageModelCompletionError,
        >,
    > {
        let request = into_open_ai(request, &self.model.name, true, self.max_output_tokens());
        let completions = self.stream_completion(request, cx);
        async move {
            let mapper = OpenAiEventMapper::new();
            Ok(mapper.map_stream(completions.await?).boxed())
        }
        .boxed()
    }
}
/// Settings-panel view for entering or resetting this provider's API key.
struct ConfigurationView {
    api_key_editor: Entity<SingleLineInput>,
    /// The provider state this view reads and mutates.
    state: gpui::Entity<State>,
    /// `Some` while credentials are loading; drives the "Loading…" render state.
    load_credentials_task: Option<Task<()>>,
}
impl ConfigurationView {
    fn new(state: gpui::Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
        // Placeholder mimics the shape of a typical API key.
        let api_key_editor = cx.new(|cx| {
            SingleLineInput::new(
                window,
                cx,
                "000000000000000000000000000000000000000000000000000",
            )
        });

        // Re-render whenever the provider state (auth, settings) changes.
        cx.observe(&state, |_, _, cx| {
            cx.notify();
        })
        .detach();

        // Kick off credential loading; the task handle is cleared on
        // completion so `render` stops showing the loading state.
        let load_credentials_task = Some(cx.spawn_in(window, {
            let state = state.clone();
            async move |this, cx| {
                if let Some(task) = state
                    .update(cx, |state, cx| state.authenticate(cx))
                    .log_err()
                {
                    // We don't log an error, because "not signed in" is also an error.
                    let _ = task.await;
                }

                this.update(cx, |this, cx| {
                    this.load_credentials_task = None;
                    cx.notify();
                })
                .log_err();
            }
        }));

        Self {
            api_key_editor,
            state,
            load_credentials_task,
        }
    }

    /// Persists the editor's contents as the API key (bound to `menu::Confirm`).
    fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
        let api_key = self
            .api_key_editor
            .read(cx)
            .editor()
            .read(cx)
            .text(cx)
            .trim()
            .to_string();

        // Don't proceed if no API key is provided and we're not authenticated
        if api_key.is_empty() && !self.state.read(cx).is_authenticated() {
            return;
        }

        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state
                .update(cx, |state, cx| state.set_api_key(api_key, cx))?
                .await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }

    /// Clears the editor text and deletes the stored API key.
    fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context<Self>) {
        self.api_key_editor.update(cx, |input, cx| {
            input.editor.update(cx, |editor, cx| {
                editor.set_text("", window, cx);
            });
        });

        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state.update(cx, |state, cx| state.reset_api_key(cx))?.await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }

    /// The key editor is shown only while unauthenticated.
    fn should_render_editor(&self, cx: &mut Context<Self>) -> bool {
        !self.state.read(cx).is_authenticated()
    }
}
impl Render for ConfigurationView {
    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        let env_var_set = self.state.read(cx).api_key_from_env;
        let env_var_name = self.state.read(cx).env_var_name.clone();

        // Unauthenticated: an input form. Authenticated: a status row with a
        // reset button (disabled-in-spirit via tooltip when the key came from
        // the environment, since resetting can't unset an env var).
        let api_key_section = if self.should_render_editor(cx) {
            v_flex()
                .on_action(cx.listener(Self::save_api_key))
                .child(Label::new("To use Zed's assistant with an OpenAI compatible provider, you need to add an API key."))
                .child(
                    div()
                        .pt(DynamicSpacing::Base04.rems(cx))
                        .child(self.api_key_editor.clone())
                )
                .child(
                    Label::new(
                        format!("You can also assign the {env_var_name} environment variable and restart Zed."),
                    )
                    .size(LabelSize::Small).color(Color::Muted),
                )
                .into_any()
        } else {
            h_flex()
                .mt_1()
                .p_1()
                .justify_between()
                .rounded_md()
                .border_1()
                .border_color(cx.theme().colors().border)
                .bg(cx.theme().colors().background)
                .child(
                    h_flex()
                        .gap_1()
                        .child(Icon::new(IconName::Check).color(Color::Success))
                        .child(Label::new(if env_var_set {
                            format!("API key set in {env_var_name} environment variable.")
                        } else {
                            "API key configured.".to_string()
                        })),
                )
                .child(
                    Button::new("reset-api-key", "Reset API Key")
                        .label_size(LabelSize::Small)
                        .icon(IconName::Undo)
                        .icon_size(IconSize::Small)
                        .icon_position(IconPosition::Start)
                        .layer(ElevationIndex::ModalSurface)
                        .when(env_var_set, |this| {
                            this.tooltip(Tooltip::text(format!("To reset your API key, unset the {env_var_name} environment variable.")))
                        })
                        .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx))),
                )
                .into_any()
        };

        if self.load_credentials_task.is_some() {
            div().child(Label::new("Loading credentials…")).into_any()
        } else {
            v_flex().size_full().child(api_key_section).into_any()
        }
    }
}

View file

@ -1,4 +1,7 @@
use std::sync::Arc;
use anyhow::Result; use anyhow::Result;
use collections::HashMap;
use gpui::App; use gpui::App;
use schemars::JsonSchema; use schemars::JsonSchema;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -15,13 +18,14 @@ use crate::provider::{
mistral::MistralSettings, mistral::MistralSettings,
ollama::OllamaSettings, ollama::OllamaSettings,
open_ai::OpenAiSettings, open_ai::OpenAiSettings,
open_ai_compatible::OpenAiCompatibleSettings,
open_router::OpenRouterSettings, open_router::OpenRouterSettings,
vercel::VercelSettings, vercel::VercelSettings,
x_ai::XAiSettings, x_ai::XAiSettings,
}; };
/// Initializes the language model settings. /// Initializes the language model settings.
pub fn init(cx: &mut App) { pub fn init_settings(cx: &mut App) {
AllLanguageModelSettings::register(cx); AllLanguageModelSettings::register(cx);
} }
@ -36,6 +40,7 @@ pub struct AllLanguageModelSettings {
pub ollama: OllamaSettings, pub ollama: OllamaSettings,
pub open_router: OpenRouterSettings, pub open_router: OpenRouterSettings,
pub openai: OpenAiSettings, pub openai: OpenAiSettings,
pub openai_compatible: HashMap<Arc<str>, OpenAiCompatibleSettings>,
pub vercel: VercelSettings, pub vercel: VercelSettings,
pub x_ai: XAiSettings, pub x_ai: XAiSettings,
pub zed_dot_dev: ZedDotDevSettings, pub zed_dot_dev: ZedDotDevSettings,
@ -52,6 +57,7 @@ pub struct AllLanguageModelSettingsContent {
pub ollama: Option<OllamaSettingsContent>, pub ollama: Option<OllamaSettingsContent>,
pub open_router: Option<OpenRouterSettingsContent>, pub open_router: Option<OpenRouterSettingsContent>,
pub openai: Option<OpenAiSettingsContent>, pub openai: Option<OpenAiSettingsContent>,
pub openai_compatible: Option<HashMap<Arc<str>, OpenAiCompatibleSettingsContent>>,
pub vercel: Option<VercelSettingsContent>, pub vercel: Option<VercelSettingsContent>,
pub x_ai: Option<XAiSettingsContent>, pub x_ai: Option<XAiSettingsContent>,
#[serde(rename = "zed.dev")] #[serde(rename = "zed.dev")]
@ -103,6 +109,12 @@ pub struct OpenAiSettingsContent {
pub available_models: Option<Vec<provider::open_ai::AvailableModel>>, pub available_models: Option<Vec<provider::open_ai::AvailableModel>>,
} }
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OpenAiCompatibleSettingsContent {
pub api_url: String,
pub available_models: Vec<provider::open_ai_compatible::AvailableModel>,
}
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)] #[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct VercelSettingsContent { pub struct VercelSettingsContent {
pub api_url: Option<String>, pub api_url: Option<String>,
@ -226,6 +238,19 @@ impl settings::Settings for AllLanguageModelSettings {
openai.as_ref().and_then(|s| s.available_models.clone()), openai.as_ref().and_then(|s| s.available_models.clone()),
); );
// OpenAI Compatible
if let Some(openai_compatible) = value.openai_compatible.clone() {
for (id, openai_compatible_settings) in openai_compatible {
settings.openai_compatible.insert(
id,
OpenAiCompatibleSettings {
api_url: openai_compatible_settings.api_url,
available_models: openai_compatible_settings.available_models,
},
);
}
}
// Vercel // Vercel
let vercel = value.vercel.clone(); let vercel = value.vercel.clone();
merge( merge(

View file

@ -1211,7 +1211,7 @@ impl MultiBuffer {
let buffer = buffer_state.buffer.read(cx); let buffer = buffer_state.buffer.read(cx);
for range in buffer.edited_ranges_for_transaction_id::<D>(*buffer_transaction) { for range in buffer.edited_ranges_for_transaction_id::<D>(*buffer_transaction) {
for excerpt_id in &buffer_state.excerpts { for excerpt_id in &buffer_state.excerpts {
cursor.seek(excerpt_id, Bias::Left, &()); cursor.seek(excerpt_id, Bias::Left);
if let Some(excerpt) = cursor.item() { if let Some(excerpt) = cursor.item() {
if excerpt.locator == *excerpt_id { if excerpt.locator == *excerpt_id {
let excerpt_buffer_start = let excerpt_buffer_start =
@ -1322,7 +1322,7 @@ impl MultiBuffer {
let start_locator = snapshot.excerpt_locator_for_id(selection.start.excerpt_id); let start_locator = snapshot.excerpt_locator_for_id(selection.start.excerpt_id);
let end_locator = snapshot.excerpt_locator_for_id(selection.end.excerpt_id); let end_locator = snapshot.excerpt_locator_for_id(selection.end.excerpt_id);
cursor.seek(&Some(start_locator), Bias::Left, &()); cursor.seek(&Some(start_locator), Bias::Left);
while let Some(excerpt) = cursor.item() { while let Some(excerpt) = cursor.item() {
if excerpt.locator > *end_locator { if excerpt.locator > *end_locator {
break; break;
@ -1347,7 +1347,7 @@ impl MultiBuffer {
goal: selection.goal, goal: selection.goal,
}); });
cursor.next(&()); cursor.next();
} }
} }
@ -1769,13 +1769,13 @@ impl MultiBuffer {
let mut next_excerpt_id = move || ExcerptId(post_inc(&mut next_excerpt_id)); let mut next_excerpt_id = move || ExcerptId(post_inc(&mut next_excerpt_id));
let mut excerpts_cursor = snapshot.excerpts.cursor::<Option<&Locator>>(&()); let mut excerpts_cursor = snapshot.excerpts.cursor::<Option<&Locator>>(&());
excerpts_cursor.next(&()); excerpts_cursor.next();
loop { loop {
let new = new_iter.peek(); let new = new_iter.peek();
let existing = if let Some(existing_id) = existing_iter.peek() { let existing = if let Some(existing_id) = existing_iter.peek() {
let locator = snapshot.excerpt_locator_for_id(*existing_id); let locator = snapshot.excerpt_locator_for_id(*existing_id);
excerpts_cursor.seek_forward(&Some(locator), Bias::Left, &()); excerpts_cursor.seek_forward(&Some(locator), Bias::Left);
if let Some(excerpt) = excerpts_cursor.item() { if let Some(excerpt) = excerpts_cursor.item() {
if excerpt.buffer_id != buffer_snapshot.remote_id() { if excerpt.buffer_id != buffer_snapshot.remote_id() {
to_remove.push(*existing_id); to_remove.push(*existing_id);
@ -1970,7 +1970,7 @@ impl MultiBuffer {
let mut prev_locator = snapshot.excerpt_locator_for_id(prev_excerpt_id).clone(); let mut prev_locator = snapshot.excerpt_locator_for_id(prev_excerpt_id).clone();
let mut new_excerpt_ids = mem::take(&mut snapshot.excerpt_ids); let mut new_excerpt_ids = mem::take(&mut snapshot.excerpt_ids);
let mut cursor = snapshot.excerpts.cursor::<Option<&Locator>>(&()); let mut cursor = snapshot.excerpts.cursor::<Option<&Locator>>(&());
let mut new_excerpts = cursor.slice(&prev_locator, Bias::Right, &()); let mut new_excerpts = cursor.slice(&prev_locator, Bias::Right);
prev_locator = cursor.start().unwrap_or(Locator::min_ref()).clone(); prev_locator = cursor.start().unwrap_or(Locator::min_ref()).clone();
let edit_start = ExcerptOffset::new(new_excerpts.summary().text.len); let edit_start = ExcerptOffset::new(new_excerpts.summary().text.len);
@ -2019,7 +2019,7 @@ impl MultiBuffer {
let edit_end = ExcerptOffset::new(new_excerpts.summary().text.len); let edit_end = ExcerptOffset::new(new_excerpts.summary().text.len);
let suffix = cursor.suffix(&()); let suffix = cursor.suffix();
let changed_trailing_excerpt = suffix.is_empty(); let changed_trailing_excerpt = suffix.is_empty();
new_excerpts.append(suffix, &()); new_excerpts.append(suffix, &());
drop(cursor); drop(cursor);
@ -2104,7 +2104,7 @@ impl MultiBuffer {
.into_iter() .into_iter()
.flatten() .flatten()
{ {
cursor.seek_forward(&Some(locator), Bias::Left, &()); cursor.seek_forward(&Some(locator), Bias::Left);
if let Some(excerpt) = cursor.item() { if let Some(excerpt) = cursor.item() {
if excerpt.locator == *locator { if excerpt.locator == *locator {
excerpts.push((excerpt.id, excerpt.range.clone())); excerpts.push((excerpt.id, excerpt.range.clone()));
@ -2124,25 +2124,25 @@ impl MultiBuffer {
let mut diff_transforms = snapshot let mut diff_transforms = snapshot
.diff_transforms .diff_transforms
.cursor::<(ExcerptDimension<Point>, OutputDimension<Point>)>(&()); .cursor::<(ExcerptDimension<Point>, OutputDimension<Point>)>(&());
diff_transforms.next(&()); diff_transforms.next();
let locators = buffers let locators = buffers
.get(&buffer_id) .get(&buffer_id)
.into_iter() .into_iter()
.flat_map(|state| &state.excerpts); .flat_map(|state| &state.excerpts);
let mut result = Vec::new(); let mut result = Vec::new();
for locator in locators { for locator in locators {
excerpts.seek_forward(&Some(locator), Bias::Left, &()); excerpts.seek_forward(&Some(locator), Bias::Left);
if let Some(excerpt) = excerpts.item() { if let Some(excerpt) = excerpts.item() {
if excerpt.locator == *locator { if excerpt.locator == *locator {
let excerpt_start = excerpts.start().1.clone(); let excerpt_start = excerpts.start().1.clone();
let excerpt_end = let excerpt_end =
ExcerptDimension(excerpt_start.0 + excerpt.text_summary.lines); ExcerptDimension(excerpt_start.0 + excerpt.text_summary.lines);
diff_transforms.seek_forward(&excerpt_start, Bias::Left, &()); diff_transforms.seek_forward(&excerpt_start, Bias::Left);
let overshoot = excerpt_start.0 - diff_transforms.start().0.0; let overshoot = excerpt_start.0 - diff_transforms.start().0.0;
let start = diff_transforms.start().1.0 + overshoot; let start = diff_transforms.start().1.0 + overshoot;
diff_transforms.seek_forward(&excerpt_end, Bias::Right, &()); diff_transforms.seek_forward(&excerpt_end, Bias::Right);
let overshoot = excerpt_end.0 - diff_transforms.start().0.0; let overshoot = excerpt_end.0 - diff_transforms.start().0.0;
let end = diff_transforms.start().1.0 + overshoot; let end = diff_transforms.start().1.0 + overshoot;
@ -2290,7 +2290,7 @@ impl MultiBuffer {
self.paths_by_excerpt.remove(&excerpt_id); self.paths_by_excerpt.remove(&excerpt_id);
// Seek to the next excerpt to remove, preserving any preceding excerpts. // Seek to the next excerpt to remove, preserving any preceding excerpts.
let locator = snapshot.excerpt_locator_for_id(excerpt_id); let locator = snapshot.excerpt_locator_for_id(excerpt_id);
new_excerpts.append(cursor.slice(&Some(locator), Bias::Left, &()), &()); new_excerpts.append(cursor.slice(&Some(locator), Bias::Left), &());
if let Some(mut excerpt) = cursor.item() { if let Some(mut excerpt) = cursor.item() {
if excerpt.id != excerpt_id { if excerpt.id != excerpt_id {
@ -2311,7 +2311,7 @@ impl MultiBuffer {
removed_buffer_ids.push(excerpt.buffer_id); removed_buffer_ids.push(excerpt.buffer_id);
} }
} }
cursor.next(&()); cursor.next();
// Skip over any subsequent excerpts that are also removed. // Skip over any subsequent excerpts that are also removed.
if let Some(&next_excerpt_id) = excerpt_ids.peek() { if let Some(&next_excerpt_id) = excerpt_ids.peek() {
@ -2344,7 +2344,7 @@ impl MultiBuffer {
}); });
} }
} }
let suffix = cursor.suffix(&()); let suffix = cursor.suffix();
let changed_trailing_excerpt = suffix.is_empty(); let changed_trailing_excerpt = suffix.is_empty();
new_excerpts.append(suffix, &()); new_excerpts.append(suffix, &());
drop(cursor); drop(cursor);
@ -2493,7 +2493,7 @@ impl MultiBuffer {
let mut cursor = snapshot let mut cursor = snapshot
.excerpts .excerpts
.cursor::<(Option<&Locator>, ExcerptOffset)>(&()); .cursor::<(Option<&Locator>, ExcerptOffset)>(&());
cursor.seek_forward(&Some(locator), Bias::Left, &()); cursor.seek_forward(&Some(locator), Bias::Left);
if let Some(excerpt) = cursor.item() { if let Some(excerpt) = cursor.item() {
if excerpt.locator == *locator { if excerpt.locator == *locator {
let excerpt_buffer_range = excerpt.range.context.to_offset(&excerpt.buffer); let excerpt_buffer_range = excerpt.range.context.to_offset(&excerpt.buffer);
@ -2724,7 +2724,7 @@ impl MultiBuffer {
let snapshot = self.read(cx); let snapshot = self.read(cx);
let mut cursor = snapshot.diff_transforms.cursor::<usize>(&()); let mut cursor = snapshot.diff_transforms.cursor::<usize>(&());
let offset_range = range.to_offset(&snapshot); let offset_range = range.to_offset(&snapshot);
cursor.seek(&offset_range.start, Bias::Left, &()); cursor.seek(&offset_range.start, Bias::Left);
while let Some(item) = cursor.item() { while let Some(item) = cursor.item() {
if *cursor.start() >= offset_range.end && *cursor.start() > offset_range.start { if *cursor.start() >= offset_range.end && *cursor.start() > offset_range.start {
break; break;
@ -2732,7 +2732,7 @@ impl MultiBuffer {
if item.hunk_info().is_some() { if item.hunk_info().is_some() {
return true; return true;
} }
cursor.next(&()); cursor.next();
} }
false false
} }
@ -2746,7 +2746,7 @@ impl MultiBuffer {
let end = snapshot.point_to_offset(Point::new(range.end.row + 1, 0)); let end = snapshot.point_to_offset(Point::new(range.end.row + 1, 0));
let start = start.saturating_sub(1); let start = start.saturating_sub(1);
let end = snapshot.len().min(end + 1); let end = snapshot.len().min(end + 1);
cursor.seek(&start, Bias::Right, &()); cursor.seek(&start, Bias::Right);
while let Some(item) = cursor.item() { while let Some(item) = cursor.item() {
if *cursor.start() >= end { if *cursor.start() >= end {
break; break;
@ -2754,7 +2754,7 @@ impl MultiBuffer {
if item.hunk_info().is_some() { if item.hunk_info().is_some() {
return true; return true;
} }
cursor.next(&()); cursor.next();
} }
} }
false false
@ -2848,7 +2848,7 @@ impl MultiBuffer {
.cursor::<(Option<&Locator>, ExcerptOffset)>(&()); .cursor::<(Option<&Locator>, ExcerptOffset)>(&());
let mut edits = Vec::<Edit<ExcerptOffset>>::new(); let mut edits = Vec::<Edit<ExcerptOffset>>::new();
let prefix = cursor.slice(&Some(locator), Bias::Left, &()); let prefix = cursor.slice(&Some(locator), Bias::Left);
new_excerpts.append(prefix, &()); new_excerpts.append(prefix, &());
let mut excerpt = cursor.item().unwrap().clone(); let mut excerpt = cursor.item().unwrap().clone();
@ -2883,9 +2883,9 @@ impl MultiBuffer {
new_excerpts.push(excerpt, &()); new_excerpts.push(excerpt, &());
cursor.next(&()); cursor.next();
new_excerpts.append(cursor.suffix(&()), &()); new_excerpts.append(cursor.suffix(), &());
drop(cursor); drop(cursor);
snapshot.excerpts = new_excerpts; snapshot.excerpts = new_excerpts;
@ -2925,7 +2925,7 @@ impl MultiBuffer {
let mut edits = Vec::<Edit<ExcerptOffset>>::new(); let mut edits = Vec::<Edit<ExcerptOffset>>::new();
for locator in &locators { for locator in &locators {
let prefix = cursor.slice(&Some(locator), Bias::Left, &()); let prefix = cursor.slice(&Some(locator), Bias::Left);
new_excerpts.append(prefix, &()); new_excerpts.append(prefix, &());
let mut excerpt = cursor.item().unwrap().clone(); let mut excerpt = cursor.item().unwrap().clone();
@ -2987,10 +2987,10 @@ impl MultiBuffer {
new_excerpts.push(excerpt, &()); new_excerpts.push(excerpt, &());
cursor.next(&()); cursor.next();
} }
new_excerpts.append(cursor.suffix(&()), &()); new_excerpts.append(cursor.suffix(), &());
drop(cursor); drop(cursor);
snapshot.excerpts = new_excerpts; snapshot.excerpts = new_excerpts;
@ -3070,7 +3070,7 @@ impl MultiBuffer {
.cursor::<(Option<&Locator>, ExcerptOffset)>(&()); .cursor::<(Option<&Locator>, ExcerptOffset)>(&());
for (locator, buffer, buffer_edited) in excerpts_to_edit { for (locator, buffer, buffer_edited) in excerpts_to_edit {
new_excerpts.append(cursor.slice(&Some(locator), Bias::Left, &()), &()); new_excerpts.append(cursor.slice(&Some(locator), Bias::Left), &());
let old_excerpt = cursor.item().unwrap(); let old_excerpt = cursor.item().unwrap();
let buffer = buffer.read(cx); let buffer = buffer.read(cx);
let buffer_id = buffer.remote_id(); let buffer_id = buffer.remote_id();
@ -3112,9 +3112,9 @@ impl MultiBuffer {
} }
new_excerpts.push(new_excerpt, &()); new_excerpts.push(new_excerpt, &());
cursor.next(&()); cursor.next();
} }
new_excerpts.append(cursor.suffix(&()), &()); new_excerpts.append(cursor.suffix(), &());
drop(cursor); drop(cursor);
snapshot.excerpts = new_excerpts; snapshot.excerpts = new_excerpts;
@ -3145,23 +3145,22 @@ impl MultiBuffer {
let mut excerpt_edits = excerpt_edits.into_iter().peekable(); let mut excerpt_edits = excerpt_edits.into_iter().peekable();
while let Some(edit) = excerpt_edits.next() { while let Some(edit) = excerpt_edits.next() {
excerpts.seek_forward(&edit.new.start, Bias::Right, &()); excerpts.seek_forward(&edit.new.start, Bias::Right);
if excerpts.item().is_none() && *excerpts.start() == edit.new.start { if excerpts.item().is_none() && *excerpts.start() == edit.new.start {
excerpts.prev(&()); excerpts.prev();
} }
// Keep any transforms that are before the edit. // Keep any transforms that are before the edit.
if at_transform_boundary { if at_transform_boundary {
at_transform_boundary = false; at_transform_boundary = false;
let transforms_before_edit = let transforms_before_edit = old_diff_transforms.slice(&edit.old.start, Bias::Left);
old_diff_transforms.slice(&edit.old.start, Bias::Left, &());
self.append_diff_transforms(&mut new_diff_transforms, transforms_before_edit); self.append_diff_transforms(&mut new_diff_transforms, transforms_before_edit);
if let Some(transform) = old_diff_transforms.item() { if let Some(transform) = old_diff_transforms.item() {
if old_diff_transforms.end(&()).0 == edit.old.start if old_diff_transforms.end().0 == edit.old.start
&& old_diff_transforms.start().0 < edit.old.start && old_diff_transforms.start().0 < edit.old.start
{ {
self.push_diff_transform(&mut new_diff_transforms, transform.clone()); self.push_diff_transform(&mut new_diff_transforms, transform.clone());
old_diff_transforms.next(&()); old_diff_transforms.next();
} }
} }
} }
@ -3203,7 +3202,7 @@ impl MultiBuffer {
// then recreate the content up to the end of this transform, to prepare // then recreate the content up to the end of this transform, to prepare
// for reusing additional slices of the old transforms. // for reusing additional slices of the old transforms.
if excerpt_edits.peek().map_or(true, |next_edit| { if excerpt_edits.peek().map_or(true, |next_edit| {
next_edit.old.start >= old_diff_transforms.end(&()).0 next_edit.old.start >= old_diff_transforms.end().0
}) { }) {
let keep_next_old_transform = (old_diff_transforms.start().0 >= edit.old.end) let keep_next_old_transform = (old_diff_transforms.start().0 >= edit.old.end)
&& match old_diff_transforms.item() { && match old_diff_transforms.item() {
@ -3218,8 +3217,8 @@ impl MultiBuffer {
let mut excerpt_offset = edit.new.end; let mut excerpt_offset = edit.new.end;
if !keep_next_old_transform { if !keep_next_old_transform {
excerpt_offset += old_diff_transforms.end(&()).0 - edit.old.end; excerpt_offset += old_diff_transforms.end().0 - edit.old.end;
old_diff_transforms.next(&()); old_diff_transforms.next();
} }
old_expanded_hunks.clear(); old_expanded_hunks.clear();
@ -3234,7 +3233,7 @@ impl MultiBuffer {
} }
// Keep any transforms that are after the last edit. // Keep any transforms that are after the last edit.
self.append_diff_transforms(&mut new_diff_transforms, old_diff_transforms.suffix(&())); self.append_diff_transforms(&mut new_diff_transforms, old_diff_transforms.suffix());
// Ensure there's always at least one buffer content transform. // Ensure there's always at least one buffer content transform.
if new_diff_transforms.is_empty() { if new_diff_transforms.is_empty() {
@ -3283,10 +3282,10 @@ impl MultiBuffer {
); );
old_expanded_hunks.insert(hunk_info); old_expanded_hunks.insert(hunk_info);
} }
if old_diff_transforms.end(&()).0 > edit.old.end { if old_diff_transforms.end().0 > edit.old.end {
break; break;
} }
old_diff_transforms.next(&()); old_diff_transforms.next();
} }
// Avoid querying diff hunks if there's no possibility of hunks being expanded. // Avoid querying diff hunks if there's no possibility of hunks being expanded.
@ -3413,8 +3412,8 @@ impl MultiBuffer {
} }
} }
if excerpts.end(&()) <= edit.new.end { if excerpts.end() <= edit.new.end {
excerpts.next(&()); excerpts.next();
} else { } else {
break; break;
} }
@ -3439,9 +3438,9 @@ impl MultiBuffer {
*summary, *summary,
) { ) {
let mut cursor = subtree.cursor::<()>(&()); let mut cursor = subtree.cursor::<()>(&());
cursor.next(&()); cursor.next();
cursor.next(&()); cursor.next();
new_transforms.append(cursor.suffix(&()), &()); new_transforms.append(cursor.suffix(), &());
return; return;
} }
} }
@ -4715,14 +4714,14 @@ impl MultiBufferSnapshot {
{ {
let range = range.start.to_offset(self)..range.end.to_offset(self); let range = range.start.to_offset(self)..range.end.to_offset(self);
let mut cursor = self.diff_transforms.cursor::<(usize, ExcerptOffset)>(&()); let mut cursor = self.diff_transforms.cursor::<(usize, ExcerptOffset)>(&());
cursor.seek(&range.start, Bias::Right, &()); cursor.seek(&range.start, Bias::Right);
let Some(first_transform) = cursor.item() else { let Some(first_transform) = cursor.item() else {
return D::from_text_summary(&TextSummary::default()); return D::from_text_summary(&TextSummary::default());
}; };
let diff_transform_start = cursor.start().0; let diff_transform_start = cursor.start().0;
let diff_transform_end = cursor.end(&()).0; let diff_transform_end = cursor.end().0;
let diff_start = range.start; let diff_start = range.start;
let start_overshoot = diff_start - diff_transform_start; let start_overshoot = diff_start - diff_transform_start;
let end_overshoot = std::cmp::min(range.end, diff_transform_end) - diff_transform_start; let end_overshoot = std::cmp::min(range.end, diff_transform_end) - diff_transform_start;
@ -4765,12 +4764,10 @@ impl MultiBufferSnapshot {
return result; return result;
} }
cursor.next(&()); cursor.next();
result.add_assign(&D::from_text_summary(&cursor.summary( result.add_assign(&D::from_text_summary(
&range.end, &cursor.summary(&range.end, Bias::Right),
Bias::Right, ));
&(),
)));
let Some(last_transform) = cursor.item() else { let Some(last_transform) = cursor.item() else {
return result; return result;
@ -4813,9 +4810,9 @@ impl MultiBufferSnapshot {
// let mut range = range.start..range.end; // let mut range = range.start..range.end;
let mut summary = D::zero(&()); let mut summary = D::zero(&());
let mut cursor = self.excerpts.cursor::<ExcerptOffset>(&()); let mut cursor = self.excerpts.cursor::<ExcerptOffset>(&());
cursor.seek(&range.start, Bias::Right, &()); cursor.seek(&range.start, Bias::Right);
if let Some(excerpt) = cursor.item() { if let Some(excerpt) = cursor.item() {
let mut end_before_newline = cursor.end(&()); let mut end_before_newline = cursor.end();
if excerpt.has_trailing_newline { if excerpt.has_trailing_newline {
end_before_newline -= ExcerptOffset::new(1); end_before_newline -= ExcerptOffset::new(1);
} }
@ -4834,13 +4831,13 @@ impl MultiBufferSnapshot {
summary.add_assign(&D::from_text_summary(&TextSummary::from("\n"))); summary.add_assign(&D::from_text_summary(&TextSummary::from("\n")));
} }
cursor.next(&()); cursor.next();
} }
if range.end > *cursor.start() { if range.end > *cursor.start() {
summary.add_assign( summary.add_assign(
&cursor &cursor
.summary::<_, ExcerptDimension<D>>(&range.end, Bias::Right, &()) .summary::<_, ExcerptDimension<D>>(&range.end, Bias::Right)
.0, .0,
); );
if let Some(excerpt) = cursor.item() { if let Some(excerpt) = cursor.item() {
@ -4876,11 +4873,11 @@ impl MultiBufferSnapshot {
D: TextDimension + Ord + Sub<D, Output = D>, D: TextDimension + Ord + Sub<D, Output = D>,
{ {
loop { loop {
let transform_end_position = diff_transforms.end(&()).0.0; let transform_end_position = diff_transforms.end().0.0;
let at_transform_end = let at_transform_end =
excerpt_position == transform_end_position && diff_transforms.item().is_some(); excerpt_position == transform_end_position && diff_transforms.item().is_some();
if at_transform_end && anchor.text_anchor.bias == Bias::Right { if at_transform_end && anchor.text_anchor.bias == Bias::Right {
diff_transforms.next(&()); diff_transforms.next();
continue; continue;
} }
@ -4906,7 +4903,7 @@ impl MultiBufferSnapshot {
); );
position.add_assign(&position_in_hunk); position.add_assign(&position_in_hunk);
} else if at_transform_end { } else if at_transform_end {
diff_transforms.next(&()); diff_transforms.next();
continue; continue;
} }
} }
@ -4915,7 +4912,7 @@ impl MultiBufferSnapshot {
} }
_ => { _ => {
if at_transform_end && anchor.diff_base_anchor.is_some() { if at_transform_end && anchor.diff_base_anchor.is_some() {
diff_transforms.next(&()); diff_transforms.next();
continue; continue;
} }
let overshoot = excerpt_position - diff_transforms.start().0.0; let overshoot = excerpt_position - diff_transforms.start().0.0;
@ -4933,9 +4930,9 @@ impl MultiBufferSnapshot {
.cursor::<(Option<&Locator>, ExcerptOffset)>(&()); .cursor::<(Option<&Locator>, ExcerptOffset)>(&());
let locator = self.excerpt_locator_for_id(anchor.excerpt_id); let locator = self.excerpt_locator_for_id(anchor.excerpt_id);
cursor.seek(&Some(locator), Bias::Left, &()); cursor.seek(&Some(locator), Bias::Left);
if cursor.item().is_none() { if cursor.item().is_none() {
cursor.next(&()); cursor.next();
} }
let mut position = cursor.start().1; let mut position = cursor.start().1;
@ -4975,7 +4972,7 @@ impl MultiBufferSnapshot {
let mut diff_transforms_cursor = self let mut diff_transforms_cursor = self
.diff_transforms .diff_transforms
.cursor::<(ExcerptDimension<D>, OutputDimension<D>)>(&()); .cursor::<(ExcerptDimension<D>, OutputDimension<D>)>(&());
diff_transforms_cursor.next(&()); diff_transforms_cursor.next();
let mut summaries = Vec::new(); let mut summaries = Vec::new();
while let Some(anchor) = anchors.peek() { while let Some(anchor) = anchors.peek() {
@ -4990,9 +4987,9 @@ impl MultiBufferSnapshot {
}); });
let locator = self.excerpt_locator_for_id(excerpt_id); let locator = self.excerpt_locator_for_id(excerpt_id);
cursor.seek_forward(locator, Bias::Left, &()); cursor.seek_forward(locator, Bias::Left);
if cursor.item().is_none() { if cursor.item().is_none() {
cursor.next(&()); cursor.next();
} }
let excerpt_start_position = D::from_text_summary(&cursor.start().text); let excerpt_start_position = D::from_text_summary(&cursor.start().text);
@ -5022,11 +5019,8 @@ impl MultiBufferSnapshot {
} }
if position > diff_transforms_cursor.start().0.0 { if position > diff_transforms_cursor.start().0.0 {
diff_transforms_cursor.seek_forward( diff_transforms_cursor
&ExcerptDimension(position), .seek_forward(&ExcerptDimension(position), Bias::Left);
Bias::Left,
&(),
);
} }
summaries.push(self.resolve_summary_for_anchor( summaries.push(self.resolve_summary_for_anchor(
@ -5036,11 +5030,8 @@ impl MultiBufferSnapshot {
)); ));
} }
} else { } else {
diff_transforms_cursor.seek_forward( diff_transforms_cursor
&ExcerptDimension(excerpt_start_position), .seek_forward(&ExcerptDimension(excerpt_start_position), Bias::Left);
Bias::Left,
&(),
);
let position = self.resolve_summary_for_anchor( let position = self.resolve_summary_for_anchor(
&Anchor::max(), &Anchor::max(),
excerpt_start_position, excerpt_start_position,
@ -5099,7 +5090,7 @@ impl MultiBufferSnapshot {
{ {
let mut anchors = anchors.into_iter().enumerate().peekable(); let mut anchors = anchors.into_iter().enumerate().peekable();
let mut cursor = self.excerpts.cursor::<Option<&Locator>>(&()); let mut cursor = self.excerpts.cursor::<Option<&Locator>>(&());
cursor.next(&()); cursor.next();
let mut result = Vec::new(); let mut result = Vec::new();
@ -5108,10 +5099,10 @@ impl MultiBufferSnapshot {
// Find the location where this anchor's excerpt should be. // Find the location where this anchor's excerpt should be.
let old_locator = self.excerpt_locator_for_id(old_excerpt_id); let old_locator = self.excerpt_locator_for_id(old_excerpt_id);
cursor.seek_forward(&Some(old_locator), Bias::Left, &()); cursor.seek_forward(&Some(old_locator), Bias::Left);
if cursor.item().is_none() { if cursor.item().is_none() {
cursor.next(&()); cursor.next();
} }
let next_excerpt = cursor.item(); let next_excerpt = cursor.item();
@ -5211,13 +5202,13 @@ impl MultiBufferSnapshot {
// Find the given position in the diff transforms. Determine the corresponding // Find the given position in the diff transforms. Determine the corresponding
// offset in the excerpts, and whether the position is within a deleted hunk. // offset in the excerpts, and whether the position is within a deleted hunk.
let mut diff_transforms = self.diff_transforms.cursor::<(usize, ExcerptOffset)>(&()); let mut diff_transforms = self.diff_transforms.cursor::<(usize, ExcerptOffset)>(&());
diff_transforms.seek(&offset, Bias::Right, &()); diff_transforms.seek(&offset, Bias::Right);
if offset == diff_transforms.start().0 && bias == Bias::Left { if offset == diff_transforms.start().0 && bias == Bias::Left {
if let Some(prev_item) = diff_transforms.prev_item() { if let Some(prev_item) = diff_transforms.prev_item() {
match prev_item { match prev_item {
DiffTransform::DeletedHunk { .. } => { DiffTransform::DeletedHunk { .. } => {
diff_transforms.prev(&()); diff_transforms.prev();
} }
_ => {} _ => {}
} }
@ -5260,13 +5251,13 @@ impl MultiBufferSnapshot {
let mut excerpts = self let mut excerpts = self
.excerpts .excerpts
.cursor::<(ExcerptOffset, Option<ExcerptId>)>(&()); .cursor::<(ExcerptOffset, Option<ExcerptId>)>(&());
excerpts.seek(&excerpt_offset, Bias::Right, &()); excerpts.seek(&excerpt_offset, Bias::Right);
if excerpts.item().is_none() && excerpt_offset == excerpts.start().0 && bias == Bias::Left { if excerpts.item().is_none() && excerpt_offset == excerpts.start().0 && bias == Bias::Left {
excerpts.prev(&()); excerpts.prev();
} }
if let Some(excerpt) = excerpts.item() { if let Some(excerpt) = excerpts.item() {
let mut overshoot = excerpt_offset.saturating_sub(excerpts.start().0).value; let mut overshoot = excerpt_offset.saturating_sub(excerpts.start().0).value;
if excerpt.has_trailing_newline && excerpt_offset == excerpts.end(&()).0 { if excerpt.has_trailing_newline && excerpt_offset == excerpts.end().0 {
overshoot -= 1; overshoot -= 1;
bias = Bias::Right; bias = Bias::Right;
} }
@ -5297,7 +5288,7 @@ impl MultiBufferSnapshot {
let excerpt_id = self.latest_excerpt_id(excerpt_id); let excerpt_id = self.latest_excerpt_id(excerpt_id);
let locator = self.excerpt_locator_for_id(excerpt_id); let locator = self.excerpt_locator_for_id(excerpt_id);
let mut cursor = self.excerpts.cursor::<Option<&Locator>>(&()); let mut cursor = self.excerpts.cursor::<Option<&Locator>>(&());
cursor.seek(locator, Bias::Left, &()); cursor.seek(locator, Bias::Left);
if let Some(excerpt) = cursor.item() { if let Some(excerpt) = cursor.item() {
if excerpt.id == excerpt_id { if excerpt.id == excerpt_id {
let text_anchor = excerpt.clip_anchor(text_anchor); let text_anchor = excerpt.clip_anchor(text_anchor);
@ -5351,13 +5342,13 @@ impl MultiBufferSnapshot {
let mut excerpts = self let mut excerpts = self
.excerpts .excerpts
.cursor::<(Option<&Locator>, ExcerptDimension<usize>)>(&()); .cursor::<(Option<&Locator>, ExcerptDimension<usize>)>(&());
excerpts.seek(&Some(start_locator), Bias::Left, &()); excerpts.seek(&Some(start_locator), Bias::Left);
excerpts.prev(&()); excerpts.prev();
let mut diff_transforms = self.diff_transforms.cursor::<DiffTransforms<usize>>(&()); let mut diff_transforms = self.diff_transforms.cursor::<DiffTransforms<usize>>(&());
diff_transforms.seek(&excerpts.start().1, Bias::Left, &()); diff_transforms.seek(&excerpts.start().1, Bias::Left);
if diff_transforms.end(&()).excerpt_dimension < excerpts.start().1 { if diff_transforms.end().excerpt_dimension < excerpts.start().1 {
diff_transforms.next(&()); diff_transforms.next();
} }
let excerpt = excerpts.item()?; let excerpt = excerpts.item()?;
@ -6193,7 +6184,7 @@ impl MultiBufferSnapshot {
Locator::max_ref() Locator::max_ref()
} else { } else {
let mut cursor = self.excerpt_ids.cursor::<ExcerptId>(&()); let mut cursor = self.excerpt_ids.cursor::<ExcerptId>(&());
cursor.seek(&id, Bias::Left, &()); cursor.seek(&id, Bias::Left);
if let Some(entry) = cursor.item() { if let Some(entry) = cursor.item() {
if entry.id == id { if entry.id == id {
return &entry.locator; return &entry.locator;
@ -6229,7 +6220,7 @@ impl MultiBufferSnapshot {
let mut cursor = self.excerpt_ids.cursor::<ExcerptId>(&()); let mut cursor = self.excerpt_ids.cursor::<ExcerptId>(&());
for id in sorted_ids { for id in sorted_ids {
if cursor.seek_forward(&id, Bias::Left, &()) { if cursor.seek_forward(&id, Bias::Left) {
locators.push(cursor.item().unwrap().locator.clone()); locators.push(cursor.item().unwrap().locator.clone());
} else { } else {
panic!("invalid excerpt id {:?}", id); panic!("invalid excerpt id {:?}", id);
@ -6253,16 +6244,16 @@ impl MultiBufferSnapshot {
.excerpts .excerpts
.cursor::<(Option<&Locator>, ExcerptDimension<Point>)>(&()); .cursor::<(Option<&Locator>, ExcerptDimension<Point>)>(&());
let locator = self.excerpt_locator_for_id(excerpt_id); let locator = self.excerpt_locator_for_id(excerpt_id);
if cursor.seek(&Some(locator), Bias::Left, &()) { if cursor.seek(&Some(locator), Bias::Left) {
let start = cursor.start().1.clone(); let start = cursor.start().1.clone();
let end = cursor.end(&()).1; let end = cursor.end().1;
let mut diff_transforms = self let mut diff_transforms = self
.diff_transforms .diff_transforms
.cursor::<(ExcerptDimension<Point>, OutputDimension<Point>)>(&()); .cursor::<(ExcerptDimension<Point>, OutputDimension<Point>)>(&());
diff_transforms.seek(&start, Bias::Left, &()); diff_transforms.seek(&start, Bias::Left);
let overshoot = start.0 - diff_transforms.start().0.0; let overshoot = start.0 - diff_transforms.start().0.0;
let start = diff_transforms.start().1.0 + overshoot; let start = diff_transforms.start().1.0 + overshoot;
diff_transforms.seek(&end, Bias::Right, &()); diff_transforms.seek(&end, Bias::Right);
let overshoot = end.0 - diff_transforms.start().0.0; let overshoot = end.0 - diff_transforms.start().0.0;
let end = diff_transforms.start().1.0 + overshoot; let end = diff_transforms.start().1.0 + overshoot;
Some(start..end) Some(start..end)
@ -6274,7 +6265,7 @@ impl MultiBufferSnapshot {
pub fn buffer_range_for_excerpt(&self, excerpt_id: ExcerptId) -> Option<Range<text::Anchor>> { pub fn buffer_range_for_excerpt(&self, excerpt_id: ExcerptId) -> Option<Range<text::Anchor>> {
let mut cursor = self.excerpts.cursor::<Option<&Locator>>(&()); let mut cursor = self.excerpts.cursor::<Option<&Locator>>(&());
let locator = self.excerpt_locator_for_id(excerpt_id); let locator = self.excerpt_locator_for_id(excerpt_id);
if cursor.seek(&Some(locator), Bias::Left, &()) { if cursor.seek(&Some(locator), Bias::Left) {
if let Some(excerpt) = cursor.item() { if let Some(excerpt) = cursor.item() {
return Some(excerpt.range.context.clone()); return Some(excerpt.range.context.clone());
} }
@ -6285,7 +6276,7 @@ impl MultiBufferSnapshot {
fn excerpt(&self, excerpt_id: ExcerptId) -> Option<&Excerpt> { fn excerpt(&self, excerpt_id: ExcerptId) -> Option<&Excerpt> {
let mut cursor = self.excerpts.cursor::<Option<&Locator>>(&()); let mut cursor = self.excerpts.cursor::<Option<&Locator>>(&());
let locator = self.excerpt_locator_for_id(excerpt_id); let locator = self.excerpt_locator_for_id(excerpt_id);
cursor.seek(&Some(locator), Bias::Left, &()); cursor.seek(&Some(locator), Bias::Left);
if let Some(excerpt) = cursor.item() { if let Some(excerpt) = cursor.item() {
if excerpt.id == excerpt_id { if excerpt.id == excerpt_id {
return Some(excerpt); return Some(excerpt);
@ -6333,7 +6324,7 @@ impl MultiBufferSnapshot {
let mut cursor = self.excerpts.cursor::<ExcerptSummary>(&()); let mut cursor = self.excerpts.cursor::<ExcerptSummary>(&());
let start_locator = self.excerpt_locator_for_id(range.start.excerpt_id); let start_locator = self.excerpt_locator_for_id(range.start.excerpt_id);
let end_locator = self.excerpt_locator_for_id(range.end.excerpt_id); let end_locator = self.excerpt_locator_for_id(range.end.excerpt_id);
cursor.seek(start_locator, Bias::Left, &()); cursor.seek(start_locator, Bias::Left);
cursor cursor
.take_while(move |excerpt| excerpt.locator <= *end_locator) .take_while(move |excerpt| excerpt.locator <= *end_locator)
.flat_map(move |excerpt| { .flat_map(move |excerpt| {
@ -6472,11 +6463,11 @@ where
fn seek(&mut self, position: &D) { fn seek(&mut self, position: &D) {
self.cached_region.take(); self.cached_region.take();
self.diff_transforms self.diff_transforms
.seek(&OutputDimension(*position), Bias::Right, &()); .seek(&OutputDimension(*position), Bias::Right);
if self.diff_transforms.item().is_none() if self.diff_transforms.item().is_none()
&& *position == self.diff_transforms.start().output_dimension.0 && *position == self.diff_transforms.start().output_dimension.0
{ {
self.diff_transforms.prev(&()); self.diff_transforms.prev();
} }
let mut excerpt_position = self.diff_transforms.start().excerpt_dimension.0; let mut excerpt_position = self.diff_transforms.start().excerpt_dimension.0;
@ -6486,20 +6477,20 @@ where
} }
self.excerpts self.excerpts
.seek(&ExcerptDimension(excerpt_position), Bias::Right, &()); .seek(&ExcerptDimension(excerpt_position), Bias::Right);
if self.excerpts.item().is_none() && excerpt_position == self.excerpts.start().0 { if self.excerpts.item().is_none() && excerpt_position == self.excerpts.start().0 {
self.excerpts.prev(&()); self.excerpts.prev();
} }
} }
fn seek_forward(&mut self, position: &D) { fn seek_forward(&mut self, position: &D) {
self.cached_region.take(); self.cached_region.take();
self.diff_transforms self.diff_transforms
.seek_forward(&OutputDimension(*position), Bias::Right, &()); .seek_forward(&OutputDimension(*position), Bias::Right);
if self.diff_transforms.item().is_none() if self.diff_transforms.item().is_none()
&& *position == self.diff_transforms.start().output_dimension.0 && *position == self.diff_transforms.start().output_dimension.0
{ {
self.diff_transforms.prev(&()); self.diff_transforms.prev();
} }
let overshoot = *position - self.diff_transforms.start().output_dimension.0; let overshoot = *position - self.diff_transforms.start().output_dimension.0;
@ -6509,31 +6500,30 @@ where
} }
self.excerpts self.excerpts
.seek_forward(&ExcerptDimension(excerpt_position), Bias::Right, &()); .seek_forward(&ExcerptDimension(excerpt_position), Bias::Right);
if self.excerpts.item().is_none() && excerpt_position == self.excerpts.start().0 { if self.excerpts.item().is_none() && excerpt_position == self.excerpts.start().0 {
self.excerpts.prev(&()); self.excerpts.prev();
} }
} }
fn next_excerpt(&mut self) { fn next_excerpt(&mut self) {
self.excerpts.next(&()); self.excerpts.next();
self.seek_to_start_of_current_excerpt(); self.seek_to_start_of_current_excerpt();
} }
fn prev_excerpt(&mut self) { fn prev_excerpt(&mut self) {
self.excerpts.prev(&()); self.excerpts.prev();
self.seek_to_start_of_current_excerpt(); self.seek_to_start_of_current_excerpt();
} }
fn seek_to_start_of_current_excerpt(&mut self) { fn seek_to_start_of_current_excerpt(&mut self) {
self.cached_region.take(); self.cached_region.take();
self.diff_transforms self.diff_transforms.seek(self.excerpts.start(), Bias::Left);
.seek(self.excerpts.start(), Bias::Left, &()); if self.diff_transforms.end().excerpt_dimension == *self.excerpts.start()
if self.diff_transforms.end(&()).excerpt_dimension == *self.excerpts.start()
&& self.diff_transforms.start().excerpt_dimension < *self.excerpts.start() && self.diff_transforms.start().excerpt_dimension < *self.excerpts.start()
&& self.diff_transforms.next_item().is_some() && self.diff_transforms.next_item().is_some()
{ {
self.diff_transforms.next(&()); self.diff_transforms.next();
} }
} }
@ -6541,18 +6531,18 @@ where
self.cached_region.take(); self.cached_region.take();
match self match self
.diff_transforms .diff_transforms
.end(&()) .end()
.excerpt_dimension .excerpt_dimension
.cmp(&self.excerpts.end(&())) .cmp(&self.excerpts.end())
{ {
cmp::Ordering::Less => self.diff_transforms.next(&()), cmp::Ordering::Less => self.diff_transforms.next(),
cmp::Ordering::Greater => self.excerpts.next(&()), cmp::Ordering::Greater => self.excerpts.next(),
cmp::Ordering::Equal => { cmp::Ordering::Equal => {
self.diff_transforms.next(&()); self.diff_transforms.next();
if self.diff_transforms.end(&()).excerpt_dimension > self.excerpts.end(&()) if self.diff_transforms.end().excerpt_dimension > self.excerpts.end()
|| self.diff_transforms.item().is_none() || self.diff_transforms.item().is_none()
{ {
self.excerpts.next(&()); self.excerpts.next();
} else if let Some(DiffTransform::DeletedHunk { hunk_info, .. }) = } else if let Some(DiffTransform::DeletedHunk { hunk_info, .. }) =
self.diff_transforms.item() self.diff_transforms.item()
{ {
@ -6561,7 +6551,7 @@ where
.item() .item()
.map_or(false, |excerpt| excerpt.id != hunk_info.excerpt_id) .map_or(false, |excerpt| excerpt.id != hunk_info.excerpt_id)
{ {
self.excerpts.next(&()); self.excerpts.next();
} }
} }
} }
@ -6576,14 +6566,14 @@ where
.excerpt_dimension .excerpt_dimension
.cmp(self.excerpts.start()) .cmp(self.excerpts.start())
{ {
cmp::Ordering::Less => self.excerpts.prev(&()), cmp::Ordering::Less => self.excerpts.prev(),
cmp::Ordering::Greater => self.diff_transforms.prev(&()), cmp::Ordering::Greater => self.diff_transforms.prev(),
cmp::Ordering::Equal => { cmp::Ordering::Equal => {
self.diff_transforms.prev(&()); self.diff_transforms.prev();
if self.diff_transforms.start().excerpt_dimension < *self.excerpts.start() if self.diff_transforms.start().excerpt_dimension < *self.excerpts.start()
|| self.diff_transforms.item().is_none() || self.diff_transforms.item().is_none()
{ {
self.excerpts.prev(&()); self.excerpts.prev();
} }
} }
} }
@ -6603,9 +6593,9 @@ where
return true; return true;
} }
self.diff_transforms.prev(&()); self.diff_transforms.prev();
let prev_transform = self.diff_transforms.item(); let prev_transform = self.diff_transforms.item();
self.diff_transforms.next(&()); self.diff_transforms.next();
prev_transform.map_or(true, |next_transform| { prev_transform.map_or(true, |next_transform| {
matches!(next_transform, DiffTransform::BufferContent { .. }) matches!(next_transform, DiffTransform::BufferContent { .. })
@ -6613,9 +6603,9 @@ where
} }
fn is_at_end_of_excerpt(&mut self) -> bool { fn is_at_end_of_excerpt(&mut self) -> bool {
if self.diff_transforms.end(&()).excerpt_dimension < self.excerpts.end(&()) { if self.diff_transforms.end().excerpt_dimension < self.excerpts.end() {
return false; return false;
} else if self.diff_transforms.end(&()).excerpt_dimension > self.excerpts.end(&()) } else if self.diff_transforms.end().excerpt_dimension > self.excerpts.end()
|| self.diff_transforms.item().is_none() || self.diff_transforms.item().is_none()
{ {
return true; return true;
@ -6636,7 +6626,7 @@ where
let buffer = &excerpt.buffer; let buffer = &excerpt.buffer;
let buffer_context_start = excerpt.range.context.start.summary::<D>(buffer); let buffer_context_start = excerpt.range.context.start.summary::<D>(buffer);
let mut buffer_start = buffer_context_start; let mut buffer_start = buffer_context_start;
let overshoot = self.diff_transforms.end(&()).excerpt_dimension.0 - self.excerpts.start().0; let overshoot = self.diff_transforms.end().excerpt_dimension.0 - self.excerpts.start().0;
buffer_start.add_assign(&overshoot); buffer_start.add_assign(&overshoot);
Some(buffer_start) Some(buffer_start)
} }
@ -6659,7 +6649,7 @@ where
let mut buffer_end = buffer_start; let mut buffer_end = buffer_start;
buffer_end.add_assign(&buffer_range_len); buffer_end.add_assign(&buffer_range_len);
let start = self.diff_transforms.start().output_dimension.0; let start = self.diff_transforms.start().output_dimension.0;
let end = self.diff_transforms.end(&()).output_dimension.0; let end = self.diff_transforms.end().output_dimension.0;
return Some(MultiBufferRegion { return Some(MultiBufferRegion {
buffer, buffer,
excerpt, excerpt,
@ -6693,16 +6683,16 @@ where
let mut end; let mut end;
let mut buffer_end; let mut buffer_end;
let has_trailing_newline; let has_trailing_newline;
if self.diff_transforms.end(&()).excerpt_dimension.0 < self.excerpts.end(&()).0 { if self.diff_transforms.end().excerpt_dimension.0 < self.excerpts.end().0 {
let overshoot = let overshoot =
self.diff_transforms.end(&()).excerpt_dimension.0 - self.excerpts.start().0; self.diff_transforms.end().excerpt_dimension.0 - self.excerpts.start().0;
end = self.diff_transforms.end(&()).output_dimension.0; end = self.diff_transforms.end().output_dimension.0;
buffer_end = buffer_context_start; buffer_end = buffer_context_start;
buffer_end.add_assign(&overshoot); buffer_end.add_assign(&overshoot);
has_trailing_newline = false; has_trailing_newline = false;
} else { } else {
let overshoot = let overshoot =
self.excerpts.end(&()).0 - self.diff_transforms.start().excerpt_dimension.0; self.excerpts.end().0 - self.diff_transforms.start().excerpt_dimension.0;
end = self.diff_transforms.start().output_dimension.0; end = self.diff_transforms.start().output_dimension.0;
end.add_assign(&overshoot); end.add_assign(&overshoot);
buffer_end = excerpt.range.context.end.summary::<D>(buffer); buffer_end = excerpt.range.context.end.summary::<D>(buffer);
@ -7086,11 +7076,11 @@ impl<'a> MultiBufferExcerpt<'a> {
/// Maps a range within the [`MultiBuffer`] to a range within the [`Buffer`] /// Maps a range within the [`MultiBuffer`] to a range within the [`Buffer`]
pub fn map_range_to_buffer(&mut self, range: Range<usize>) -> Range<usize> { pub fn map_range_to_buffer(&mut self, range: Range<usize>) -> Range<usize> {
self.diff_transforms self.diff_transforms
.seek(&OutputDimension(range.start), Bias::Right, &()); .seek(&OutputDimension(range.start), Bias::Right);
let start = self.map_offset_to_buffer_internal(range.start); let start = self.map_offset_to_buffer_internal(range.start);
let end = if range.end > range.start { let end = if range.end > range.start {
self.diff_transforms self.diff_transforms
.seek_forward(&OutputDimension(range.end), Bias::Right, &()); .seek_forward(&OutputDimension(range.end), Bias::Right);
self.map_offset_to_buffer_internal(range.end) self.map_offset_to_buffer_internal(range.end)
} else { } else {
start start
@ -7123,7 +7113,7 @@ impl<'a> MultiBufferExcerpt<'a> {
} }
let overshoot = buffer_range.start - self.buffer_offset; let overshoot = buffer_range.start - self.buffer_offset;
let excerpt_offset = ExcerptDimension(self.excerpt_offset.0 + overshoot); let excerpt_offset = ExcerptDimension(self.excerpt_offset.0 + overshoot);
self.diff_transforms.seek(&excerpt_offset, Bias::Right, &()); self.diff_transforms.seek(&excerpt_offset, Bias::Right);
if excerpt_offset.0 < self.diff_transforms.start().excerpt_dimension.0 { if excerpt_offset.0 < self.diff_transforms.start().excerpt_dimension.0 {
log::warn!( log::warn!(
"Attempting to map a range from a buffer offset that starts before the current buffer offset" "Attempting to map a range from a buffer offset that starts before the current buffer offset"
@ -7137,7 +7127,7 @@ impl<'a> MultiBufferExcerpt<'a> {
let overshoot = buffer_range.end - self.buffer_offset; let overshoot = buffer_range.end - self.buffer_offset;
let excerpt_offset = ExcerptDimension(self.excerpt_offset.0 + overshoot); let excerpt_offset = ExcerptDimension(self.excerpt_offset.0 + overshoot);
self.diff_transforms self.diff_transforms
.seek_forward(&excerpt_offset, Bias::Right, &()); .seek_forward(&excerpt_offset, Bias::Right);
let overshoot = excerpt_offset.0 - self.diff_transforms.start().excerpt_dimension.0; let overshoot = excerpt_offset.0 - self.diff_transforms.start().excerpt_dimension.0;
self.diff_transforms.start().output_dimension.0 + overshoot self.diff_transforms.start().output_dimension.0 + overshoot
} else { } else {
@ -7509,7 +7499,7 @@ impl Iterator for MultiBufferRows<'_> {
if let Some(next_region) = self.cursor.region() { if let Some(next_region) = self.cursor.region() {
region = next_region; region = next_region;
} else { } else {
if self.point == self.cursor.diff_transforms.end(&()).output_dimension.0 { if self.point == self.cursor.diff_transforms.end().output_dimension.0 {
let multibuffer_row = MultiBufferRow(self.point.row); let multibuffer_row = MultiBufferRow(self.point.row);
let last_excerpt = self let last_excerpt = self
.cursor .cursor
@ -7615,14 +7605,14 @@ impl<'a> MultiBufferChunks<'a> {
} }
pub fn seek(&mut self, range: Range<usize>) { pub fn seek(&mut self, range: Range<usize>) {
self.diff_transforms.seek(&range.end, Bias::Right, &()); self.diff_transforms.seek(&range.end, Bias::Right);
let mut excerpt_end = self.diff_transforms.start().1; let mut excerpt_end = self.diff_transforms.start().1;
if let Some(DiffTransform::BufferContent { .. }) = self.diff_transforms.item() { if let Some(DiffTransform::BufferContent { .. }) = self.diff_transforms.item() {
let overshoot = range.end - self.diff_transforms.start().0; let overshoot = range.end - self.diff_transforms.start().0;
excerpt_end.value += overshoot; excerpt_end.value += overshoot;
} }
self.diff_transforms.seek(&range.start, Bias::Right, &()); self.diff_transforms.seek(&range.start, Bias::Right);
let mut excerpt_start = self.diff_transforms.start().1; let mut excerpt_start = self.diff_transforms.start().1;
if let Some(DiffTransform::BufferContent { .. }) = self.diff_transforms.item() { if let Some(DiffTransform::BufferContent { .. }) = self.diff_transforms.item() {
let overshoot = range.start - self.diff_transforms.start().0; let overshoot = range.start - self.diff_transforms.start().0;
@ -7636,7 +7626,7 @@ impl<'a> MultiBufferChunks<'a> {
fn seek_to_excerpt_offset_range(&mut self, new_range: Range<ExcerptOffset>) { fn seek_to_excerpt_offset_range(&mut self, new_range: Range<ExcerptOffset>) {
self.excerpt_offset_range = new_range.clone(); self.excerpt_offset_range = new_range.clone();
self.excerpts.seek(&new_range.start, Bias::Right, &()); self.excerpts.seek(&new_range.start, Bias::Right);
if let Some(excerpt) = self.excerpts.item() { if let Some(excerpt) = self.excerpts.item() {
let excerpt_start = *self.excerpts.start(); let excerpt_start = *self.excerpts.start();
if let Some(excerpt_chunks) = self if let Some(excerpt_chunks) = self
@ -7669,7 +7659,7 @@ impl<'a> MultiBufferChunks<'a> {
self.excerpt_offset_range.start.value += chunk.text.len(); self.excerpt_offset_range.start.value += chunk.text.len();
return Some(chunk); return Some(chunk);
} else { } else {
self.excerpts.next(&()); self.excerpts.next();
let excerpt = self.excerpts.item()?; let excerpt = self.excerpts.item()?;
self.excerpt_chunks = Some(excerpt.chunks_in_range( self.excerpt_chunks = Some(excerpt.chunks_in_range(
0..(self.excerpt_offset_range.end - *self.excerpts.start()).value, 0..(self.excerpt_offset_range.end - *self.excerpts.start()).value,
@ -7712,12 +7702,12 @@ impl<'a> Iterator for MultiBufferChunks<'a> {
if self.range.start >= self.range.end { if self.range.start >= self.range.end {
return None; return None;
} }
if self.range.start == self.diff_transforms.end(&()).0 { if self.range.start == self.diff_transforms.end().0 {
self.diff_transforms.next(&()); self.diff_transforms.next();
} }
let diff_transform_start = self.diff_transforms.start().0; let diff_transform_start = self.diff_transforms.start().0;
let diff_transform_end = self.diff_transforms.end(&()).0; let diff_transform_end = self.diff_transforms.end().0;
debug_assert!(self.range.start < diff_transform_end); debug_assert!(self.range.start < diff_transform_end);
let diff_transform = self.diff_transforms.item()?; let diff_transform = self.diff_transforms.item()?;

View file

@ -132,12 +132,12 @@ impl NotificationStore {
} }
let ix = count - 1 - ix; let ix = count - 1 - ix;
let mut cursor = self.notifications.cursor::<Count>(&()); let mut cursor = self.notifications.cursor::<Count>(&());
cursor.seek(&Count(ix), Bias::Right, &()); cursor.seek(&Count(ix), Bias::Right);
cursor.item() cursor.item()
} }
pub fn notification_for_id(&self, id: u64) -> Option<&NotificationEntry> { pub fn notification_for_id(&self, id: u64) -> Option<&NotificationEntry> {
let mut cursor = self.notifications.cursor::<NotificationId>(&()); let mut cursor = self.notifications.cursor::<NotificationId>(&());
cursor.seek(&NotificationId(id), Bias::Left, &()); cursor.seek(&NotificationId(id), Bias::Left);
if let Some(item) = cursor.item() { if let Some(item) = cursor.item() {
if item.id == id { if item.id == id {
return Some(item); return Some(item);
@ -365,7 +365,7 @@ impl NotificationStore {
let mut old_range = 0..0; let mut old_range = 0..0;
for (i, (id, new_notification)) in notifications.into_iter().enumerate() { for (i, (id, new_notification)) in notifications.into_iter().enumerate() {
new_notifications.append(cursor.slice(&NotificationId(id), Bias::Left, &()), &()); new_notifications.append(cursor.slice(&NotificationId(id), Bias::Left), &());
if i == 0 { if i == 0 {
old_range.start = cursor.start().1.0; old_range.start = cursor.start().1.0;
@ -374,7 +374,7 @@ impl NotificationStore {
let old_notification = cursor.item(); let old_notification = cursor.item();
if let Some(old_notification) = old_notification { if let Some(old_notification) = old_notification {
if old_notification.id == id { if old_notification.id == id {
cursor.next(&()); cursor.next();
if let Some(new_notification) = &new_notification { if let Some(new_notification) = &new_notification {
if new_notification.is_read { if new_notification.is_read {
@ -403,7 +403,7 @@ impl NotificationStore {
old_range.end = cursor.start().1.0; old_range.end = cursor.start().1.0;
let new_count = new_notifications.summary().count - old_range.start; let new_count = new_notifications.summary().count - old_range.start;
new_notifications.append(cursor.suffix(&()), &()); new_notifications.append(cursor.suffix(), &());
drop(cursor); drop(cursor);
self.notifications = new_notifications; self.notifications = new_notifications;

View file

@ -1 +1 @@
../../../LICENSE-GPL ../../LICENSE-GPL

View file

@ -4279,7 +4279,7 @@ impl Repository {
for (repo_path, status) in &*statuses.entries { for (repo_path, status) in &*statuses.entries {
changed_paths.remove(repo_path); changed_paths.remove(repo_path);
if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left, &()) { if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left) {
if cursor.item().is_some_and(|entry| entry.status == *status) { if cursor.item().is_some_and(|entry| entry.status == *status) {
continue; continue;
} }
@ -4292,7 +4292,7 @@ impl Repository {
} }
let mut cursor = prev_statuses.cursor::<PathProgress>(&()); let mut cursor = prev_statuses.cursor::<PathProgress>(&());
for path in changed_paths.into_iter() { for path in changed_paths.into_iter() {
if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left, &()) { if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
changed_path_statuses.push(Edit::Remove(PathKey(path.0))); changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
} }
} }

View file

@ -72,14 +72,13 @@ impl<'a> GitTraversal<'a> {
if entry.is_dir() { if entry.is_dir() {
let mut statuses = statuses.clone(); let mut statuses = statuses.clone();
statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left, &()); statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left);
let summary = let summary = statuses.summary(&PathTarget::Successor(repo_path.as_ref()), Bias::Left);
statuses.summary(&PathTarget::Successor(repo_path.as_ref()), Bias::Left, &());
self.current_entry_summary = Some(summary); self.current_entry_summary = Some(summary);
} else if entry.is_file() { } else if entry.is_file() {
// For a file entry, park the cursor on the corresponding status // For a file entry, park the cursor on the corresponding status
if statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left, &()) { if statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left) {
// TODO: Investigate statuses.item() being None here. // TODO: Investigate statuses.item() being None here.
self.current_entry_summary = statuses.item().map(|item| item.status.into()); self.current_entry_summary = statuses.item().map(|item| item.status.into());
} else { } else {

View file

@ -1274,15 +1274,11 @@ impl LocalLspStore {
// grouped with the previous transaction in the history // grouped with the previous transaction in the history
// based on the transaction group interval // based on the transaction group interval
buffer.finalize_last_transaction(); buffer.finalize_last_transaction();
let transaction_id = buffer buffer
.start_transaction() .start_transaction()
.context("transaction already open")?; .context("transaction already open")?;
let transaction = buffer
.get_transaction(transaction_id)
.expect("transaction started")
.clone();
buffer.end_transaction(cx); buffer.end_transaction(cx);
buffer.push_transaction(transaction, cx.background_executor().now()); let transaction_id = buffer.push_empty_transaction(cx.background_executor().now());
buffer.finalize_last_transaction(); buffer.finalize_last_transaction();
anyhow::Ok(transaction_id) anyhow::Ok(transaction_id)
})??; })??;

View file

@ -45,12 +45,6 @@ impl SearchHistory {
} }
pub fn add(&mut self, cursor: &mut SearchHistoryCursor, search_string: String) { pub fn add(&mut self, cursor: &mut SearchHistoryCursor, search_string: String) {
if let Some(selected_ix) = cursor.selection {
if self.history.get(selected_ix) == Some(&search_string) {
return;
}
}
if self.insertion_behavior == QueryInsertionBehavior::ReplacePreviousIfContains { if self.insertion_behavior == QueryInsertionBehavior::ReplacePreviousIfContains {
if let Some(previously_searched) = self.history.back_mut() { if let Some(previously_searched) = self.history.back_mut() {
if search_string.contains(previously_searched.as_str()) { if search_string.contains(previously_searched.as_str()) {
@ -144,6 +138,14 @@ mod tests {
); );
assert_eq!(search_history.current(&cursor), Some("rustlang")); assert_eq!(search_history.current(&cursor), Some("rustlang"));
// add item when it equals to current item if it's not the last one
search_history.add(&mut cursor, "php".to_string());
search_history.previous(&mut cursor);
assert_eq!(search_history.current(&cursor), Some("rustlang"));
search_history.add(&mut cursor, "rustlang".to_string());
assert_eq!(search_history.history.len(), 3, "Should add item");
assert_eq!(search_history.current(&cursor), Some("rustlang"));
// push enough items to test SEARCH_HISTORY_LIMIT // push enough items to test SEARCH_HISTORY_LIMIT
for i in 0..MAX_HISTORY_LEN * 2 { for i in 0..MAX_HISTORY_LEN * 2 {
search_history.add(&mut cursor, format!("item{i}")); search_history.add(&mut cursor, format!("item{i}"));

View file

@ -41,9 +41,9 @@ impl Rope {
self.push_chunk(chunk.as_slice()); self.push_chunk(chunk.as_slice());
let mut chunks = rope.chunks.cursor::<()>(&()); let mut chunks = rope.chunks.cursor::<()>(&());
chunks.next(&()); chunks.next();
chunks.next(&()); chunks.next();
self.chunks.append(chunks.suffix(&()), &()); self.chunks.append(chunks.suffix(), &());
self.check_invariants(); self.check_invariants();
return; return;
} }
@ -283,7 +283,7 @@ impl Rope {
return self.summary().len_utf16; return self.summary().len_utf16;
} }
let mut cursor = self.chunks.cursor::<(usize, OffsetUtf16)>(&()); let mut cursor = self.chunks.cursor::<(usize, OffsetUtf16)>(&());
cursor.seek(&offset, Bias::Left, &()); cursor.seek(&offset, Bias::Left);
let overshoot = offset - cursor.start().0; let overshoot = offset - cursor.start().0;
cursor.start().1 cursor.start().1
+ cursor.item().map_or(Default::default(), |chunk| { + cursor.item().map_or(Default::default(), |chunk| {
@ -296,7 +296,7 @@ impl Rope {
return self.summary().len; return self.summary().len;
} }
let mut cursor = self.chunks.cursor::<(OffsetUtf16, usize)>(&()); let mut cursor = self.chunks.cursor::<(OffsetUtf16, usize)>(&());
cursor.seek(&offset, Bias::Left, &()); cursor.seek(&offset, Bias::Left);
let overshoot = offset - cursor.start().0; let overshoot = offset - cursor.start().0;
cursor.start().1 cursor.start().1
+ cursor.item().map_or(Default::default(), |chunk| { + cursor.item().map_or(Default::default(), |chunk| {
@ -309,7 +309,7 @@ impl Rope {
return self.summary().lines; return self.summary().lines;
} }
let mut cursor = self.chunks.cursor::<(usize, Point)>(&()); let mut cursor = self.chunks.cursor::<(usize, Point)>(&());
cursor.seek(&offset, Bias::Left, &()); cursor.seek(&offset, Bias::Left);
let overshoot = offset - cursor.start().0; let overshoot = offset - cursor.start().0;
cursor.start().1 cursor.start().1
+ cursor.item().map_or(Point::zero(), |chunk| { + cursor.item().map_or(Point::zero(), |chunk| {
@ -322,7 +322,7 @@ impl Rope {
return self.summary().lines_utf16(); return self.summary().lines_utf16();
} }
let mut cursor = self.chunks.cursor::<(usize, PointUtf16)>(&()); let mut cursor = self.chunks.cursor::<(usize, PointUtf16)>(&());
cursor.seek(&offset, Bias::Left, &()); cursor.seek(&offset, Bias::Left);
let overshoot = offset - cursor.start().0; let overshoot = offset - cursor.start().0;
cursor.start().1 cursor.start().1
+ cursor.item().map_or(PointUtf16::zero(), |chunk| { + cursor.item().map_or(PointUtf16::zero(), |chunk| {
@ -335,7 +335,7 @@ impl Rope {
return self.summary().lines_utf16(); return self.summary().lines_utf16();
} }
let mut cursor = self.chunks.cursor::<(Point, PointUtf16)>(&()); let mut cursor = self.chunks.cursor::<(Point, PointUtf16)>(&());
cursor.seek(&point, Bias::Left, &()); cursor.seek(&point, Bias::Left);
let overshoot = point - cursor.start().0; let overshoot = point - cursor.start().0;
cursor.start().1 cursor.start().1
+ cursor.item().map_or(PointUtf16::zero(), |chunk| { + cursor.item().map_or(PointUtf16::zero(), |chunk| {
@ -348,7 +348,7 @@ impl Rope {
return self.summary().len; return self.summary().len;
} }
let mut cursor = self.chunks.cursor::<(Point, usize)>(&()); let mut cursor = self.chunks.cursor::<(Point, usize)>(&());
cursor.seek(&point, Bias::Left, &()); cursor.seek(&point, Bias::Left);
let overshoot = point - cursor.start().0; let overshoot = point - cursor.start().0;
cursor.start().1 cursor.start().1
+ cursor + cursor
@ -369,7 +369,7 @@ impl Rope {
return self.summary().len; return self.summary().len;
} }
let mut cursor = self.chunks.cursor::<(PointUtf16, usize)>(&()); let mut cursor = self.chunks.cursor::<(PointUtf16, usize)>(&());
cursor.seek(&point, Bias::Left, &()); cursor.seek(&point, Bias::Left);
let overshoot = point - cursor.start().0; let overshoot = point - cursor.start().0;
cursor.start().1 cursor.start().1
+ cursor.item().map_or(0, |chunk| { + cursor.item().map_or(0, |chunk| {
@ -382,7 +382,7 @@ impl Rope {
return self.summary().lines; return self.summary().lines;
} }
let mut cursor = self.chunks.cursor::<(PointUtf16, Point)>(&()); let mut cursor = self.chunks.cursor::<(PointUtf16, Point)>(&());
cursor.seek(&point.0, Bias::Left, &()); cursor.seek(&point.0, Bias::Left);
let overshoot = Unclipped(point.0 - cursor.start().0); let overshoot = Unclipped(point.0 - cursor.start().0);
cursor.start().1 cursor.start().1
+ cursor.item().map_or(Point::zero(), |chunk| { + cursor.item().map_or(Point::zero(), |chunk| {
@ -392,7 +392,7 @@ impl Rope {
pub fn clip_offset(&self, mut offset: usize, bias: Bias) -> usize { pub fn clip_offset(&self, mut offset: usize, bias: Bias) -> usize {
let mut cursor = self.chunks.cursor::<usize>(&()); let mut cursor = self.chunks.cursor::<usize>(&());
cursor.seek(&offset, Bias::Left, &()); cursor.seek(&offset, Bias::Left);
if let Some(chunk) = cursor.item() { if let Some(chunk) = cursor.item() {
let mut ix = offset - cursor.start(); let mut ix = offset - cursor.start();
while !chunk.text.is_char_boundary(ix) { while !chunk.text.is_char_boundary(ix) {
@ -415,7 +415,7 @@ impl Rope {
pub fn clip_offset_utf16(&self, offset: OffsetUtf16, bias: Bias) -> OffsetUtf16 { pub fn clip_offset_utf16(&self, offset: OffsetUtf16, bias: Bias) -> OffsetUtf16 {
let mut cursor = self.chunks.cursor::<OffsetUtf16>(&()); let mut cursor = self.chunks.cursor::<OffsetUtf16>(&());
cursor.seek(&offset, Bias::Right, &()); cursor.seek(&offset, Bias::Right);
if let Some(chunk) = cursor.item() { if let Some(chunk) = cursor.item() {
let overshoot = offset - cursor.start(); let overshoot = offset - cursor.start();
*cursor.start() + chunk.as_slice().clip_offset_utf16(overshoot, bias) *cursor.start() + chunk.as_slice().clip_offset_utf16(overshoot, bias)
@ -426,7 +426,7 @@ impl Rope {
pub fn clip_point(&self, point: Point, bias: Bias) -> Point { pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
let mut cursor = self.chunks.cursor::<Point>(&()); let mut cursor = self.chunks.cursor::<Point>(&());
cursor.seek(&point, Bias::Right, &()); cursor.seek(&point, Bias::Right);
if let Some(chunk) = cursor.item() { if let Some(chunk) = cursor.item() {
let overshoot = point - cursor.start(); let overshoot = point - cursor.start();
*cursor.start() + chunk.as_slice().clip_point(overshoot, bias) *cursor.start() + chunk.as_slice().clip_point(overshoot, bias)
@ -437,7 +437,7 @@ impl Rope {
pub fn clip_point_utf16(&self, point: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 { pub fn clip_point_utf16(&self, point: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
let mut cursor = self.chunks.cursor::<PointUtf16>(&()); let mut cursor = self.chunks.cursor::<PointUtf16>(&());
cursor.seek(&point.0, Bias::Right, &()); cursor.seek(&point.0, Bias::Right);
if let Some(chunk) = cursor.item() { if let Some(chunk) = cursor.item() {
let overshoot = Unclipped(point.0 - cursor.start()); let overshoot = Unclipped(point.0 - cursor.start());
*cursor.start() + chunk.as_slice().clip_point_utf16(overshoot, bias) *cursor.start() + chunk.as_slice().clip_point_utf16(overshoot, bias)
@ -450,10 +450,6 @@ impl Rope {
self.clip_point(Point::new(row, u32::MAX), Bias::Left) self.clip_point(Point::new(row, u32::MAX), Bias::Left)
.column .column
} }
pub fn ptr_eq(&self, other: &Self) -> bool {
self.chunks.ptr_eq(&other.chunks)
}
} }
impl<'a> From<&'a str> for Rope { impl<'a> From<&'a str> for Rope {
@ -514,7 +510,7 @@ pub struct Cursor<'a> {
impl<'a> Cursor<'a> { impl<'a> Cursor<'a> {
pub fn new(rope: &'a Rope, offset: usize) -> Self { pub fn new(rope: &'a Rope, offset: usize) -> Self {
let mut chunks = rope.chunks.cursor(&()); let mut chunks = rope.chunks.cursor(&());
chunks.seek(&offset, Bias::Right, &()); chunks.seek(&offset, Bias::Right);
Self { Self {
rope, rope,
chunks, chunks,
@ -525,7 +521,7 @@ impl<'a> Cursor<'a> {
pub fn seek_forward(&mut self, end_offset: usize) { pub fn seek_forward(&mut self, end_offset: usize) {
debug_assert!(end_offset >= self.offset); debug_assert!(end_offset >= self.offset);
self.chunks.seek_forward(&end_offset, Bias::Right, &()); self.chunks.seek_forward(&end_offset, Bias::Right);
self.offset = end_offset; self.offset = end_offset;
} }
@ -540,14 +536,14 @@ impl<'a> Cursor<'a> {
let mut slice = Rope::new(); let mut slice = Rope::new();
if let Some(start_chunk) = self.chunks.item() { if let Some(start_chunk) = self.chunks.item() {
let start_ix = self.offset - self.chunks.start(); let start_ix = self.offset - self.chunks.start();
let end_ix = cmp::min(end_offset, self.chunks.end(&())) - self.chunks.start(); let end_ix = cmp::min(end_offset, self.chunks.end()) - self.chunks.start();
slice.push_chunk(start_chunk.slice(start_ix..end_ix)); slice.push_chunk(start_chunk.slice(start_ix..end_ix));
} }
if end_offset > self.chunks.end(&()) { if end_offset > self.chunks.end() {
self.chunks.next(&()); self.chunks.next();
slice.append(Rope { slice.append(Rope {
chunks: self.chunks.slice(&end_offset, Bias::Right, &()), chunks: self.chunks.slice(&end_offset, Bias::Right),
}); });
if let Some(end_chunk) = self.chunks.item() { if let Some(end_chunk) = self.chunks.item() {
let end_ix = end_offset - self.chunks.start(); let end_ix = end_offset - self.chunks.start();
@ -565,13 +561,13 @@ impl<'a> Cursor<'a> {
let mut summary = D::zero(&()); let mut summary = D::zero(&());
if let Some(start_chunk) = self.chunks.item() { if let Some(start_chunk) = self.chunks.item() {
let start_ix = self.offset - self.chunks.start(); let start_ix = self.offset - self.chunks.start();
let end_ix = cmp::min(end_offset, self.chunks.end(&())) - self.chunks.start(); let end_ix = cmp::min(end_offset, self.chunks.end()) - self.chunks.start();
summary.add_assign(&D::from_chunk(start_chunk.slice(start_ix..end_ix))); summary.add_assign(&D::from_chunk(start_chunk.slice(start_ix..end_ix)));
} }
if end_offset > self.chunks.end(&()) { if end_offset > self.chunks.end() {
self.chunks.next(&()); self.chunks.next();
summary.add_assign(&self.chunks.summary(&end_offset, Bias::Right, &())); summary.add_assign(&self.chunks.summary(&end_offset, Bias::Right));
if let Some(end_chunk) = self.chunks.item() { if let Some(end_chunk) = self.chunks.item() {
let end_ix = end_offset - self.chunks.start(); let end_ix = end_offset - self.chunks.start();
summary.add_assign(&D::from_chunk(end_chunk.slice(0..end_ix))); summary.add_assign(&D::from_chunk(end_chunk.slice(0..end_ix)));
@ -603,10 +599,10 @@ impl<'a> Chunks<'a> {
pub fn new(rope: &'a Rope, range: Range<usize>, reversed: bool) -> Self { pub fn new(rope: &'a Rope, range: Range<usize>, reversed: bool) -> Self {
let mut chunks = rope.chunks.cursor(&()); let mut chunks = rope.chunks.cursor(&());
let offset = if reversed { let offset = if reversed {
chunks.seek(&range.end, Bias::Left, &()); chunks.seek(&range.end, Bias::Left);
range.end range.end
} else { } else {
chunks.seek(&range.start, Bias::Right, &()); chunks.seek(&range.start, Bias::Right);
range.start range.start
}; };
Self { Self {
@ -642,10 +638,10 @@ impl<'a> Chunks<'a> {
Bias::Right Bias::Right
}; };
if offset >= self.chunks.end(&()) { if offset >= self.chunks.end() {
self.chunks.seek_forward(&offset, bias, &()); self.chunks.seek_forward(&offset, bias);
} else { } else {
self.chunks.seek(&offset, bias, &()); self.chunks.seek(&offset, bias);
} }
self.offset = offset; self.offset = offset;
@ -674,25 +670,25 @@ impl<'a> Chunks<'a> {
found = self.offset <= self.range.end; found = self.offset <= self.range.end;
} else { } else {
self.chunks self.chunks
.search_forward(|summary| summary.text.lines.row > 0, &()); .search_forward(|summary| summary.text.lines.row > 0);
self.offset = *self.chunks.start(); self.offset = *self.chunks.start();
if let Some(newline_ix) = self.peek().and_then(|chunk| chunk.find('\n')) { if let Some(newline_ix) = self.peek().and_then(|chunk| chunk.find('\n')) {
self.offset += newline_ix + 1; self.offset += newline_ix + 1;
found = self.offset <= self.range.end; found = self.offset <= self.range.end;
} else { } else {
self.offset = self.chunks.end(&()); self.offset = self.chunks.end();
} }
} }
if self.offset == self.chunks.end(&()) { if self.offset == self.chunks.end() {
self.next(); self.next();
} }
} }
if self.offset > self.range.end { if self.offset > self.range.end {
self.offset = cmp::min(self.offset, self.range.end); self.offset = cmp::min(self.offset, self.range.end);
self.chunks.seek(&self.offset, Bias::Right, &()); self.chunks.seek(&self.offset, Bias::Right);
} }
found found
@ -711,7 +707,7 @@ impl<'a> Chunks<'a> {
let initial_offset = self.offset; let initial_offset = self.offset;
if self.offset == *self.chunks.start() { if self.offset == *self.chunks.start() {
self.chunks.prev(&()); self.chunks.prev();
} }
if let Some(chunk) = self.chunks.item() { if let Some(chunk) = self.chunks.item() {
@ -729,14 +725,14 @@ impl<'a> Chunks<'a> {
} }
self.chunks self.chunks
.search_backward(|summary| summary.text.lines.row > 0, &()); .search_backward(|summary| summary.text.lines.row > 0);
self.offset = *self.chunks.start(); self.offset = *self.chunks.start();
if let Some(chunk) = self.chunks.item() { if let Some(chunk) = self.chunks.item() {
if let Some(newline_ix) = chunk.text.rfind('\n') { if let Some(newline_ix) = chunk.text.rfind('\n') {
self.offset += newline_ix + 1; self.offset += newline_ix + 1;
if self.offset_is_valid() { if self.offset_is_valid() {
if self.offset == self.chunks.end(&()) { if self.offset == self.chunks.end() {
self.chunks.next(&()); self.chunks.next();
} }
return true; return true;
@ -746,7 +742,7 @@ impl<'a> Chunks<'a> {
if !self.offset_is_valid() || self.chunks.item().is_none() { if !self.offset_is_valid() || self.chunks.item().is_none() {
self.offset = self.range.start; self.offset = self.range.start;
self.chunks.seek(&self.offset, Bias::Right, &()); self.chunks.seek(&self.offset, Bias::Right);
} }
self.offset < initial_offset && self.offset == 0 self.offset < initial_offset && self.offset == 0
@ -765,7 +761,7 @@ impl<'a> Chunks<'a> {
slice_start..slice_end slice_start..slice_end
} else { } else {
let slice_start = self.offset - chunk_start; let slice_start = self.offset - chunk_start;
let slice_end = cmp::min(self.chunks.end(&()), self.range.end) - chunk_start; let slice_end = cmp::min(self.chunks.end(), self.range.end) - chunk_start;
slice_start..slice_end slice_start..slice_end
}; };
@ -825,12 +821,12 @@ impl<'a> Iterator for Chunks<'a> {
if self.reversed { if self.reversed {
self.offset -= chunk.len(); self.offset -= chunk.len();
if self.offset <= *self.chunks.start() { if self.offset <= *self.chunks.start() {
self.chunks.prev(&()); self.chunks.prev();
} }
} else { } else {
self.offset += chunk.len(); self.offset += chunk.len();
if self.offset >= self.chunks.end(&()) { if self.offset >= self.chunks.end() {
self.chunks.next(&()); self.chunks.next();
} }
} }
@ -848,9 +844,9 @@ impl<'a> Bytes<'a> {
pub fn new(rope: &'a Rope, range: Range<usize>, reversed: bool) -> Self { pub fn new(rope: &'a Rope, range: Range<usize>, reversed: bool) -> Self {
let mut chunks = rope.chunks.cursor(&()); let mut chunks = rope.chunks.cursor(&());
if reversed { if reversed {
chunks.seek(&range.end, Bias::Left, &()); chunks.seek(&range.end, Bias::Left);
} else { } else {
chunks.seek(&range.start, Bias::Right, &()); chunks.seek(&range.start, Bias::Right);
} }
Self { Self {
chunks, chunks,
@ -861,7 +857,7 @@ impl<'a> Bytes<'a> {
pub fn peek(&self) -> Option<&'a [u8]> { pub fn peek(&self) -> Option<&'a [u8]> {
let chunk = self.chunks.item()?; let chunk = self.chunks.item()?;
if self.reversed && self.range.start >= self.chunks.end(&()) { if self.reversed && self.range.start >= self.chunks.end() {
return None; return None;
} }
let chunk_start = *self.chunks.start(); let chunk_start = *self.chunks.start();
@ -881,9 +877,9 @@ impl<'a> Iterator for Bytes<'a> {
let result = self.peek(); let result = self.peek();
if result.is_some() { if result.is_some() {
if self.reversed { if self.reversed {
self.chunks.prev(&()); self.chunks.prev();
} else { } else {
self.chunks.next(&()); self.chunks.next();
} }
} }
result result
@ -905,9 +901,9 @@ impl io::Read for Bytes<'_> {
if len == chunk.len() { if len == chunk.len() {
if self.reversed { if self.reversed {
self.chunks.prev(&()); self.chunks.prev();
} else { } else {
self.chunks.next(&()); self.chunks.next();
} }
} }
Ok(len) Ok(len)

View file

@ -2784,6 +2784,7 @@ impl KeystrokeInput {
else { else {
log::trace!("No keybinding to stop recording keystrokes in keystroke input"); log::trace!("No keybinding to stop recording keystrokes in keystroke input");
self.close_keystrokes.take(); self.close_keystrokes.take();
self.close_keystrokes_start.take();
return CloseKeystrokeResult::None; return CloseKeystrokeResult::None;
}; };
let action_keystrokes = keybind_for_close_action.keystrokes(); let action_keystrokes = keybind_for_close_action.keystrokes();
@ -2976,7 +2977,9 @@ impl KeystrokeInput {
return; return;
} }
window.focus(&self.outer_focus_handle); window.focus(&self.outer_focus_handle);
if let Some(close_keystrokes_start) = self.close_keystrokes_start.take() { if let Some(close_keystrokes_start) = self.close_keystrokes_start.take()
&& close_keystrokes_start < self.keystrokes.len()
{
self.keystrokes.drain(close_keystrokes_start..); self.keystrokes.drain(close_keystrokes_start..);
} }
self.close_keystrokes.take(); self.close_keystrokes.take();

View file

@ -25,6 +25,7 @@ pub struct Cursor<'a, T: Item, D> {
position: D, position: D,
did_seek: bool, did_seek: bool,
at_end: bool, at_end: bool,
cx: &'a <T::Summary as Summary>::Context,
} }
impl<T: Item + fmt::Debug, D: fmt::Debug> fmt::Debug for Cursor<'_, T, D> impl<T: Item + fmt::Debug, D: fmt::Debug> fmt::Debug for Cursor<'_, T, D>
@ -52,21 +53,22 @@ where
T: Item, T: Item,
D: Dimension<'a, T::Summary>, D: Dimension<'a, T::Summary>,
{ {
pub fn new(tree: &'a SumTree<T>, cx: &<T::Summary as Summary>::Context) -> Self { pub fn new(tree: &'a SumTree<T>, cx: &'a <T::Summary as Summary>::Context) -> Self {
Self { Self {
tree, tree,
stack: ArrayVec::new(), stack: ArrayVec::new(),
position: D::zero(cx), position: D::zero(cx),
did_seek: false, did_seek: false,
at_end: tree.is_empty(), at_end: tree.is_empty(),
cx,
} }
} }
fn reset(&mut self, cx: &<T::Summary as Summary>::Context) { fn reset(&mut self) {
self.did_seek = false; self.did_seek = false;
self.at_end = self.tree.is_empty(); self.at_end = self.tree.is_empty();
self.stack.truncate(0); self.stack.truncate(0);
self.position = D::zero(cx); self.position = D::zero(self.cx);
} }
pub fn start(&self) -> &D { pub fn start(&self) -> &D {
@ -74,10 +76,10 @@ where
} }
#[track_caller] #[track_caller]
pub fn end(&self, cx: &<T::Summary as Summary>::Context) -> D { pub fn end(&self) -> D {
if let Some(item_summary) = self.item_summary() { if let Some(item_summary) = self.item_summary() {
let mut end = self.start().clone(); let mut end = self.start().clone();
end.add_summary(item_summary, cx); end.add_summary(item_summary, self.cx);
end end
} else { } else {
self.start().clone() self.start().clone()
@ -202,12 +204,12 @@ where
} }
#[track_caller] #[track_caller]
pub fn prev(&mut self, cx: &<T::Summary as Summary>::Context) { pub fn prev(&mut self) {
self.search_backward(|_| true, cx) self.search_backward(|_| true)
} }
#[track_caller] #[track_caller]
pub fn search_backward<F>(&mut self, mut filter_node: F, cx: &<T::Summary as Summary>::Context) pub fn search_backward<F>(&mut self, mut filter_node: F)
where where
F: FnMut(&T::Summary) -> bool, F: FnMut(&T::Summary) -> bool,
{ {
@ -217,13 +219,13 @@ where
} }
if self.at_end { if self.at_end {
self.position = D::zero(cx); self.position = D::zero(self.cx);
self.at_end = self.tree.is_empty(); self.at_end = self.tree.is_empty();
if !self.tree.is_empty() { if !self.tree.is_empty() {
self.stack.push(StackEntry { self.stack.push(StackEntry {
tree: self.tree, tree: self.tree,
index: self.tree.0.child_summaries().len(), index: self.tree.0.child_summaries().len(),
position: D::from_summary(self.tree.summary(), cx), position: D::from_summary(self.tree.summary(), self.cx),
}); });
} }
} }
@ -233,7 +235,7 @@ where
if let Some(StackEntry { position, .. }) = self.stack.iter().rev().nth(1) { if let Some(StackEntry { position, .. }) = self.stack.iter().rev().nth(1) {
self.position = position.clone(); self.position = position.clone();
} else { } else {
self.position = D::zero(cx); self.position = D::zero(self.cx);
} }
let entry = self.stack.last_mut().unwrap(); let entry = self.stack.last_mut().unwrap();
@ -247,7 +249,7 @@ where
} }
for summary in &entry.tree.0.child_summaries()[..entry.index] { for summary in &entry.tree.0.child_summaries()[..entry.index] {
self.position.add_summary(summary, cx); self.position.add_summary(summary, self.cx);
} }
entry.position = self.position.clone(); entry.position = self.position.clone();
@ -257,7 +259,7 @@ where
if descending { if descending {
let tree = &child_trees[entry.index]; let tree = &child_trees[entry.index];
self.stack.push(StackEntry { self.stack.push(StackEntry {
position: D::zero(cx), position: D::zero(self.cx),
tree, tree,
index: tree.0.child_summaries().len() - 1, index: tree.0.child_summaries().len() - 1,
}) })
@ -273,12 +275,12 @@ where
} }
#[track_caller] #[track_caller]
pub fn next(&mut self, cx: &<T::Summary as Summary>::Context) { pub fn next(&mut self) {
self.search_forward(|_| true, cx) self.search_forward(|_| true)
} }
#[track_caller] #[track_caller]
pub fn search_forward<F>(&mut self, mut filter_node: F, cx: &<T::Summary as Summary>::Context) pub fn search_forward<F>(&mut self, mut filter_node: F)
where where
F: FnMut(&T::Summary) -> bool, F: FnMut(&T::Summary) -> bool,
{ {
@ -289,7 +291,7 @@ where
self.stack.push(StackEntry { self.stack.push(StackEntry {
tree: self.tree, tree: self.tree,
index: 0, index: 0,
position: D::zero(cx), position: D::zero(self.cx),
}); });
descend = true; descend = true;
} }
@ -316,8 +318,8 @@ where
break; break;
} else { } else {
entry.index += 1; entry.index += 1;
entry.position.add_summary(next_summary, cx); entry.position.add_summary(next_summary, self.cx);
self.position.add_summary(next_summary, cx); self.position.add_summary(next_summary, self.cx);
} }
} }
@ -327,8 +329,8 @@ where
if !descend { if !descend {
let item_summary = &item_summaries[entry.index]; let item_summary = &item_summaries[entry.index];
entry.index += 1; entry.index += 1;
entry.position.add_summary(item_summary, cx); entry.position.add_summary(item_summary, self.cx);
self.position.add_summary(item_summary, cx); self.position.add_summary(item_summary, self.cx);
} }
loop { loop {
@ -337,8 +339,8 @@ where
return; return;
} else { } else {
entry.index += 1; entry.index += 1;
entry.position.add_summary(next_item_summary, cx); entry.position.add_summary(next_item_summary, self.cx);
self.position.add_summary(next_item_summary, cx); self.position.add_summary(next_item_summary, self.cx);
} }
} else { } else {
break None; break None;
@ -380,71 +382,51 @@ where
D: Dimension<'a, T::Summary>, D: Dimension<'a, T::Summary>,
{ {
#[track_caller] #[track_caller]
pub fn seek<Target>( pub fn seek<Target>(&mut self, pos: &Target, bias: Bias) -> bool
&mut self,
pos: &Target,
bias: Bias,
cx: &<T::Summary as Summary>::Context,
) -> bool
where where
Target: SeekTarget<'a, T::Summary, D>, Target: SeekTarget<'a, T::Summary, D>,
{ {
self.reset(cx); self.reset();
self.seek_internal(pos, bias, &mut (), cx) self.seek_internal(pos, bias, &mut ())
} }
#[track_caller] #[track_caller]
pub fn seek_forward<Target>( pub fn seek_forward<Target>(&mut self, pos: &Target, bias: Bias) -> bool
&mut self,
pos: &Target,
bias: Bias,
cx: &<T::Summary as Summary>::Context,
) -> bool
where where
Target: SeekTarget<'a, T::Summary, D>, Target: SeekTarget<'a, T::Summary, D>,
{ {
self.seek_internal(pos, bias, &mut (), cx) self.seek_internal(pos, bias, &mut ())
} }
/// Advances the cursor and returns traversed items as a tree. /// Advances the cursor and returns traversed items as a tree.
#[track_caller] #[track_caller]
pub fn slice<Target>( pub fn slice<Target>(&mut self, end: &Target, bias: Bias) -> SumTree<T>
&mut self,
end: &Target,
bias: Bias,
cx: &<T::Summary as Summary>::Context,
) -> SumTree<T>
where where
Target: SeekTarget<'a, T::Summary, D>, Target: SeekTarget<'a, T::Summary, D>,
{ {
let mut slice = SliceSeekAggregate { let mut slice = SliceSeekAggregate {
tree: SumTree::new(cx), tree: SumTree::new(self.cx),
leaf_items: ArrayVec::new(), leaf_items: ArrayVec::new(),
leaf_item_summaries: ArrayVec::new(), leaf_item_summaries: ArrayVec::new(),
leaf_summary: <T::Summary as Summary>::zero(cx), leaf_summary: <T::Summary as Summary>::zero(self.cx),
}; };
self.seek_internal(end, bias, &mut slice, cx); self.seek_internal(end, bias, &mut slice);
slice.tree slice.tree
} }
#[track_caller] #[track_caller]
pub fn suffix(&mut self, cx: &<T::Summary as Summary>::Context) -> SumTree<T> { pub fn suffix(&mut self) -> SumTree<T> {
self.slice(&End::new(), Bias::Right, cx) self.slice(&End::new(), Bias::Right)
} }
#[track_caller] #[track_caller]
pub fn summary<Target, Output>( pub fn summary<Target, Output>(&mut self, end: &Target, bias: Bias) -> Output
&mut self,
end: &Target,
bias: Bias,
cx: &<T::Summary as Summary>::Context,
) -> Output
where where
Target: SeekTarget<'a, T::Summary, D>, Target: SeekTarget<'a, T::Summary, D>,
Output: Dimension<'a, T::Summary>, Output: Dimension<'a, T::Summary>,
{ {
let mut summary = SummarySeekAggregate(Output::zero(cx)); let mut summary = SummarySeekAggregate(Output::zero(self.cx));
self.seek_internal(end, bias, &mut summary, cx); self.seek_internal(end, bias, &mut summary);
summary.0 summary.0
} }
@ -455,10 +437,9 @@ where
target: &dyn SeekTarget<'a, T::Summary, D>, target: &dyn SeekTarget<'a, T::Summary, D>,
bias: Bias, bias: Bias,
aggregate: &mut dyn SeekAggregate<'a, T>, aggregate: &mut dyn SeekAggregate<'a, T>,
cx: &<T::Summary as Summary>::Context,
) -> bool { ) -> bool {
assert!( assert!(
target.cmp(&self.position, cx) >= Ordering::Equal, target.cmp(&self.position, self.cx) >= Ordering::Equal,
"cannot seek backward", "cannot seek backward",
); );
@ -467,7 +448,7 @@ where
self.stack.push(StackEntry { self.stack.push(StackEntry {
tree: self.tree, tree: self.tree,
index: 0, index: 0,
position: D::zero(cx), position: D::zero(self.cx),
}); });
} }
@ -489,14 +470,14 @@ where
.zip(&child_summaries[entry.index..]) .zip(&child_summaries[entry.index..])
{ {
let mut child_end = self.position.clone(); let mut child_end = self.position.clone();
child_end.add_summary(child_summary, cx); child_end.add_summary(child_summary, self.cx);
let comparison = target.cmp(&child_end, cx); let comparison = target.cmp(&child_end, self.cx);
if comparison == Ordering::Greater if comparison == Ordering::Greater
|| (comparison == Ordering::Equal && bias == Bias::Right) || (comparison == Ordering::Equal && bias == Bias::Right)
{ {
self.position = child_end; self.position = child_end;
aggregate.push_tree(child_tree, child_summary, cx); aggregate.push_tree(child_tree, child_summary, self.cx);
entry.index += 1; entry.index += 1;
entry.position = self.position.clone(); entry.position = self.position.clone();
} else { } else {
@ -522,22 +503,22 @@ where
.zip(&item_summaries[entry.index..]) .zip(&item_summaries[entry.index..])
{ {
let mut child_end = self.position.clone(); let mut child_end = self.position.clone();
child_end.add_summary(item_summary, cx); child_end.add_summary(item_summary, self.cx);
let comparison = target.cmp(&child_end, cx); let comparison = target.cmp(&child_end, self.cx);
if comparison == Ordering::Greater if comparison == Ordering::Greater
|| (comparison == Ordering::Equal && bias == Bias::Right) || (comparison == Ordering::Equal && bias == Bias::Right)
{ {
self.position = child_end; self.position = child_end;
aggregate.push_item(item, item_summary, cx); aggregate.push_item(item, item_summary, self.cx);
entry.index += 1; entry.index += 1;
} else { } else {
aggregate.end_leaf(cx); aggregate.end_leaf(self.cx);
break 'outer; break 'outer;
} }
} }
aggregate.end_leaf(cx); aggregate.end_leaf(self.cx);
} }
} }
@ -551,11 +532,11 @@ where
let mut end = self.position.clone(); let mut end = self.position.clone();
if bias == Bias::Left { if bias == Bias::Left {
if let Some(summary) = self.item_summary() { if let Some(summary) = self.item_summary() {
end.add_summary(summary, cx); end.add_summary(summary, self.cx);
} }
} }
target.cmp(&end, cx) == Ordering::Equal target.cmp(&end, self.cx) == Ordering::Equal
} }
} }
@ -624,21 +605,19 @@ impl<'a, T: Item> Iterator for Iter<'a, T> {
} }
} }
impl<'a, T, S, D> Iterator for Cursor<'a, T, D> impl<'a, T: Item, D> Iterator for Cursor<'a, T, D>
where where
T: Item<Summary = S>,
S: Summary<Context = ()>,
D: Dimension<'a, T::Summary>, D: Dimension<'a, T::Summary>,
{ {
type Item = &'a T; type Item = &'a T;
fn next(&mut self) -> Option<Self::Item> { fn next(&mut self) -> Option<Self::Item> {
if !self.did_seek { if !self.did_seek {
self.next(&()); self.next();
} }
if let Some(item) = self.item() { if let Some(item) = self.item() {
self.next(&()); self.next();
Some(item) Some(item)
} else { } else {
None None
@ -651,7 +630,7 @@ pub struct FilterCursor<'a, F, T: Item, D> {
filter_node: F, filter_node: F,
} }
impl<'a, F, T, D> FilterCursor<'a, F, T, D> impl<'a, F, T: Item, D> FilterCursor<'a, F, T, D>
where where
F: FnMut(&T::Summary) -> bool, F: FnMut(&T::Summary) -> bool,
T: Item, T: Item,
@ -659,7 +638,7 @@ where
{ {
pub fn new( pub fn new(
tree: &'a SumTree<T>, tree: &'a SumTree<T>,
cx: &<T::Summary as Summary>::Context, cx: &'a <T::Summary as Summary>::Context,
filter_node: F, filter_node: F,
) -> Self { ) -> Self {
let cursor = tree.cursor::<D>(cx); let cursor = tree.cursor::<D>(cx);
@ -673,8 +652,8 @@ where
self.cursor.start() self.cursor.start()
} }
pub fn end(&self, cx: &<T::Summary as Summary>::Context) -> D { pub fn end(&self) -> D {
self.cursor.end(cx) self.cursor.end()
} }
pub fn item(&self) -> Option<&'a T> { pub fn item(&self) -> Option<&'a T> {
@ -685,31 +664,29 @@ where
self.cursor.item_summary() self.cursor.item_summary()
} }
pub fn next(&mut self, cx: &<T::Summary as Summary>::Context) { pub fn next(&mut self) {
self.cursor.search_forward(&mut self.filter_node, cx); self.cursor.search_forward(&mut self.filter_node);
} }
pub fn prev(&mut self, cx: &<T::Summary as Summary>::Context) { pub fn prev(&mut self) {
self.cursor.search_backward(&mut self.filter_node, cx); self.cursor.search_backward(&mut self.filter_node);
} }
} }
impl<'a, F, T, S, U> Iterator for FilterCursor<'a, F, T, U> impl<'a, F, T: Item, U> Iterator for FilterCursor<'a, F, T, U>
where where
F: FnMut(&T::Summary) -> bool, F: FnMut(&T::Summary) -> bool,
T: Item<Summary = S>,
S: Summary<Context = ()>, //Context for the summary must be unit type, as .next() doesn't take arguments
U: Dimension<'a, T::Summary>, U: Dimension<'a, T::Summary>,
{ {
type Item = &'a T; type Item = &'a T;
fn next(&mut self) -> Option<Self::Item> { fn next(&mut self) -> Option<Self::Item> {
if !self.cursor.did_seek { if !self.cursor.did_seek {
self.next(&()); self.next();
} }
if let Some(item) = self.item() { if let Some(item) = self.item() {
self.cursor.search_forward(&mut self.filter_node, &()); self.cursor.search_forward(&mut self.filter_node);
Some(item) Some(item)
} else { } else {
None None
@ -795,3 +772,23 @@ where
self.0.add_summary(summary, cx); self.0.add_summary(summary, cx);
} }
} }
struct End<D>(PhantomData<D>);
impl<D> End<D> {
fn new() -> Self {
Self(PhantomData)
}
}
impl<'a, S: Summary, D: Dimension<'a, S>> SeekTarget<'a, S, D> for End<D> {
fn cmp(&self, _: &D, _: &S::Context) -> Ordering {
Ordering::Greater
}
}
impl<D> fmt::Debug for End<D> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("End").finish()
}
}

View file

@ -38,7 +38,6 @@ pub trait Summary: Clone {
type Context; type Context;
fn zero(cx: &Self::Context) -> Self; fn zero(cx: &Self::Context) -> Self;
fn add_summary(&mut self, summary: &Self, cx: &Self::Context); fn add_summary(&mut self, summary: &Self, cx: &Self::Context);
} }
@ -138,26 +137,6 @@ where
} }
} }
struct End<D>(PhantomData<D>);
impl<D> End<D> {
fn new() -> Self {
Self(PhantomData)
}
}
impl<'a, S: Summary, D: Dimension<'a, S>> SeekTarget<'a, S, D> for End<D> {
fn cmp(&self, _: &D, _: &S::Context) -> Ordering {
Ordering::Greater
}
}
impl<D> fmt::Debug for End<D> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("End").finish()
}
}
/// Bias is used to settle ambiguities when determining positions in an ordered sequence. /// Bias is used to settle ambiguities when determining positions in an ordered sequence.
/// ///
/// The primary use case is for text, where Bias influences /// The primary use case is for text, where Bias influences
@ -372,10 +351,10 @@ impl<T: Item> SumTree<T> {
pub fn items(&self, cx: &<T::Summary as Summary>::Context) -> Vec<T> { pub fn items(&self, cx: &<T::Summary as Summary>::Context) -> Vec<T> {
let mut items = Vec::new(); let mut items = Vec::new();
let mut cursor = self.cursor::<()>(cx); let mut cursor = self.cursor::<()>(cx);
cursor.next(cx); cursor.next();
while let Some(item) = cursor.item() { while let Some(item) = cursor.item() {
items.push(item.clone()); items.push(item.clone());
cursor.next(cx); cursor.next();
} }
items items
} }
@ -384,7 +363,7 @@ impl<T: Item> SumTree<T> {
Iter::new(self) Iter::new(self)
} }
pub fn cursor<'a, S>(&'a self, cx: &<T::Summary as Summary>::Context) -> Cursor<'a, T, S> pub fn cursor<'a, S>(&'a self, cx: &'a <T::Summary as Summary>::Context) -> Cursor<'a, T, S>
where where
S: Dimension<'a, T::Summary>, S: Dimension<'a, T::Summary>,
{ {
@ -395,7 +374,7 @@ impl<T: Item> SumTree<T> {
/// that is returned cannot be used with Rust's iterators. /// that is returned cannot be used with Rust's iterators.
pub fn filter<'a, F, U>( pub fn filter<'a, F, U>(
&'a self, &'a self,
cx: &<T::Summary as Summary>::Context, cx: &'a <T::Summary as Summary>::Context,
filter_node: F, filter_node: F,
) -> FilterCursor<'a, F, T, U> ) -> FilterCursor<'a, F, T, U>
where where
@ -525,10 +504,6 @@ impl<T: Item> SumTree<T> {
} }
} }
pub fn ptr_eq(&self, other: &Self) -> bool {
Arc::ptr_eq(&self.0, &other.0)
}
fn push_tree_recursive( fn push_tree_recursive(
&mut self, &mut self,
other: SumTree<T>, other: SumTree<T>,
@ -686,11 +661,6 @@ impl<T: Item> SumTree<T> {
} => child_trees.last().unwrap().rightmost_leaf(), } => child_trees.last().unwrap().rightmost_leaf(),
} }
} }
#[cfg(debug_assertions)]
pub fn _debug_entries(&self) -> Vec<&T> {
self.iter().collect::<Vec<_>>()
}
} }
impl<T: Item + PartialEq> PartialEq for SumTree<T> { impl<T: Item + PartialEq> PartialEq for SumTree<T> {
@ -710,15 +680,15 @@ impl<T: KeyedItem> SumTree<T> {
let mut replaced = None; let mut replaced = None;
*self = { *self = {
let mut cursor = self.cursor::<T::Key>(cx); let mut cursor = self.cursor::<T::Key>(cx);
let mut new_tree = cursor.slice(&item.key(), Bias::Left, cx); let mut new_tree = cursor.slice(&item.key(), Bias::Left);
if let Some(cursor_item) = cursor.item() { if let Some(cursor_item) = cursor.item() {
if cursor_item.key() == item.key() { if cursor_item.key() == item.key() {
replaced = Some(cursor_item.clone()); replaced = Some(cursor_item.clone());
cursor.next(cx); cursor.next();
} }
} }
new_tree.push(item, cx); new_tree.push(item, cx);
new_tree.append(cursor.suffix(cx), cx); new_tree.append(cursor.suffix(), cx);
new_tree new_tree
}; };
replaced replaced
@ -728,14 +698,14 @@ impl<T: KeyedItem> SumTree<T> {
let mut removed = None; let mut removed = None;
*self = { *self = {
let mut cursor = self.cursor::<T::Key>(cx); let mut cursor = self.cursor::<T::Key>(cx);
let mut new_tree = cursor.slice(key, Bias::Left, cx); let mut new_tree = cursor.slice(key, Bias::Left);
if let Some(item) = cursor.item() { if let Some(item) = cursor.item() {
if item.key() == *key { if item.key() == *key {
removed = Some(item.clone()); removed = Some(item.clone());
cursor.next(cx); cursor.next();
} }
} }
new_tree.append(cursor.suffix(cx), cx); new_tree.append(cursor.suffix(), cx);
new_tree new_tree
}; };
removed removed
@ -758,7 +728,7 @@ impl<T: KeyedItem> SumTree<T> {
let mut new_tree = SumTree::new(cx); let mut new_tree = SumTree::new(cx);
let mut buffered_items = Vec::new(); let mut buffered_items = Vec::new();
cursor.seek(&T::Key::zero(cx), Bias::Left, cx); cursor.seek(&T::Key::zero(cx), Bias::Left);
for edit in edits { for edit in edits {
let new_key = edit.key(); let new_key = edit.key();
let mut old_item = cursor.item(); let mut old_item = cursor.item();
@ -768,7 +738,7 @@ impl<T: KeyedItem> SumTree<T> {
.map_or(false, |old_item| old_item.key() < new_key) .map_or(false, |old_item| old_item.key() < new_key)
{ {
new_tree.extend(buffered_items.drain(..), cx); new_tree.extend(buffered_items.drain(..), cx);
let slice = cursor.slice(&new_key, Bias::Left, cx); let slice = cursor.slice(&new_key, Bias::Left);
new_tree.append(slice, cx); new_tree.append(slice, cx);
old_item = cursor.item(); old_item = cursor.item();
} }
@ -776,7 +746,7 @@ impl<T: KeyedItem> SumTree<T> {
if let Some(old_item) = old_item { if let Some(old_item) = old_item {
if old_item.key() == new_key { if old_item.key() == new_key {
removed.push(old_item.clone()); removed.push(old_item.clone());
cursor.next(cx); cursor.next();
} }
} }
@ -789,70 +759,25 @@ impl<T: KeyedItem> SumTree<T> {
} }
new_tree.extend(buffered_items, cx); new_tree.extend(buffered_items, cx);
new_tree.append(cursor.suffix(cx), cx); new_tree.append(cursor.suffix(), cx);
new_tree new_tree
}; };
removed removed
} }
pub fn get(&self, key: &T::Key, cx: &<T::Summary as Summary>::Context) -> Option<&T> { pub fn get<'a>(
&'a self,
key: &T::Key,
cx: &'a <T::Summary as Summary>::Context,
) -> Option<&'a T> {
let mut cursor = self.cursor::<T::Key>(cx); let mut cursor = self.cursor::<T::Key>(cx);
if cursor.seek(key, Bias::Left, cx) { if cursor.seek(key, Bias::Left) {
cursor.item() cursor.item()
} else { } else {
None None
} }
} }
#[inline]
pub fn contains(&self, key: &T::Key, cx: &<T::Summary as Summary>::Context) -> bool {
self.get(key, cx).is_some()
}
pub fn update<F, R>(
&mut self,
key: &T::Key,
cx: &<T::Summary as Summary>::Context,
f: F,
) -> Option<R>
where
F: FnOnce(&mut T) -> R,
{
let mut cursor = self.cursor::<T::Key>(cx);
let mut new_tree = cursor.slice(key, Bias::Left, cx);
let mut result = None;
if Ord::cmp(key, &cursor.end(cx)) == Ordering::Equal {
let mut updated = cursor.item().unwrap().clone();
result = Some(f(&mut updated));
new_tree.push(updated, cx);
cursor.next(cx);
}
new_tree.append(cursor.suffix(cx), cx);
drop(cursor);
*self = new_tree;
result
}
pub fn retain<F: FnMut(&T) -> bool>(
&mut self,
cx: &<T::Summary as Summary>::Context,
mut predicate: F,
) {
let mut new_map = SumTree::new(cx);
let mut cursor = self.cursor::<T::Key>(cx);
cursor.next(cx);
while let Some(item) = cursor.item() {
if predicate(&item) {
new_map.push(item.clone(), cx);
}
cursor.next(cx);
}
drop(cursor);
*self = new_map;
}
} }
impl<T, S> Default for SumTree<T> impl<T, S> Default for SumTree<T>
@ -1061,14 +986,14 @@ mod tests {
tree = { tree = {
let mut cursor = tree.cursor::<Count>(&()); let mut cursor = tree.cursor::<Count>(&());
let mut new_tree = cursor.slice(&Count(splice_start), Bias::Right, &()); let mut new_tree = cursor.slice(&Count(splice_start), Bias::Right);
if rng.r#gen() { if rng.r#gen() {
new_tree.extend(new_items, &()); new_tree.extend(new_items, &());
} else { } else {
new_tree.par_extend(new_items, &()); new_tree.par_extend(new_items, &());
} }
cursor.seek(&Count(splice_end), Bias::Right, &()); cursor.seek(&Count(splice_end), Bias::Right);
new_tree.append(cursor.slice(&tree_end, Bias::Right, &()), &()); new_tree.append(cursor.slice(&tree_end, Bias::Right), &());
new_tree new_tree
}; };
@ -1090,10 +1015,10 @@ mod tests {
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let mut item_ix = if rng.r#gen() { let mut item_ix = if rng.r#gen() {
filter_cursor.next(&()); filter_cursor.next();
0 0
} else { } else {
filter_cursor.prev(&()); filter_cursor.prev();
expected_filtered_items.len().saturating_sub(1) expected_filtered_items.len().saturating_sub(1)
}; };
while item_ix < expected_filtered_items.len() { while item_ix < expected_filtered_items.len() {
@ -1103,19 +1028,19 @@ mod tests {
assert_eq!(actual_item, &reference_item); assert_eq!(actual_item, &reference_item);
assert_eq!(filter_cursor.start().0, reference_index); assert_eq!(filter_cursor.start().0, reference_index);
log::info!("next"); log::info!("next");
filter_cursor.next(&()); filter_cursor.next();
item_ix += 1; item_ix += 1;
while item_ix > 0 && rng.gen_bool(0.2) { while item_ix > 0 && rng.gen_bool(0.2) {
log::info!("prev"); log::info!("prev");
filter_cursor.prev(&()); filter_cursor.prev();
item_ix -= 1; item_ix -= 1;
if item_ix == 0 && rng.gen_bool(0.2) { if item_ix == 0 && rng.gen_bool(0.2) {
filter_cursor.prev(&()); filter_cursor.prev();
assert_eq!(filter_cursor.item(), None); assert_eq!(filter_cursor.item(), None);
assert_eq!(filter_cursor.start().0, 0); assert_eq!(filter_cursor.start().0, 0);
filter_cursor.next(&()); filter_cursor.next();
} }
} }
} }
@ -1124,9 +1049,9 @@ mod tests {
let mut before_start = false; let mut before_start = false;
let mut cursor = tree.cursor::<Count>(&()); let mut cursor = tree.cursor::<Count>(&());
let start_pos = rng.gen_range(0..=reference_items.len()); let start_pos = rng.gen_range(0..=reference_items.len());
cursor.seek(&Count(start_pos), Bias::Right, &()); cursor.seek(&Count(start_pos), Bias::Right);
let mut pos = rng.gen_range(start_pos..=reference_items.len()); let mut pos = rng.gen_range(start_pos..=reference_items.len());
cursor.seek_forward(&Count(pos), Bias::Right, &()); cursor.seek_forward(&Count(pos), Bias::Right);
for i in 0..10 { for i in 0..10 {
assert_eq!(cursor.start().0, pos); assert_eq!(cursor.start().0, pos);
@ -1152,13 +1077,13 @@ mod tests {
} }
if i < 5 { if i < 5 {
cursor.next(&()); cursor.next();
if pos < reference_items.len() { if pos < reference_items.len() {
pos += 1; pos += 1;
before_start = false; before_start = false;
} }
} else { } else {
cursor.prev(&()); cursor.prev();
if pos == 0 { if pos == 0 {
before_start = true; before_start = true;
} }
@ -1174,11 +1099,11 @@ mod tests {
let end_bias = if rng.r#gen() { Bias::Left } else { Bias::Right }; let end_bias = if rng.r#gen() { Bias::Left } else { Bias::Right };
let mut cursor = tree.cursor::<Count>(&()); let mut cursor = tree.cursor::<Count>(&());
cursor.seek(&Count(start), start_bias, &()); cursor.seek(&Count(start), start_bias);
let slice = cursor.slice(&Count(end), end_bias, &()); let slice = cursor.slice(&Count(end), end_bias);
cursor.seek(&Count(start), start_bias, &()); cursor.seek(&Count(start), start_bias);
let summary = cursor.summary::<_, Sum>(&Count(end), end_bias, &()); let summary = cursor.summary::<_, Sum>(&Count(end), end_bias);
assert_eq!(summary.0, slice.summary().sum); assert_eq!(summary.0, slice.summary().sum);
} }
@ -1191,19 +1116,19 @@ mod tests {
let tree = SumTree::<u8>::default(); let tree = SumTree::<u8>::default();
let mut cursor = tree.cursor::<IntegersSummary>(&()); let mut cursor = tree.cursor::<IntegersSummary>(&());
assert_eq!( assert_eq!(
cursor.slice(&Count(0), Bias::Right, &()).items(&()), cursor.slice(&Count(0), Bias::Right).items(&()),
Vec::<u8>::new() Vec::<u8>::new()
); );
assert_eq!(cursor.item(), None); assert_eq!(cursor.item(), None);
assert_eq!(cursor.prev_item(), None); assert_eq!(cursor.prev_item(), None);
assert_eq!(cursor.next_item(), None); assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 0); assert_eq!(cursor.start().sum, 0);
cursor.prev(&()); cursor.prev();
assert_eq!(cursor.item(), None); assert_eq!(cursor.item(), None);
assert_eq!(cursor.prev_item(), None); assert_eq!(cursor.prev_item(), None);
assert_eq!(cursor.next_item(), None); assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 0); assert_eq!(cursor.start().sum, 0);
cursor.next(&()); cursor.next();
assert_eq!(cursor.item(), None); assert_eq!(cursor.item(), None);
assert_eq!(cursor.prev_item(), None); assert_eq!(cursor.prev_item(), None);
assert_eq!(cursor.next_item(), None); assert_eq!(cursor.next_item(), None);
@ -1214,7 +1139,7 @@ mod tests {
tree.extend(vec![1], &()); tree.extend(vec![1], &());
let mut cursor = tree.cursor::<IntegersSummary>(&()); let mut cursor = tree.cursor::<IntegersSummary>(&());
assert_eq!( assert_eq!(
cursor.slice(&Count(0), Bias::Right, &()).items(&()), cursor.slice(&Count(0), Bias::Right).items(&()),
Vec::<u8>::new() Vec::<u8>::new()
); );
assert_eq!(cursor.item(), Some(&1)); assert_eq!(cursor.item(), Some(&1));
@ -1222,29 +1147,29 @@ mod tests {
assert_eq!(cursor.next_item(), None); assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 0); assert_eq!(cursor.start().sum, 0);
cursor.next(&()); cursor.next();
assert_eq!(cursor.item(), None); assert_eq!(cursor.item(), None);
assert_eq!(cursor.prev_item(), Some(&1)); assert_eq!(cursor.prev_item(), Some(&1));
assert_eq!(cursor.next_item(), None); assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 1); assert_eq!(cursor.start().sum, 1);
cursor.prev(&()); cursor.prev();
assert_eq!(cursor.item(), Some(&1)); assert_eq!(cursor.item(), Some(&1));
assert_eq!(cursor.prev_item(), None); assert_eq!(cursor.prev_item(), None);
assert_eq!(cursor.next_item(), None); assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 0); assert_eq!(cursor.start().sum, 0);
let mut cursor = tree.cursor::<IntegersSummary>(&()); let mut cursor = tree.cursor::<IntegersSummary>(&());
assert_eq!(cursor.slice(&Count(1), Bias::Right, &()).items(&()), [1]); assert_eq!(cursor.slice(&Count(1), Bias::Right).items(&()), [1]);
assert_eq!(cursor.item(), None); assert_eq!(cursor.item(), None);
assert_eq!(cursor.prev_item(), Some(&1)); assert_eq!(cursor.prev_item(), Some(&1));
assert_eq!(cursor.next_item(), None); assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 1); assert_eq!(cursor.start().sum, 1);
cursor.seek(&Count(0), Bias::Right, &()); cursor.seek(&Count(0), Bias::Right);
assert_eq!( assert_eq!(
cursor cursor
.slice(&tree.extent::<Count>(&()), Bias::Right, &()) .slice(&tree.extent::<Count>(&()), Bias::Right)
.items(&()), .items(&()),
[1] [1]
); );
@ -1258,80 +1183,80 @@ mod tests {
tree.extend(vec![1, 2, 3, 4, 5, 6], &()); tree.extend(vec![1, 2, 3, 4, 5, 6], &());
let mut cursor = tree.cursor::<IntegersSummary>(&()); let mut cursor = tree.cursor::<IntegersSummary>(&());
assert_eq!(cursor.slice(&Count(2), Bias::Right, &()).items(&()), [1, 2]); assert_eq!(cursor.slice(&Count(2), Bias::Right).items(&()), [1, 2]);
assert_eq!(cursor.item(), Some(&3)); assert_eq!(cursor.item(), Some(&3));
assert_eq!(cursor.prev_item(), Some(&2)); assert_eq!(cursor.prev_item(), Some(&2));
assert_eq!(cursor.next_item(), Some(&4)); assert_eq!(cursor.next_item(), Some(&4));
assert_eq!(cursor.start().sum, 3); assert_eq!(cursor.start().sum, 3);
cursor.next(&()); cursor.next();
assert_eq!(cursor.item(), Some(&4)); assert_eq!(cursor.item(), Some(&4));
assert_eq!(cursor.prev_item(), Some(&3)); assert_eq!(cursor.prev_item(), Some(&3));
assert_eq!(cursor.next_item(), Some(&5)); assert_eq!(cursor.next_item(), Some(&5));
assert_eq!(cursor.start().sum, 6); assert_eq!(cursor.start().sum, 6);
cursor.next(&()); cursor.next();
assert_eq!(cursor.item(), Some(&5)); assert_eq!(cursor.item(), Some(&5));
assert_eq!(cursor.prev_item(), Some(&4)); assert_eq!(cursor.prev_item(), Some(&4));
assert_eq!(cursor.next_item(), Some(&6)); assert_eq!(cursor.next_item(), Some(&6));
assert_eq!(cursor.start().sum, 10); assert_eq!(cursor.start().sum, 10);
cursor.next(&()); cursor.next();
assert_eq!(cursor.item(), Some(&6)); assert_eq!(cursor.item(), Some(&6));
assert_eq!(cursor.prev_item(), Some(&5)); assert_eq!(cursor.prev_item(), Some(&5));
assert_eq!(cursor.next_item(), None); assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 15); assert_eq!(cursor.start().sum, 15);
cursor.next(&()); cursor.next();
cursor.next(&()); cursor.next();
assert_eq!(cursor.item(), None); assert_eq!(cursor.item(), None);
assert_eq!(cursor.prev_item(), Some(&6)); assert_eq!(cursor.prev_item(), Some(&6));
assert_eq!(cursor.next_item(), None); assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 21); assert_eq!(cursor.start().sum, 21);
cursor.prev(&()); cursor.prev();
assert_eq!(cursor.item(), Some(&6)); assert_eq!(cursor.item(), Some(&6));
assert_eq!(cursor.prev_item(), Some(&5)); assert_eq!(cursor.prev_item(), Some(&5));
assert_eq!(cursor.next_item(), None); assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 15); assert_eq!(cursor.start().sum, 15);
cursor.prev(&()); cursor.prev();
assert_eq!(cursor.item(), Some(&5)); assert_eq!(cursor.item(), Some(&5));
assert_eq!(cursor.prev_item(), Some(&4)); assert_eq!(cursor.prev_item(), Some(&4));
assert_eq!(cursor.next_item(), Some(&6)); assert_eq!(cursor.next_item(), Some(&6));
assert_eq!(cursor.start().sum, 10); assert_eq!(cursor.start().sum, 10);
cursor.prev(&()); cursor.prev();
assert_eq!(cursor.item(), Some(&4)); assert_eq!(cursor.item(), Some(&4));
assert_eq!(cursor.prev_item(), Some(&3)); assert_eq!(cursor.prev_item(), Some(&3));
assert_eq!(cursor.next_item(), Some(&5)); assert_eq!(cursor.next_item(), Some(&5));
assert_eq!(cursor.start().sum, 6); assert_eq!(cursor.start().sum, 6);
cursor.prev(&()); cursor.prev();
assert_eq!(cursor.item(), Some(&3)); assert_eq!(cursor.item(), Some(&3));
assert_eq!(cursor.prev_item(), Some(&2)); assert_eq!(cursor.prev_item(), Some(&2));
assert_eq!(cursor.next_item(), Some(&4)); assert_eq!(cursor.next_item(), Some(&4));
assert_eq!(cursor.start().sum, 3); assert_eq!(cursor.start().sum, 3);
cursor.prev(&()); cursor.prev();
assert_eq!(cursor.item(), Some(&2)); assert_eq!(cursor.item(), Some(&2));
assert_eq!(cursor.prev_item(), Some(&1)); assert_eq!(cursor.prev_item(), Some(&1));
assert_eq!(cursor.next_item(), Some(&3)); assert_eq!(cursor.next_item(), Some(&3));
assert_eq!(cursor.start().sum, 1); assert_eq!(cursor.start().sum, 1);
cursor.prev(&()); cursor.prev();
assert_eq!(cursor.item(), Some(&1)); assert_eq!(cursor.item(), Some(&1));
assert_eq!(cursor.prev_item(), None); assert_eq!(cursor.prev_item(), None);
assert_eq!(cursor.next_item(), Some(&2)); assert_eq!(cursor.next_item(), Some(&2));
assert_eq!(cursor.start().sum, 0); assert_eq!(cursor.start().sum, 0);
cursor.prev(&()); cursor.prev();
assert_eq!(cursor.item(), None); assert_eq!(cursor.item(), None);
assert_eq!(cursor.prev_item(), None); assert_eq!(cursor.prev_item(), None);
assert_eq!(cursor.next_item(), Some(&1)); assert_eq!(cursor.next_item(), Some(&1));
assert_eq!(cursor.start().sum, 0); assert_eq!(cursor.start().sum, 0);
cursor.next(&()); cursor.next();
assert_eq!(cursor.item(), Some(&1)); assert_eq!(cursor.item(), Some(&1));
assert_eq!(cursor.prev_item(), None); assert_eq!(cursor.prev_item(), None);
assert_eq!(cursor.next_item(), Some(&2)); assert_eq!(cursor.next_item(), Some(&2));
@ -1340,7 +1265,7 @@ mod tests {
let mut cursor = tree.cursor::<IntegersSummary>(&()); let mut cursor = tree.cursor::<IntegersSummary>(&());
assert_eq!( assert_eq!(
cursor cursor
.slice(&tree.extent::<Count>(&()), Bias::Right, &()) .slice(&tree.extent::<Count>(&()), Bias::Right)
.items(&()), .items(&()),
tree.items(&()) tree.items(&())
); );
@ -1349,10 +1274,10 @@ mod tests {
assert_eq!(cursor.next_item(), None); assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 21); assert_eq!(cursor.start().sum, 21);
cursor.seek(&Count(3), Bias::Right, &()); cursor.seek(&Count(3), Bias::Right);
assert_eq!( assert_eq!(
cursor cursor
.slice(&tree.extent::<Count>(&()), Bias::Right, &()) .slice(&tree.extent::<Count>(&()), Bias::Right)
.items(&()), .items(&()),
[4, 5, 6] [4, 5, 6]
); );
@ -1362,25 +1287,16 @@ mod tests {
assert_eq!(cursor.start().sum, 21); assert_eq!(cursor.start().sum, 21);
// Seeking can bias left or right // Seeking can bias left or right
cursor.seek(&Count(1), Bias::Left, &()); cursor.seek(&Count(1), Bias::Left);
assert_eq!(cursor.item(), Some(&1)); assert_eq!(cursor.item(), Some(&1));
cursor.seek(&Count(1), Bias::Right, &()); cursor.seek(&Count(1), Bias::Right);
assert_eq!(cursor.item(), Some(&2)); assert_eq!(cursor.item(), Some(&2));
// Slicing without resetting starts from where the cursor is parked at. // Slicing without resetting starts from where the cursor is parked at.
cursor.seek(&Count(1), Bias::Right, &()); cursor.seek(&Count(1), Bias::Right);
assert_eq!( assert_eq!(cursor.slice(&Count(3), Bias::Right).items(&()), vec![2, 3]);
cursor.slice(&Count(3), Bias::Right, &()).items(&()), assert_eq!(cursor.slice(&Count(6), Bias::Left).items(&()), vec![4, 5]);
vec![2, 3] assert_eq!(cursor.slice(&Count(6), Bias::Right).items(&()), vec![6]);
);
assert_eq!(
cursor.slice(&Count(6), Bias::Left, &()).items(&()),
vec![4, 5]
);
assert_eq!(
cursor.slice(&Count(6), Bias::Right, &()).items(&()),
vec![6]
);
} }
#[test] #[test]

View file

@ -54,7 +54,7 @@ impl<K: Clone + Ord, V: Clone> TreeMap<K, V> {
pub fn get(&self, key: &K) -> Option<&V> { pub fn get(&self, key: &K) -> Option<&V> {
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>(&()); let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>(&());
cursor.seek(&MapKeyRef(Some(key)), Bias::Left, &()); cursor.seek(&MapKeyRef(Some(key)), Bias::Left);
if let Some(item) = cursor.item() { if let Some(item) = cursor.item() {
if Some(key) == item.key().0.as_ref() { if Some(key) == item.key().0.as_ref() {
Some(&item.value) Some(&item.value)
@ -86,12 +86,12 @@ impl<K: Clone + Ord, V: Clone> TreeMap<K, V> {
let mut removed = None; let mut removed = None;
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>(&()); let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>(&());
let key = MapKeyRef(Some(key)); let key = MapKeyRef(Some(key));
let mut new_tree = cursor.slice(&key, Bias::Left, &()); let mut new_tree = cursor.slice(&key, Bias::Left);
if key.cmp(&cursor.end(&()), &()) == Ordering::Equal { if key.cmp(&cursor.end(), &()) == Ordering::Equal {
removed = Some(cursor.item().unwrap().value.clone()); removed = Some(cursor.item().unwrap().value.clone());
cursor.next(&()); cursor.next();
} }
new_tree.append(cursor.suffix(&()), &()); new_tree.append(cursor.suffix(), &());
drop(cursor); drop(cursor);
self.0 = new_tree; self.0 = new_tree;
removed removed
@ -101,9 +101,9 @@ impl<K: Clone + Ord, V: Clone> TreeMap<K, V> {
let start = MapSeekTargetAdaptor(start); let start = MapSeekTargetAdaptor(start);
let end = MapSeekTargetAdaptor(end); let end = MapSeekTargetAdaptor(end);
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>(&()); let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>(&());
let mut new_tree = cursor.slice(&start, Bias::Left, &()); let mut new_tree = cursor.slice(&start, Bias::Left);
cursor.seek(&end, Bias::Left, &()); cursor.seek(&end, Bias::Left);
new_tree.append(cursor.suffix(&()), &()); new_tree.append(cursor.suffix(), &());
drop(cursor); drop(cursor);
self.0 = new_tree; self.0 = new_tree;
} }
@ -112,15 +112,15 @@ impl<K: Clone + Ord, V: Clone> TreeMap<K, V> {
pub fn closest(&self, key: &K) -> Option<(&K, &V)> { pub fn closest(&self, key: &K) -> Option<(&K, &V)> {
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>(&()); let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>(&());
let key = MapKeyRef(Some(key)); let key = MapKeyRef(Some(key));
cursor.seek(&key, Bias::Right, &()); cursor.seek(&key, Bias::Right);
cursor.prev(&()); cursor.prev();
cursor.item().map(|item| (&item.key, &item.value)) cursor.item().map(|item| (&item.key, &item.value))
} }
pub fn iter_from<'a>(&'a self, from: &K) -> impl Iterator<Item = (&'a K, &'a V)> + 'a { pub fn iter_from<'a>(&'a self, from: &K) -> impl Iterator<Item = (&'a K, &'a V)> + 'a {
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>(&()); let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>(&());
let from_key = MapKeyRef(Some(from)); let from_key = MapKeyRef(Some(from));
cursor.seek(&from_key, Bias::Left, &()); cursor.seek(&from_key, Bias::Left);
cursor.map(|map_entry| (&map_entry.key, &map_entry.value)) cursor.map(|map_entry| (&map_entry.key, &map_entry.value))
} }
@ -131,15 +131,15 @@ impl<K: Clone + Ord, V: Clone> TreeMap<K, V> {
{ {
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>(&()); let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>(&());
let key = MapKeyRef(Some(key)); let key = MapKeyRef(Some(key));
let mut new_tree = cursor.slice(&key, Bias::Left, &()); let mut new_tree = cursor.slice(&key, Bias::Left);
let mut result = None; let mut result = None;
if key.cmp(&cursor.end(&()), &()) == Ordering::Equal { if key.cmp(&cursor.end(), &()) == Ordering::Equal {
let mut updated = cursor.item().unwrap().clone(); let mut updated = cursor.item().unwrap().clone();
result = Some(f(&mut updated.value)); result = Some(f(&mut updated.value));
new_tree.push(updated, &()); new_tree.push(updated, &());
cursor.next(&()); cursor.next();
} }
new_tree.append(cursor.suffix(&()), &()); new_tree.append(cursor.suffix(), &());
drop(cursor); drop(cursor);
self.0 = new_tree; self.0 = new_tree;
result result
@ -149,12 +149,12 @@ impl<K: Clone + Ord, V: Clone> TreeMap<K, V> {
let mut new_map = SumTree::<MapEntry<K, V>>::default(); let mut new_map = SumTree::<MapEntry<K, V>>::default();
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>(&()); let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>(&());
cursor.next(&()); cursor.next();
while let Some(item) = cursor.item() { while let Some(item) = cursor.item() {
if predicate(&item.key, &item.value) { if predicate(&item.key, &item.value) {
new_map.push(item.clone(), &()); new_map.push(item.clone(), &());
} }
cursor.next(&()); cursor.next();
} }
drop(cursor); drop(cursor);

View file

@ -101,7 +101,7 @@ impl Anchor {
} else { } else {
let fragment_id = buffer.fragment_id_for_anchor(self); let fragment_id = buffer.fragment_id_for_anchor(self);
let mut fragment_cursor = buffer.fragments.cursor::<(Option<&Locator>, usize)>(&None); let mut fragment_cursor = buffer.fragments.cursor::<(Option<&Locator>, usize)>(&None);
fragment_cursor.seek(&Some(fragment_id), Bias::Left, &None); fragment_cursor.seek(&Some(fragment_id), Bias::Left);
fragment_cursor fragment_cursor
.item() .item()
.map_or(false, |fragment| fragment.visible) .map_or(false, |fragment| fragment.visible)

View file

@ -320,7 +320,39 @@ impl History {
last_edit_at: now, last_edit_at: now,
suppress_grouping: false, suppress_grouping: false,
}); });
self.redo_stack.clear(); }
/// Differs from `push_transaction` in that it does not clear the redo
/// stack. Intended to be used to create a parent transaction to merge
/// potential child transactions into.
///
/// The caller is responsible for removing it from the undo history using
/// `forget_transaction` if no edits are merged into it. Otherwise, if edits
/// are merged into this transaction, the caller is responsible for ensuring
/// the redo stack is cleared. The easiest way to ensure the redo stack is
/// cleared is to create transactions with the usual `start_transaction` and
/// `end_transaction` methods and merging the resulting transactions into
/// the transaction created by this method
fn push_empty_transaction(
&mut self,
start: clock::Global,
now: Instant,
clock: &mut clock::Lamport,
) -> TransactionId {
assert_eq!(self.transaction_depth, 0);
let id = clock.tick();
let transaction = Transaction {
id,
start,
edit_ids: Vec::new(),
};
self.undo_stack.push(HistoryEntry {
transaction,
first_edit_at: now,
last_edit_at: now,
suppress_grouping: false,
});
id
} }
fn push_undo(&mut self, op_id: clock::Lamport) { fn push_undo(&mut self, op_id: clock::Lamport) {
@ -824,14 +856,13 @@ impl Buffer {
let mut new_ropes = let mut new_ropes =
RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
let mut old_fragments = self.fragments.cursor::<FragmentTextSummary>(&None); let mut old_fragments = self.fragments.cursor::<FragmentTextSummary>(&None);
let mut new_fragments = let mut new_fragments = old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right);
old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right, &None);
new_ropes.append(new_fragments.summary().text); new_ropes.append(new_fragments.summary().text);
let mut fragment_start = old_fragments.start().visible; let mut fragment_start = old_fragments.start().visible;
for (range, new_text) in edits { for (range, new_text) in edits {
let new_text = LineEnding::normalize_arc(new_text.into()); let new_text = LineEnding::normalize_arc(new_text.into());
let fragment_end = old_fragments.end(&None).visible; let fragment_end = old_fragments.end().visible;
// If the current fragment ends before this range, then jump ahead to the first fragment // If the current fragment ends before this range, then jump ahead to the first fragment
// that extends past the start of this range, reusing any intervening fragments. // that extends past the start of this range, reusing any intervening fragments.
@ -847,10 +878,10 @@ impl Buffer {
new_ropes.push_fragment(&suffix, suffix.visible); new_ropes.push_fragment(&suffix, suffix.visible);
new_fragments.push(suffix, &None); new_fragments.push(suffix, &None);
} }
old_fragments.next(&None); old_fragments.next();
} }
let slice = old_fragments.slice(&range.start, Bias::Right, &None); let slice = old_fragments.slice(&range.start, Bias::Right);
new_ropes.append(slice.summary().text); new_ropes.append(slice.summary().text);
new_fragments.append(slice, &None); new_fragments.append(slice, &None);
fragment_start = old_fragments.start().visible; fragment_start = old_fragments.start().visible;
@ -903,7 +934,7 @@ impl Buffer {
// portions as deleted. // portions as deleted.
while fragment_start < range.end { while fragment_start < range.end {
let fragment = old_fragments.item().unwrap(); let fragment = old_fragments.item().unwrap();
let fragment_end = old_fragments.end(&None).visible; let fragment_end = old_fragments.end().visible;
let mut intersection = fragment.clone(); let mut intersection = fragment.clone();
let intersection_end = cmp::min(range.end, fragment_end); let intersection_end = cmp::min(range.end, fragment_end);
if fragment.visible { if fragment.visible {
@ -930,7 +961,7 @@ impl Buffer {
fragment_start = intersection_end; fragment_start = intersection_end;
} }
if fragment_end <= range.end { if fragment_end <= range.end {
old_fragments.next(&None); old_fragments.next();
} }
} }
@ -942,7 +973,7 @@ impl Buffer {
// If the current fragment has been partially consumed, then consume the rest of it // If the current fragment has been partially consumed, then consume the rest of it
// and advance to the next fragment before slicing. // and advance to the next fragment before slicing.
if fragment_start > old_fragments.start().visible { if fragment_start > old_fragments.start().visible {
let fragment_end = old_fragments.end(&None).visible; let fragment_end = old_fragments.end().visible;
if fragment_end > fragment_start { if fragment_end > fragment_start {
let mut suffix = old_fragments.item().unwrap().clone(); let mut suffix = old_fragments.item().unwrap().clone();
suffix.len = fragment_end - fragment_start; suffix.len = fragment_end - fragment_start;
@ -951,10 +982,10 @@ impl Buffer {
new_ropes.push_fragment(&suffix, suffix.visible); new_ropes.push_fragment(&suffix, suffix.visible);
new_fragments.push(suffix, &None); new_fragments.push(suffix, &None);
} }
old_fragments.next(&None); old_fragments.next();
} }
let suffix = old_fragments.suffix(&None); let suffix = old_fragments.suffix();
new_ropes.append(suffix.summary().text); new_ropes.append(suffix.summary().text);
new_fragments.append(suffix, &None); new_fragments.append(suffix, &None);
let (visible_text, deleted_text) = new_ropes.finish(); let (visible_text, deleted_text) = new_ropes.finish();
@ -1041,16 +1072,13 @@ impl Buffer {
let mut new_ropes = let mut new_ropes =
RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
let mut old_fragments = self.fragments.cursor::<(VersionedFullOffset, usize)>(&cx); let mut old_fragments = self.fragments.cursor::<(VersionedFullOffset, usize)>(&cx);
let mut new_fragments = old_fragments.slice( let mut new_fragments =
&VersionedFullOffset::Offset(ranges[0].start), old_fragments.slice(&VersionedFullOffset::Offset(ranges[0].start), Bias::Left);
Bias::Left,
&cx,
);
new_ropes.append(new_fragments.summary().text); new_ropes.append(new_fragments.summary().text);
let mut fragment_start = old_fragments.start().0.full_offset(); let mut fragment_start = old_fragments.start().0.full_offset();
for (range, new_text) in edits { for (range, new_text) in edits {
let fragment_end = old_fragments.end(&cx).0.full_offset(); let fragment_end = old_fragments.end().0.full_offset();
// If the current fragment ends before this range, then jump ahead to the first fragment // If the current fragment ends before this range, then jump ahead to the first fragment
// that extends past the start of this range, reusing any intervening fragments. // that extends past the start of this range, reusing any intervening fragments.
@ -1067,18 +1095,18 @@ impl Buffer {
new_ropes.push_fragment(&suffix, suffix.visible); new_ropes.push_fragment(&suffix, suffix.visible);
new_fragments.push(suffix, &None); new_fragments.push(suffix, &None);
} }
old_fragments.next(&cx); old_fragments.next();
} }
let slice = let slice =
old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left, &cx); old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left);
new_ropes.append(slice.summary().text); new_ropes.append(slice.summary().text);
new_fragments.append(slice, &None); new_fragments.append(slice, &None);
fragment_start = old_fragments.start().0.full_offset(); fragment_start = old_fragments.start().0.full_offset();
} }
// If we are at the end of a non-concurrent fragment, advance to the next one. // If we are at the end of a non-concurrent fragment, advance to the next one.
let fragment_end = old_fragments.end(&cx).0.full_offset(); let fragment_end = old_fragments.end().0.full_offset();
if fragment_end == range.start && fragment_end > fragment_start { if fragment_end == range.start && fragment_end > fragment_start {
let mut fragment = old_fragments.item().unwrap().clone(); let mut fragment = old_fragments.item().unwrap().clone();
fragment.len = fragment_end.0 - fragment_start.0; fragment.len = fragment_end.0 - fragment_start.0;
@ -1086,7 +1114,7 @@ impl Buffer {
new_insertions.push(InsertionFragment::insert_new(&fragment)); new_insertions.push(InsertionFragment::insert_new(&fragment));
new_ropes.push_fragment(&fragment, fragment.visible); new_ropes.push_fragment(&fragment, fragment.visible);
new_fragments.push(fragment, &None); new_fragments.push(fragment, &None);
old_fragments.next(&cx); old_fragments.next();
fragment_start = old_fragments.start().0.full_offset(); fragment_start = old_fragments.start().0.full_offset();
} }
@ -1096,7 +1124,7 @@ impl Buffer {
if fragment_start == range.start && fragment.timestamp > timestamp { if fragment_start == range.start && fragment.timestamp > timestamp {
new_ropes.push_fragment(fragment, fragment.visible); new_ropes.push_fragment(fragment, fragment.visible);
new_fragments.push(fragment.clone(), &None); new_fragments.push(fragment.clone(), &None);
old_fragments.next(&cx); old_fragments.next();
debug_assert_eq!(fragment_start, range.start); debug_assert_eq!(fragment_start, range.start);
} else { } else {
break; break;
@ -1152,7 +1180,7 @@ impl Buffer {
// portions as deleted. // portions as deleted.
while fragment_start < range.end { while fragment_start < range.end {
let fragment = old_fragments.item().unwrap(); let fragment = old_fragments.item().unwrap();
let fragment_end = old_fragments.end(&cx).0.full_offset(); let fragment_end = old_fragments.end().0.full_offset();
let mut intersection = fragment.clone(); let mut intersection = fragment.clone();
let intersection_end = cmp::min(range.end, fragment_end); let intersection_end = cmp::min(range.end, fragment_end);
if fragment.was_visible(version, &self.undo_map) { if fragment.was_visible(version, &self.undo_map) {
@ -1181,7 +1209,7 @@ impl Buffer {
fragment_start = intersection_end; fragment_start = intersection_end;
} }
if fragment_end <= range.end { if fragment_end <= range.end {
old_fragments.next(&cx); old_fragments.next();
} }
} }
} }
@ -1189,7 +1217,7 @@ impl Buffer {
// If the current fragment has been partially consumed, then consume the rest of it // If the current fragment has been partially consumed, then consume the rest of it
// and advance to the next fragment before slicing. // and advance to the next fragment before slicing.
if fragment_start > old_fragments.start().0.full_offset() { if fragment_start > old_fragments.start().0.full_offset() {
let fragment_end = old_fragments.end(&cx).0.full_offset(); let fragment_end = old_fragments.end().0.full_offset();
if fragment_end > fragment_start { if fragment_end > fragment_start {
let mut suffix = old_fragments.item().unwrap().clone(); let mut suffix = old_fragments.item().unwrap().clone();
suffix.len = fragment_end.0 - fragment_start.0; suffix.len = fragment_end.0 - fragment_start.0;
@ -1198,10 +1226,10 @@ impl Buffer {
new_ropes.push_fragment(&suffix, suffix.visible); new_ropes.push_fragment(&suffix, suffix.visible);
new_fragments.push(suffix, &None); new_fragments.push(suffix, &None);
} }
old_fragments.next(&cx); old_fragments.next();
} }
let suffix = old_fragments.suffix(&cx); let suffix = old_fragments.suffix();
new_ropes.append(suffix.summary().text); new_ropes.append(suffix.summary().text);
new_fragments.append(suffix, &None); new_fragments.append(suffix, &None);
let (visible_text, deleted_text) = new_ropes.finish(); let (visible_text, deleted_text) = new_ropes.finish();
@ -1250,7 +1278,6 @@ impl Buffer {
split_offset: insertion_slice.range.start, split_offset: insertion_slice.range.start,
}, },
Bias::Left, Bias::Left,
&(),
); );
} }
while let Some(item) = insertions_cursor.item() { while let Some(item) = insertions_cursor.item() {
@ -1260,7 +1287,7 @@ impl Buffer {
break; break;
} }
fragment_ids.push(&item.fragment_id); fragment_ids.push(&item.fragment_id);
insertions_cursor.next(&()); insertions_cursor.next();
} }
} }
fragment_ids.sort_unstable(); fragment_ids.sort_unstable();
@ -1277,7 +1304,7 @@ impl Buffer {
RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
for fragment_id in self.fragment_ids_for_edits(undo.counts.keys()) { for fragment_id in self.fragment_ids_for_edits(undo.counts.keys()) {
let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left, &None); let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left);
new_ropes.append(preceding_fragments.summary().text); new_ropes.append(preceding_fragments.summary().text);
new_fragments.append(preceding_fragments, &None); new_fragments.append(preceding_fragments, &None);
@ -1304,11 +1331,11 @@ impl Buffer {
new_ropes.push_fragment(&fragment, fragment_was_visible); new_ropes.push_fragment(&fragment, fragment_was_visible);
new_fragments.push(fragment, &None); new_fragments.push(fragment, &None);
old_fragments.next(&None); old_fragments.next();
} }
} }
let suffix = old_fragments.suffix(&None); let suffix = old_fragments.suffix();
new_ropes.append(suffix.summary().text); new_ropes.append(suffix.summary().text);
new_fragments.append(suffix, &None); new_fragments.append(suffix, &None);
@ -1495,6 +1522,24 @@ impl Buffer {
self.history.push_transaction(transaction, now); self.history.push_transaction(transaction, now);
} }
/// Differs from `push_transaction` in that it does not clear the redo stack.
/// The caller responsible for
/// Differs from `push_transaction` in that it does not clear the redo
/// stack. Intended to be used to create a parent transaction to merge
/// potential child transactions into.
///
/// The caller is responsible for removing it from the undo history using
/// `forget_transaction` if no edits are merged into it. Otherwise, if edits
/// are merged into this transaction, the caller is responsible for ensuring
/// the redo stack is cleared. The easiest way to ensure the redo stack is
/// cleared is to create transactions with the usual `start_transaction` and
/// `end_transaction` methods and merging the resulting transactions into
/// the transaction created by this method
pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
self.history
.push_empty_transaction(self.version.clone(), now, &mut self.lamport_clock)
}
pub fn edited_ranges_for_transaction_id<D>( pub fn edited_ranges_for_transaction_id<D>(
&self, &self,
transaction_id: TransactionId, transaction_id: TransactionId,
@ -1521,7 +1566,7 @@ impl Buffer {
.fragment_ids_for_edits(edit_ids.into_iter()) .fragment_ids_for_edits(edit_ids.into_iter())
.into_iter() .into_iter()
.filter_map(move |fragment_id| { .filter_map(move |fragment_id| {
cursor.seek_forward(&Some(fragment_id), Bias::Left, &None); cursor.seek_forward(&Some(fragment_id), Bias::Left);
let fragment = cursor.item()?; let fragment = cursor.item()?;
let start_offset = cursor.start().1; let start_offset = cursor.start().1;
let end_offset = start_offset + if fragment.visible { fragment.len } else { 0 }; let end_offset = start_offset + if fragment.visible { fragment.len } else { 0 };
@ -1743,7 +1788,7 @@ impl Buffer {
let mut cursor = self.snapshot.fragments.cursor::<Option<&Locator>>(&None); let mut cursor = self.snapshot.fragments.cursor::<Option<&Locator>>(&None);
for insertion_fragment in self.snapshot.insertions.cursor::<()>(&()) { for insertion_fragment in self.snapshot.insertions.cursor::<()>(&()) {
cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left, &None); cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left);
let fragment = cursor.item().unwrap(); let fragment = cursor.item().unwrap();
assert_eq!(insertion_fragment.fragment_id, fragment.id); assert_eq!(insertion_fragment.fragment_id, fragment.id);
assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset); assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset);
@ -1862,7 +1907,7 @@ impl BufferSnapshot {
.filter::<_, FragmentTextSummary>(&None, move |summary| { .filter::<_, FragmentTextSummary>(&None, move |summary| {
!version.observed_all(&summary.max_version) !version.observed_all(&summary.max_version)
}); });
cursor.next(&None); cursor.next();
let mut visible_cursor = self.visible_text.cursor(0); let mut visible_cursor = self.visible_text.cursor(0);
let mut deleted_cursor = self.deleted_text.cursor(0); let mut deleted_cursor = self.deleted_text.cursor(0);
@ -1875,18 +1920,18 @@ impl BufferSnapshot {
if fragment.was_visible(version, &self.undo_map) { if fragment.was_visible(version, &self.undo_map) {
if fragment.visible { if fragment.visible {
let text = visible_cursor.slice(cursor.end(&None).visible); let text = visible_cursor.slice(cursor.end().visible);
rope.append(text); rope.append(text);
} else { } else {
deleted_cursor.seek_forward(cursor.start().deleted); deleted_cursor.seek_forward(cursor.start().deleted);
let text = deleted_cursor.slice(cursor.end(&None).deleted); let text = deleted_cursor.slice(cursor.end().deleted);
rope.append(text); rope.append(text);
} }
} else if fragment.visible { } else if fragment.visible {
visible_cursor.seek_forward(cursor.end(&None).visible); visible_cursor.seek_forward(cursor.end().visible);
} }
cursor.next(&None); cursor.next();
} }
if cursor.start().visible > visible_cursor.offset() { if cursor.start().visible > visible_cursor.offset() {
@ -2202,7 +2247,7 @@ impl BufferSnapshot {
timestamp: anchor.timestamp, timestamp: anchor.timestamp,
split_offset: anchor.offset, split_offset: anchor.offset,
}; };
insertion_cursor.seek(&anchor_key, anchor.bias, &()); insertion_cursor.seek(&anchor_key, anchor.bias);
if let Some(insertion) = insertion_cursor.item() { if let Some(insertion) = insertion_cursor.item() {
let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key); let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
if comparison == Ordering::Greater if comparison == Ordering::Greater
@ -2210,15 +2255,15 @@ impl BufferSnapshot {
&& comparison == Ordering::Equal && comparison == Ordering::Equal
&& anchor.offset > 0) && anchor.offset > 0)
{ {
insertion_cursor.prev(&()); insertion_cursor.prev();
} }
} else { } else {
insertion_cursor.prev(&()); insertion_cursor.prev();
} }
let insertion = insertion_cursor.item().expect("invalid insertion"); let insertion = insertion_cursor.item().expect("invalid insertion");
assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion"); assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion");
fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left, &None); fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left);
let fragment = fragment_cursor.item().unwrap(); let fragment = fragment_cursor.item().unwrap();
let mut fragment_offset = fragment_cursor.start().1; let mut fragment_offset = fragment_cursor.start().1;
if fragment.visible { if fragment.visible {
@ -2249,7 +2294,7 @@ impl BufferSnapshot {
split_offset: anchor.offset, split_offset: anchor.offset,
}; };
let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(&()); let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(&());
insertion_cursor.seek(&anchor_key, anchor.bias, &()); insertion_cursor.seek(&anchor_key, anchor.bias);
if let Some(insertion) = insertion_cursor.item() { if let Some(insertion) = insertion_cursor.item() {
let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key); let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
if comparison == Ordering::Greater if comparison == Ordering::Greater
@ -2257,10 +2302,10 @@ impl BufferSnapshot {
&& comparison == Ordering::Equal && comparison == Ordering::Equal
&& anchor.offset > 0) && anchor.offset > 0)
{ {
insertion_cursor.prev(&()); insertion_cursor.prev();
} }
} else { } else {
insertion_cursor.prev(&()); insertion_cursor.prev();
} }
let Some(insertion) = insertion_cursor let Some(insertion) = insertion_cursor
@ -2274,7 +2319,7 @@ impl BufferSnapshot {
}; };
let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, usize)>(&None); let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, usize)>(&None);
fragment_cursor.seek(&Some(&insertion.fragment_id), Bias::Left, &None); fragment_cursor.seek(&Some(&insertion.fragment_id), Bias::Left);
let fragment = fragment_cursor.item().unwrap(); let fragment = fragment_cursor.item().unwrap();
let mut fragment_offset = fragment_cursor.start().1; let mut fragment_offset = fragment_cursor.start().1;
if fragment.visible { if fragment.visible {
@ -2295,7 +2340,7 @@ impl BufferSnapshot {
split_offset: anchor.offset, split_offset: anchor.offset,
}; };
let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(&()); let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(&());
insertion_cursor.seek(&anchor_key, anchor.bias, &()); insertion_cursor.seek(&anchor_key, anchor.bias);
if let Some(insertion) = insertion_cursor.item() { if let Some(insertion) = insertion_cursor.item() {
let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key); let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
if comparison == Ordering::Greater if comparison == Ordering::Greater
@ -2303,10 +2348,10 @@ impl BufferSnapshot {
&& comparison == Ordering::Equal && comparison == Ordering::Equal
&& anchor.offset > 0) && anchor.offset > 0)
{ {
insertion_cursor.prev(&()); insertion_cursor.prev();
} }
} else { } else {
insertion_cursor.prev(&()); insertion_cursor.prev();
} }
let Some(insertion) = insertion_cursor.item().filter(|insertion| { let Some(insertion) = insertion_cursor.item().filter(|insertion| {
@ -2345,7 +2390,7 @@ impl BufferSnapshot {
Anchor::MAX Anchor::MAX
} else { } else {
let mut fragment_cursor = self.fragments.cursor::<usize>(&None); let mut fragment_cursor = self.fragments.cursor::<usize>(&None);
fragment_cursor.seek(&offset, bias, &None); fragment_cursor.seek(&offset, bias);
let fragment = fragment_cursor.item().unwrap(); let fragment = fragment_cursor.item().unwrap();
let overshoot = offset - *fragment_cursor.start(); let overshoot = offset - *fragment_cursor.start();
Anchor { Anchor {
@ -2425,7 +2470,7 @@ impl BufferSnapshot {
let mut cursor = self.fragments.filter(&None, move |summary| { let mut cursor = self.fragments.filter(&None, move |summary| {
!since.observed_all(&summary.max_version) !since.observed_all(&summary.max_version)
}); });
cursor.next(&None); cursor.next();
Some(cursor) Some(cursor)
}; };
let mut cursor = self let mut cursor = self
@ -2433,7 +2478,7 @@ impl BufferSnapshot {
.cursor::<(Option<&Locator>, FragmentTextSummary)>(&None); .cursor::<(Option<&Locator>, FragmentTextSummary)>(&None);
let start_fragment_id = self.fragment_id_for_anchor(&range.start); let start_fragment_id = self.fragment_id_for_anchor(&range.start);
cursor.seek(&Some(start_fragment_id), Bias::Left, &None); cursor.seek(&Some(start_fragment_id), Bias::Left);
let mut visible_start = cursor.start().1.visible; let mut visible_start = cursor.start().1.visible;
let mut deleted_start = cursor.start().1.deleted; let mut deleted_start = cursor.start().1.deleted;
if let Some(fragment) = cursor.item() { if let Some(fragment) = cursor.item() {
@ -2466,7 +2511,7 @@ impl BufferSnapshot {
let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| { let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
!since.observed_all(&summary.max_version) !since.observed_all(&summary.max_version)
}); });
cursor.next(&None); cursor.next();
while let Some(fragment) = cursor.item() { while let Some(fragment) = cursor.item() {
if fragment.id > *end_fragment_id { if fragment.id > *end_fragment_id {
break; break;
@ -2478,7 +2523,7 @@ impl BufferSnapshot {
return true; return true;
} }
} }
cursor.next(&None); cursor.next();
} }
} }
false false
@ -2489,14 +2534,14 @@ impl BufferSnapshot {
let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| { let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
!since.observed_all(&summary.max_version) !since.observed_all(&summary.max_version)
}); });
cursor.next(&None); cursor.next();
while let Some(fragment) = cursor.item() { while let Some(fragment) = cursor.item() {
let was_visible = fragment.was_visible(since, &self.undo_map); let was_visible = fragment.was_visible(since, &self.undo_map);
let is_visible = fragment.visible; let is_visible = fragment.visible;
if was_visible != is_visible { if was_visible != is_visible {
return true; return true;
} }
cursor.next(&None); cursor.next();
} }
} }
false false
@ -2601,7 +2646,7 @@ impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Ed
while let Some(fragment) = cursor.item() { while let Some(fragment) = cursor.item() {
if fragment.id < *self.range.start.0 { if fragment.id < *self.range.start.0 {
cursor.next(&None); cursor.next();
continue; continue;
} else if fragment.id > *self.range.end.0 { } else if fragment.id > *self.range.end.0 {
break; break;
@ -2634,7 +2679,7 @@ impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Ed
}; };
if !fragment.was_visible(self.since, self.undos) && fragment.visible { if !fragment.was_visible(self.since, self.undos) && fragment.visible {
let mut visible_end = cursor.end(&None).visible; let mut visible_end = cursor.end().visible;
if fragment.id == *self.range.end.0 { if fragment.id == *self.range.end.0 {
visible_end = cmp::min( visible_end = cmp::min(
visible_end, visible_end,
@ -2660,7 +2705,7 @@ impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Ed
self.new_end = new_end; self.new_end = new_end;
} else if fragment.was_visible(self.since, self.undos) && !fragment.visible { } else if fragment.was_visible(self.since, self.undos) && !fragment.visible {
let mut deleted_end = cursor.end(&None).deleted; let mut deleted_end = cursor.end().deleted;
if fragment.id == *self.range.end.0 { if fragment.id == *self.range.end.0 {
deleted_end = cmp::min( deleted_end = cmp::min(
deleted_end, deleted_end,
@ -2690,7 +2735,7 @@ impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Ed
self.old_end = old_end; self.old_end = old_end;
} }
cursor.next(&None); cursor.next();
} }
pending_edit pending_edit

View file

@ -74,7 +74,6 @@ impl UndoMap {
undo_id: Default::default(), undo_id: Default::default(),
}, },
Bias::Left, Bias::Left,
&(),
); );
let mut undo_count = 0; let mut undo_count = 0;
@ -99,7 +98,6 @@ impl UndoMap {
undo_id: Default::default(), undo_id: Default::default(),
}, },
Bias::Left, Bias::Left,
&(),
); );
let mut undo_count = 0; let mut undo_count = 0;

View file

@ -11,8 +11,8 @@ use gpui::{App, Task, Window, actions};
use rpc::proto::{self}; use rpc::proto::{self};
use theme::ActiveTheme; use theme::ActiveTheme;
use ui::{ use ui::{
Avatar, AvatarAudioStatusIndicator, ContextMenu, ContextMenuItem, Facepile, PopoverMenu, Avatar, AvatarAudioStatusIndicator, ContextMenu, ContextMenuItem, Divider, Facepile,
SplitButton, TintColor, Tooltip, prelude::*, PopoverMenu, SplitButton, SplitButtonStyle, TintColor, Tooltip, prelude::*,
}; };
use util::maybe; use util::maybe;
use workspace::notifications::DetachAndPromptErr; use workspace::notifications::DetachAndPromptErr;
@ -383,6 +383,7 @@ impl TitleBar {
.detach_and_log_err(cx); .detach_and_log_err(cx);
}), }),
) )
.child(Divider::vertical())
.into_any_element(), .into_any_element(),
); );
@ -497,6 +498,7 @@ impl TitleBar {
trigger.render(window, cx), trigger.render(window, cx),
self.render_screen_list().into_any_element(), self.render_screen_list().into_any_element(),
) )
.style(SplitButtonStyle::Outlined)
.into_any_element(), .into_any_element(),
); );
} }
@ -547,10 +549,17 @@ impl TitleBar {
entry_render: Box::new(move |_, _| { entry_render: Box::new(move |_, _| {
h_flex() h_flex()
.gap_2() .gap_2()
.child(Icon::new(IconName::Screen).when( .child(
active_screenshare_id == Some(meta.id), Icon::new(IconName::Screen)
|this| this.color(Color::Accent), .size(IconSize::XSmall)
)) .map(|this| {
if active_screenshare_id == Some(meta.id) {
this.color(Color::Accent)
} else {
this.color(Color::Muted)
}
}),
)
.child(Label::new(label.clone())) .child(Label::new(label.clone()))
.child( .child(
Label::new(resolution.clone()) Label::new(resolution.clone())

View file

@ -1,6 +1,6 @@
use gpui::{ use gpui::{
AnyElement, App, BoxShadow, IntoElement, ParentElement, RenderOnce, Styled, Window, div, hsla, AnyElement, App, BoxShadow, IntoElement, ParentElement, RenderOnce, Styled, Window, div, hsla,
point, px, point, prelude::FluentBuilder, px,
}; };
use theme::ActiveTheme; use theme::ActiveTheme;
@ -8,6 +8,12 @@ use crate::{ElevationIndex, h_flex};
use super::ButtonLike; use super::ButtonLike;
#[derive(Clone, Copy, PartialEq)]
pub enum SplitButtonStyle {
Filled,
Outlined,
}
/// /// A button with two parts: a primary action on the left and a secondary action on the right. /// /// A button with two parts: a primary action on the left and a secondary action on the right.
/// ///
/// The left side is a [`ButtonLike`] with the main action, while the right side can contain /// The left side is a [`ButtonLike`] with the main action, while the right side can contain
@ -18,11 +24,21 @@ use super::ButtonLike;
pub struct SplitButton { pub struct SplitButton {
pub left: ButtonLike, pub left: ButtonLike,
pub right: AnyElement, pub right: AnyElement,
style: SplitButtonStyle,
} }
impl SplitButton { impl SplitButton {
pub fn new(left: ButtonLike, right: AnyElement) -> Self { pub fn new(left: ButtonLike, right: AnyElement) -> Self {
Self { left, right } Self {
left,
right,
style: SplitButtonStyle::Filled,
}
}
pub fn style(mut self, style: SplitButtonStyle) -> Self {
self.style = style;
self
} }
} }
@ -31,21 +47,23 @@ impl RenderOnce for SplitButton {
h_flex() h_flex()
.rounded_sm() .rounded_sm()
.border_1() .border_1()
.border_color(cx.theme().colors().text_muted.alpha(0.12)) .border_color(cx.theme().colors().border.opacity(0.5))
.child(div().flex_grow().child(self.left)) .child(div().flex_grow().child(self.left))
.child( .child(
div() div()
.h_full() .h_full()
.w_px() .w_px()
.bg(cx.theme().colors().text_muted.alpha(0.16)), .bg(cx.theme().colors().border.opacity(0.5)),
) )
.child(self.right) .child(self.right)
.bg(ElevationIndex::Surface.on_elevation_bg(cx)) .when(self.style == SplitButtonStyle::Filled, |this| {
.shadow(vec![BoxShadow { this.bg(ElevationIndex::Surface.on_elevation_bg(cx))
color: hsla(0.0, 0.0, 0.0, 0.16), .shadow(vec![BoxShadow {
offset: point(px(0.), px(1.)), color: hsla(0.0, 0.0, 0.0, 0.16),
blur_radius: px(0.), offset: point(px(0.), px(1.)),
spread_radius: px(0.), blur_radius: px(0.),
}]) spread_radius: px(0.),
}])
})
} }
} }

View file

@ -84,7 +84,9 @@ impl RenderOnce for List {
(false, _) => this.children(self.children), (false, _) => this.children(self.children),
(true, Some(false)) => this, (true, Some(false)) => this,
(true, _) => match self.empty_message { (true, _) => match self.empty_message {
EmptyMessage::Text(text) => this.child(Label::new(text).color(Color::Muted)), EmptyMessage::Text(text) => {
this.px_2().child(Label::new(text).color(Color::Muted))
}
EmptyMessage::Element(element) => this.child(element), EmptyMessage::Element(element) => this.child(element),
}, },
}) })

View file

@ -93,6 +93,7 @@ impl RenderOnce for Modal {
#[derive(IntoElement)] #[derive(IntoElement)]
pub struct ModalHeader { pub struct ModalHeader {
headline: Option<SharedString>, headline: Option<SharedString>,
description: Option<SharedString>,
children: SmallVec<[AnyElement; 2]>, children: SmallVec<[AnyElement; 2]>,
show_dismiss_button: bool, show_dismiss_button: bool,
show_back_button: bool, show_back_button: bool,
@ -108,6 +109,7 @@ impl ModalHeader {
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {
headline: None, headline: None,
description: None,
children: SmallVec::new(), children: SmallVec::new(),
show_dismiss_button: false, show_dismiss_button: false,
show_back_button: false, show_back_button: false,
@ -123,6 +125,11 @@ impl ModalHeader {
self self
} }
pub fn description(mut self, description: impl Into<SharedString>) -> Self {
self.description = Some(description.into());
self
}
pub fn show_dismiss_button(mut self, show: bool) -> Self { pub fn show_dismiss_button(mut self, show: bool) -> Self {
self.show_dismiss_button = show; self.show_dismiss_button = show;
self self
@ -171,7 +178,14 @@ impl RenderOnce for ModalHeader {
}), }),
) )
}) })
.child(div().flex_1().children(children)) .child(
v_flex().flex_1().children(children).when_some(
self.description,
|this, description| {
this.child(Label::new(description).color(Color::Muted).mb_2())
},
),
)
.when(self.show_dismiss_button, |this| { .when(self.show_dismiss_button, |this| {
this.child( this.child(
IconButton::new("dismiss", IconName::Close) IconButton::new("dismiss", IconName::Close)

View file

@ -588,7 +588,7 @@ impl SwitchField {
toggle_state: toggle_state.into(), toggle_state: toggle_state.into(),
on_click: Arc::new(on_click), on_click: Arc::new(on_click),
disabled: false, disabled: false,
color: SwitchColor::default(), color: SwitchColor::Accent,
} }
} }
@ -634,6 +634,15 @@ impl RenderOnce for SwitchField {
} }
}), }),
) )
.when(!self.disabled, |this| {
this.on_click({
let on_click = self.on_click.clone();
let toggle_state = self.toggle_state;
move |_click, window, cx| {
(on_click)(&toggle_state.inverse(), window, cx);
}
})
})
} }
} }

View file

@ -97,6 +97,10 @@ impl SingleLineInput {
pub fn editor(&self) -> &Entity<Editor> { pub fn editor(&self) -> &Entity<Editor> {
&self.editor &self.editor
} }
pub fn text(&self, cx: &App) -> String {
self.editor().read(cx).text(cx)
}
} }
impl Render for SingleLineInput { impl Render for SingleLineInput {

View file

@ -1,4 +1,4 @@
use client::{TelemetrySettings, telemetry::Telemetry}; use client::{DisableAiSettings, TelemetrySettings, telemetry::Telemetry};
use db::kvp::KEY_VALUE_STORE; use db::kvp::KEY_VALUE_STORE;
use gpui::{ use gpui::{
Action, App, Context, Entity, EventEmitter, FocusHandle, Focusable, InteractiveElement, Action, App, Context, Entity, EventEmitter, FocusHandle, Focusable, InteractiveElement,
@ -174,23 +174,25 @@ impl Render for WelcomePage {
.ok(); .ok();
})), })),
) )
.child( .when(!DisableAiSettings::get_global(cx).disable_ai, |parent| {
Button::new( parent.child(
"try-zed-edit-prediction", Button::new(
edit_prediction_label, "edit_prediction_onboarding",
edit_prediction_label,
)
.disabled(edit_prediction_provider_is_zed)
.icon(IconName::ZedPredict)
.icon_size(IconSize::XSmall)
.icon_color(Color::Muted)
.icon_position(IconPosition::Start)
.on_click(
cx.listener(|_, _, window, cx| {
telemetry::event!("Welcome Screen Try Edit Prediction clicked");
window.dispatch_action(zed_actions::OpenZedPredictOnboarding.boxed_clone(), cx);
}),
),
) )
.disabled(edit_prediction_provider_is_zed) })
.icon(IconName::ZedPredict)
.icon_size(IconSize::XSmall)
.icon_color(Color::Muted)
.icon_position(IconPosition::Start)
.on_click(
cx.listener(|_, _, window, cx| {
telemetry::event!("Welcome Screen Try Edit Prediction clicked");
window.dispatch_action(zed_actions::OpenZedPredictOnboarding.boxed_clone(), cx);
}),
),
)
.child( .child(
Button::new("edit settings", "Edit Settings") Button::new("edit settings", "Edit Settings")
.icon(IconName::Settings) .icon(IconName::Settings)

View file

@ -242,6 +242,7 @@ struct PanelEntry {
pub struct PanelButtons { pub struct PanelButtons {
dock: Entity<Dock>, dock: Entity<Dock>,
_settings_subscription: Subscription,
} }
impl Dock { impl Dock {
@ -373,6 +374,12 @@ impl Dock {
}) })
} }
pub fn first_enabled_panel_idx_excluding(&self, exclude_name: &str, cx: &App) -> Option<usize> {
self.panel_entries.iter().position(|entry| {
entry.panel.persistent_name() != exclude_name && entry.panel.enabled(cx)
})
}
fn active_panel_entry(&self) -> Option<&PanelEntry> { fn active_panel_entry(&self) -> Option<&PanelEntry> {
self.active_panel_index self.active_panel_index
.and_then(|index| self.panel_entries.get(index)) .and_then(|index| self.panel_entries.get(index))
@ -833,7 +840,11 @@ impl Render for Dock {
impl PanelButtons { impl PanelButtons {
pub fn new(dock: Entity<Dock>, cx: &mut Context<Self>) -> Self { pub fn new(dock: Entity<Dock>, cx: &mut Context<Self>) -> Self {
cx.observe(&dock, |_, _, cx| cx.notify()).detach(); cx.observe(&dock, |_, _, cx| cx.notify()).detach();
Self { dock } let settings_subscription = cx.observe_global::<SettingsStore>(|_, cx| cx.notify());
Self {
dock,
_settings_subscription: settings_subscription,
}
} }
} }

View file

@ -3239,28 +3239,37 @@ impl Pane {
split_direction = None; split_direction = None;
} }
if let Ok(open_task) = workspace.update_in(cx, |workspace, window, cx| { if let Ok((open_task, to_pane)) =
if let Some(split_direction) = split_direction { workspace.update_in(cx, |workspace, window, cx| {
to_pane = workspace.split_pane(to_pane, split_direction, window, cx); if let Some(split_direction) = split_direction {
} to_pane =
workspace.open_paths( workspace.split_pane(to_pane, split_direction, window, cx);
paths, }
OpenOptions { (
visible: Some(OpenVisible::OnlyDirectories), workspace.open_paths(
..Default::default() paths,
}, OpenOptions {
Some(to_pane.downgrade()), visible: Some(OpenVisible::OnlyDirectories),
window, ..Default::default()
cx, },
) Some(to_pane.downgrade()),
}) { window,
cx,
),
to_pane,
)
})
{
let opened_items: Vec<_> = open_task.await; let opened_items: Vec<_> = open_task.await;
_ = workspace.update(cx, |workspace, cx| { _ = workspace.update_in(cx, |workspace, window, cx| {
for item in opened_items.into_iter().flatten() { for item in opened_items.into_iter().flatten() {
if let Err(e) = item { if let Err(e) = item {
workspace.show_error(&e, cx); workspace.show_error(&e, cx);
} }
} }
if to_pane.read(cx).items_len() == 0 {
workspace.remove_pane(to_pane, None, window, cx);
}
}); });
} }
}) })

View file

@ -2454,16 +2454,16 @@ impl Snapshot {
self.entries_by_path = { self.entries_by_path = {
let mut cursor = self.entries_by_path.cursor::<TraversalProgress>(&()); let mut cursor = self.entries_by_path.cursor::<TraversalProgress>(&());
let mut new_entries_by_path = let mut new_entries_by_path =
cursor.slice(&TraversalTarget::path(&removed_entry.path), Bias::Left, &()); cursor.slice(&TraversalTarget::path(&removed_entry.path), Bias::Left);
while let Some(entry) = cursor.item() { while let Some(entry) = cursor.item() {
if entry.path.starts_with(&removed_entry.path) { if entry.path.starts_with(&removed_entry.path) {
self.entries_by_id.remove(&entry.id, &()); self.entries_by_id.remove(&entry.id, &());
cursor.next(&()); cursor.next();
} else { } else {
break; break;
} }
} }
new_entries_by_path.append(cursor.suffix(&()), &()); new_entries_by_path.append(cursor.suffix(), &());
new_entries_by_path new_entries_by_path
}; };
@ -2576,7 +2576,6 @@ impl Snapshot {
include_ignored, include_ignored,
}, },
Bias::Right, Bias::Right,
&(),
); );
Traversal { Traversal {
snapshot: self, snapshot: self,
@ -2632,7 +2631,7 @@ impl Snapshot {
options: ChildEntriesOptions, options: ChildEntriesOptions,
) -> ChildEntriesIter<'a> { ) -> ChildEntriesIter<'a> {
let mut cursor = self.entries_by_path.cursor(&()); let mut cursor = self.entries_by_path.cursor(&());
cursor.seek(&TraversalTarget::path(parent_path), Bias::Right, &()); cursor.seek(&TraversalTarget::path(parent_path), Bias::Right);
let traversal = Traversal { let traversal = Traversal {
snapshot: self, snapshot: self,
cursor, cursor,
@ -3056,9 +3055,9 @@ impl BackgroundScannerState {
.snapshot .snapshot
.entries_by_path .entries_by_path
.cursor::<TraversalProgress>(&()); .cursor::<TraversalProgress>(&());
new_entries = cursor.slice(&TraversalTarget::path(path), Bias::Left, &()); new_entries = cursor.slice(&TraversalTarget::path(path), Bias::Left);
removed_entries = cursor.slice(&TraversalTarget::successor(path), Bias::Left, &()); removed_entries = cursor.slice(&TraversalTarget::successor(path), Bias::Left);
new_entries.append(cursor.suffix(&()), &()); new_entries.append(cursor.suffix(), &());
} }
self.snapshot.entries_by_path = new_entries; self.snapshot.entries_by_path = new_entries;
@ -4925,15 +4924,15 @@ fn build_diff(
let mut old_paths = old_snapshot.entries_by_path.cursor::<PathKey>(&()); let mut old_paths = old_snapshot.entries_by_path.cursor::<PathKey>(&());
let mut new_paths = new_snapshot.entries_by_path.cursor::<PathKey>(&()); let mut new_paths = new_snapshot.entries_by_path.cursor::<PathKey>(&());
let mut last_newly_loaded_dir_path = None; let mut last_newly_loaded_dir_path = None;
old_paths.next(&()); old_paths.next();
new_paths.next(&()); new_paths.next();
for path in event_paths { for path in event_paths {
let path = PathKey(path.clone()); let path = PathKey(path.clone());
if old_paths.item().map_or(false, |e| e.path < path.0) { if old_paths.item().map_or(false, |e| e.path < path.0) {
old_paths.seek_forward(&path, Bias::Left, &()); old_paths.seek_forward(&path, Bias::Left);
} }
if new_paths.item().map_or(false, |e| e.path < path.0) { if new_paths.item().map_or(false, |e| e.path < path.0) {
new_paths.seek_forward(&path, Bias::Left, &()); new_paths.seek_forward(&path, Bias::Left);
} }
loop { loop {
match (old_paths.item(), new_paths.item()) { match (old_paths.item(), new_paths.item()) {
@ -4949,7 +4948,7 @@ fn build_diff(
match Ord::cmp(&old_entry.path, &new_entry.path) { match Ord::cmp(&old_entry.path, &new_entry.path) {
Ordering::Less => { Ordering::Less => {
changes.push((old_entry.path.clone(), old_entry.id, Removed)); changes.push((old_entry.path.clone(), old_entry.id, Removed));
old_paths.next(&()); old_paths.next();
} }
Ordering::Equal => { Ordering::Equal => {
if phase == EventsReceivedDuringInitialScan { if phase == EventsReceivedDuringInitialScan {
@ -4975,8 +4974,8 @@ fn build_diff(
changes.push((new_entry.path.clone(), new_entry.id, Updated)); changes.push((new_entry.path.clone(), new_entry.id, Updated));
} }
} }
old_paths.next(&()); old_paths.next();
new_paths.next(&()); new_paths.next();
} }
Ordering::Greater => { Ordering::Greater => {
let is_newly_loaded = phase == InitialScan let is_newly_loaded = phase == InitialScan
@ -4988,13 +4987,13 @@ fn build_diff(
new_entry.id, new_entry.id,
if is_newly_loaded { Loaded } else { Added }, if is_newly_loaded { Loaded } else { Added },
)); ));
new_paths.next(&()); new_paths.next();
} }
} }
} }
(Some(old_entry), None) => { (Some(old_entry), None) => {
changes.push((old_entry.path.clone(), old_entry.id, Removed)); changes.push((old_entry.path.clone(), old_entry.id, Removed));
old_paths.next(&()); old_paths.next();
} }
(None, Some(new_entry)) => { (None, Some(new_entry)) => {
let is_newly_loaded = phase == InitialScan let is_newly_loaded = phase == InitialScan
@ -5006,7 +5005,7 @@ fn build_diff(
new_entry.id, new_entry.id,
if is_newly_loaded { Loaded } else { Added }, if is_newly_loaded { Loaded } else { Added },
)); ));
new_paths.next(&()); new_paths.next();
} }
(None, None) => break, (None, None) => break,
} }
@ -5255,7 +5254,7 @@ impl<'a> Traversal<'a> {
start_path: &Path, start_path: &Path,
) -> Self { ) -> Self {
let mut cursor = snapshot.entries_by_path.cursor(&()); let mut cursor = snapshot.entries_by_path.cursor(&());
cursor.seek(&TraversalTarget::path(start_path), Bias::Left, &()); cursor.seek(&TraversalTarget::path(start_path), Bias::Left);
let mut traversal = Self { let mut traversal = Self {
snapshot, snapshot,
cursor, cursor,
@ -5282,14 +5281,13 @@ impl<'a> Traversal<'a> {
include_ignored: self.include_ignored, include_ignored: self.include_ignored,
}, },
Bias::Left, Bias::Left,
&(),
) )
} }
pub fn advance_to_sibling(&mut self) -> bool { pub fn advance_to_sibling(&mut self) -> bool {
while let Some(entry) = self.cursor.item() { while let Some(entry) = self.cursor.item() {
self.cursor self.cursor
.seek_forward(&TraversalTarget::successor(&entry.path), Bias::Left, &()); .seek_forward(&TraversalTarget::successor(&entry.path), Bias::Left);
if let Some(entry) = self.cursor.item() { if let Some(entry) = self.cursor.item() {
if (self.include_files || !entry.is_file()) if (self.include_files || !entry.is_file())
&& (self.include_dirs || !entry.is_dir()) && (self.include_dirs || !entry.is_dir())
@ -5307,7 +5305,7 @@ impl<'a> Traversal<'a> {
return false; return false;
}; };
self.cursor self.cursor
.seek(&TraversalTarget::path(parent_path), Bias::Left, &()) .seek(&TraversalTarget::path(parent_path), Bias::Left)
} }
pub fn entry(&self) -> Option<&'a Entry> { pub fn entry(&self) -> Option<&'a Entry> {
@ -5326,7 +5324,7 @@ impl<'a> Traversal<'a> {
pub fn end_offset(&self) -> usize { pub fn end_offset(&self) -> usize {
self.cursor self.cursor
.end(&()) .end()
.count(self.include_files, self.include_dirs, self.include_ignored) .count(self.include_files, self.include_dirs, self.include_ignored)
} }
} }

View file

@ -554,6 +554,7 @@ pub fn main() {
supermaven::init(app_state.client.clone(), cx); supermaven::init(app_state.client.clone(), cx);
language_model::init(app_state.client.clone(), cx); language_model::init(app_state.client.clone(), cx);
language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx); language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx);
agent_settings::init(cx);
agent_servers::init(cx); agent_servers::init(cx);
web_search::init(cx); web_search::init(cx);
web_search_providers::init(app_state.client.clone(), cx); web_search_providers::init(app_state.client.clone(), cx);

View file

@ -145,15 +145,15 @@ pub fn app_menus() -> Vec<Menu> {
items: vec![ items: vec![
MenuItem::action( MenuItem::action(
"Zoom In", "Zoom In",
zed_actions::IncreaseBufferFontSize { persist: true }, zed_actions::IncreaseBufferFontSize { persist: false },
), ),
MenuItem::action( MenuItem::action(
"Zoom Out", "Zoom Out",
zed_actions::DecreaseBufferFontSize { persist: true }, zed_actions::DecreaseBufferFontSize { persist: false },
), ),
MenuItem::action( MenuItem::action(
"Reset Zoom", "Reset Zoom",
zed_actions::ResetBufferFontSize { persist: true }, zed_actions::ResetBufferFontSize { persist: false },
), ),
MenuItem::separator(), MenuItem::separator(),
MenuItem::action("Toggle Left Dock", workspace::ToggleLeftDock), MenuItem::action("Toggle Left Dock", workspace::ToggleLeftDock),

View file

@ -2,6 +2,7 @@ mod preview;
mod repl_menu; mod repl_menu;
use agent_settings::AgentSettings; use agent_settings::AgentSettings;
use client::DisableAiSettings;
use editor::actions::{ use editor::actions::{
AddSelectionAbove, AddSelectionBelow, CodeActionSource, DuplicateLineDown, GoToDiagnostic, AddSelectionAbove, AddSelectionBelow, CodeActionSource, DuplicateLineDown, GoToDiagnostic,
GoToHunk, GoToPreviousDiagnostic, GoToPreviousHunk, MoveLineDown, MoveLineUp, SelectAll, GoToHunk, GoToPreviousDiagnostic, GoToPreviousHunk, MoveLineDown, MoveLineUp, SelectAll,
@ -32,6 +33,7 @@ const MAX_CODE_ACTION_MENU_LINES: u32 = 16;
pub struct QuickActionBar { pub struct QuickActionBar {
_inlay_hints_enabled_subscription: Option<Subscription>, _inlay_hints_enabled_subscription: Option<Subscription>,
_ai_settings_subscription: Subscription,
active_item: Option<Box<dyn ItemHandle>>, active_item: Option<Box<dyn ItemHandle>>,
buffer_search_bar: Entity<BufferSearchBar>, buffer_search_bar: Entity<BufferSearchBar>,
show: bool, show: bool,
@ -46,8 +48,28 @@ impl QuickActionBar {
workspace: &Workspace, workspace: &Workspace,
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> Self { ) -> Self {
let mut was_ai_disabled = DisableAiSettings::get_global(cx).disable_ai;
let mut was_agent_enabled = AgentSettings::get_global(cx).enabled;
let mut was_agent_button = AgentSettings::get_global(cx).button;
let ai_settings_subscription = cx.observe_global::<SettingsStore>(move |_, cx| {
let is_ai_disabled = DisableAiSettings::get_global(cx).disable_ai;
let agent_settings = AgentSettings::get_global(cx);
if was_ai_disabled != is_ai_disabled
|| was_agent_enabled != agent_settings.enabled
|| was_agent_button != agent_settings.button
{
was_ai_disabled = is_ai_disabled;
was_agent_enabled = agent_settings.enabled;
was_agent_button = agent_settings.button;
cx.notify();
}
});
let mut this = Self { let mut this = Self {
_inlay_hints_enabled_subscription: None, _inlay_hints_enabled_subscription: None,
_ai_settings_subscription: ai_settings_subscription,
active_item: None, active_item: None,
buffer_search_bar, buffer_search_bar,
show: true, show: true,
@ -575,7 +597,9 @@ impl Render for QuickActionBar {
.children(self.render_preview_button(self.workspace.clone(), cx)) .children(self.render_preview_button(self.workspace.clone(), cx))
.children(search_button) .children(search_button)
.when( .when(
AgentSettings::get_global(cx).enabled && AgentSettings::get_global(cx).button, AgentSettings::get_global(cx).enabled
&& AgentSettings::get_global(cx).button
&& !DisableAiSettings::get_global(cx).disable_ai,
|bar| bar.child(assistant_button), |bar| bar.child(assistant_button),
) )
.children(code_actions_dropdown) .children(code_actions_dropdown)

View file

@ -1,10 +1,11 @@
use std::any::{Any, TypeId}; use std::any::{Any, TypeId};
use client::DisableAiSettings;
use command_palette_hooks::CommandPaletteFilter; use command_palette_hooks::CommandPaletteFilter;
use feature_flags::{FeatureFlagAppExt as _, PredictEditsRateCompletionsFeatureFlag}; use feature_flags::{FeatureFlagAppExt as _, PredictEditsRateCompletionsFeatureFlag};
use gpui::actions; use gpui::actions;
use language::language_settings::{AllLanguageSettings, EditPredictionProvider}; use language::language_settings::{AllLanguageSettings, EditPredictionProvider};
use settings::update_settings_file; use settings::{Settings, SettingsStore, update_settings_file};
use ui::App; use ui::App;
use workspace::Workspace; use workspace::Workspace;
@ -21,6 +22,8 @@ actions!(
); );
pub fn init(cx: &mut App) { pub fn init(cx: &mut App) {
feature_gate_predict_edits_actions(cx);
cx.observe_new(move |workspace: &mut Workspace, _, _cx| { cx.observe_new(move |workspace: &mut Workspace, _, _cx| {
workspace.register_action(|workspace, _: &RateCompletions, window, cx| { workspace.register_action(|workspace, _: &RateCompletions, window, cx| {
if cx.has_flag::<PredictEditsRateCompletionsFeatureFlag>() { if cx.has_flag::<PredictEditsRateCompletionsFeatureFlag>() {
@ -53,27 +56,57 @@ pub fn init(cx: &mut App) {
}); });
}) })
.detach(); .detach();
feature_gate_predict_edits_rating_actions(cx);
} }
fn feature_gate_predict_edits_rating_actions(cx: &mut App) { fn feature_gate_predict_edits_actions(cx: &mut App) {
let rate_completion_action_types = [TypeId::of::<RateCompletions>()]; let rate_completion_action_types = [TypeId::of::<RateCompletions>()];
let reset_onboarding_action_types = [TypeId::of::<ResetOnboarding>()];
let zeta_all_action_types = [
TypeId::of::<RateCompletions>(),
TypeId::of::<ResetOnboarding>(),
zed_actions::OpenZedPredictOnboarding.type_id(),
TypeId::of::<crate::ClearHistory>(),
TypeId::of::<crate::ThumbsUpActiveCompletion>(),
TypeId::of::<crate::ThumbsDownActiveCompletion>(),
TypeId::of::<crate::NextEdit>(),
TypeId::of::<crate::PreviousEdit>(),
];
CommandPaletteFilter::update_global(cx, |filter, _cx| { CommandPaletteFilter::update_global(cx, |filter, _cx| {
filter.hide_action_types(&rate_completion_action_types); filter.hide_action_types(&rate_completion_action_types);
filter.hide_action_types(&reset_onboarding_action_types);
filter.hide_action_types(&[zed_actions::OpenZedPredictOnboarding.type_id()]); filter.hide_action_types(&[zed_actions::OpenZedPredictOnboarding.type_id()]);
}); });
cx.observe_global::<SettingsStore>(move |cx| {
let is_ai_disabled = DisableAiSettings::get_global(cx).disable_ai;
let has_feature_flag = cx.has_flag::<PredictEditsRateCompletionsFeatureFlag>();
CommandPaletteFilter::update_global(cx, |filter, _cx| {
if is_ai_disabled {
filter.hide_action_types(&zeta_all_action_types);
} else {
if has_feature_flag {
filter.show_action_types(rate_completion_action_types.iter());
} else {
filter.hide_action_types(&rate_completion_action_types);
}
}
});
})
.detach();
cx.observe_flag::<PredictEditsRateCompletionsFeatureFlag, _>(move |is_enabled, cx| { cx.observe_flag::<PredictEditsRateCompletionsFeatureFlag, _>(move |is_enabled, cx| {
if is_enabled { if !DisableAiSettings::get_global(cx).disable_ai {
CommandPaletteFilter::update_global(cx, |filter, _cx| { if is_enabled {
filter.show_action_types(rate_completion_action_types.iter()); CommandPaletteFilter::update_global(cx, |filter, _cx| {
}); filter.show_action_types(rate_completion_action_types.iter());
} else { });
CommandPaletteFilter::update_global(cx, |filter, _cx| { } else {
filter.hide_action_types(&rate_completion_action_types); CommandPaletteFilter::update_global(cx, |filter, _cx| {
}); filter.hide_action_types(&rate_completion_action_types);
});
}
} }
}) })
.detach(); .detach();

View file

@ -444,14 +444,17 @@ Custom models will be listed in the model dropdown in the Agent Panel.
### OpenAI API Compatible {#openai-api-compatible} ### OpenAI API Compatible {#openai-api-compatible}
Zed supports using OpenAI compatible APIs by specifying a custom `endpoint` and `available_models` for the OpenAI provider. Zed supports using [OpenAI compatible APIs](https://platform.openai.com/docs/api-reference/chat) by specifying a custom `api_url` and `available_models` for the OpenAI provider. This is useful for connecting to other hosted services (like Together AI, Anyscale, etc.) or local models.
Zed supports using OpenAI compatible APIs by specifying a custom `api_url` and `available_models` for the OpenAI provider. This is useful for connecting to other hosted services (like Together AI, Anyscale, etc.) or local models. To configure a compatible API, you can add a custom API URL for OpenAI either via the UI (currently available only in Preview) or by editing your `settings.json`.
To configure a compatible API, you can add a custom API URL for OpenAI either via the UI or by editing your `settings.json`. For example, to connect to [Together AI](https://www.together.ai/): For example, to connect to [Together AI](https://www.together.ai/) via the UI:
1. Get an API key from your [Together AI account](https://api.together.ai/settings/api-keys). 1. Get an API key from your [Together AI account](https://api.together.ai/settings/api-keys).
2. Add the following to your `settings.json`: 2. Go to the Agent Panel's settings view, click on the "Add Provider" button, and then on the "OpenAI" menu item
3. Add the requested fields, such as `api_url`, `api_key`, available models, and others
Alternatively, you can also add it via the `settings.json`:
```json ```json
{ {

View file

@ -39,7 +39,7 @@ CRATE_PATH="crates/$CRATE_NAME"
mkdir -p "$CRATE_PATH/src" mkdir -p "$CRATE_PATH/src"
# Symlink the license # Symlink the license
ln -sf "../../../$LICENSE_FILE" "$CRATE_PATH/$LICENSE_FILE" ln -sf "../../$LICENSE_FILE" "$CRATE_PATH/$LICENSE_FILE"
CARGO_TOML_TEMPLATE=$(cat << 'EOF' CARGO_TOML_TEMPLATE=$(cat << 'EOF'
[package] [package]